Actual source code: pbvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:    This file contains routines for Parallel vector operations.
  4:  */
 5:  #include src/vec/impls/mpi/pvecimpl.h

  7: /*
  8:        Note this code is very similar to VecPublish_Seq()
  9: */
 12: static PetscErrorCode VecPublish_MPI(PetscObject obj)
 13: {
 15:   return(0);
 16: }

 20: PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
 21: {
 22:   PetscScalar    sum,work;

 26:   VecDot_Seq(xin,yin,&work);
 27:   MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
 28:   *z = sum;
 29:   return(0);
 30: }

 34: PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
 35: {
 36:   PetscScalar    sum,work;

 40:   VecTDot_Seq(xin,yin,&work);
 41:   MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
 42:   *z   = sum;
 43:   return(0);
 44: }

 48: PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
 49: {
 51:   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
 52:     v->stash.donotstash = PETSC_TRUE;
 53:   } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
 54:     v->stash.donotstash = PETSC_FALSE;
 55:   }
 56:   return(0);
 57: }
 58: 
 59: EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
 61: EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

 66: PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
 67: {
 69:   Vec_MPI        *v = (Vec_MPI *)vin->data;

 72:   if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
 73:   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
 74:   v->array = (PetscScalar *)a;
 75:   if (v->localrep) {
 76:     VecPlaceArray(v->localrep,a);
 77:   }
 78:   return(0);
 79: }

 81: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
 82: EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

 84: static struct _VecOps DvOps = { VecDuplicate_MPI,
 85:             VecDuplicateVecs_Default,
 86:             VecDestroyVecs_Default,
 87:             VecDot_MPI,
 88:             VecMDot_MPI,
 89:             VecNorm_MPI,
 90:             VecTDot_MPI,
 91:             VecMTDot_MPI,
 92:             VecScale_Seq,
 93:             VecCopy_Seq,
 94:             VecSet_Seq,
 95:             VecSwap_Seq,
 96:             VecAXPY_Seq,
 97:             VecAXPBY_Seq,
 98:             VecMAXPY_Seq,
 99:             VecAYPX_Seq,
100:             VecWAXPY_Seq,
101:             VecPointwiseMult_Seq,
102:             VecPointwiseDivide_Seq,
103:             VecSetValues_MPI,
104:             VecAssemblyBegin_MPI,
105:             VecAssemblyEnd_MPI,
106:             VecGetArray_Seq,
107:             VecGetSize_MPI,
108:             VecGetSize_Seq,
109:             VecRestoreArray_Seq,
110:             VecMax_MPI,
111:             VecMin_MPI,
112:             VecSetRandom_Seq,
113:             VecSetOption_MPI,
114:             VecSetValuesBlocked_MPI,
115:             VecDestroy_MPI,
116:             VecView_MPI,
117:             VecPlaceArray_MPI,
118:             VecReplaceArray_Seq,
119:             VecDot_Seq,
120:             VecTDot_Seq,
121:             VecNorm_Seq,
122:             VecLoadIntoVector_Default,
123:             VecReciprocal_Default,
124:             0, /* VecViewNative... */
125:             VecConjugate_Seq,
126:             0,
127:             0,
128:             VecResetArray_Seq,
129:             0,
130:             VecMaxPointwiseDivide_Seq,
131:             VecLoad_Binary,
132:             VecPointwiseMax_Seq,
133:             VecPointwiseMaxAbs_Seq,
134:             VecPointwiseMin_Seq,
135:             VecGetValues_MPI};

139: /*
140:     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
141:     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
142:     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
143: */
144: PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[],PetscMap map)
145: {
146:   Vec_MPI        *s;
148:   PetscMPIInt    size,rank;

151:   MPI_Comm_size(v->comm,&size);
152:   MPI_Comm_rank(v->comm,&rank);

154:   v->bops->publish   = VecPublish_MPI;
155:   PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
156:   PetscNew(Vec_MPI,&s);
157:   PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
158:   v->data        = (void*)s;
159:   s->nghost      = nghost;
160:   v->mapping     = 0;
161:   v->bmapping    = 0;
162:   v->petscnative = PETSC_TRUE;

164:   if (array) {
165:     s->array           = (PetscScalar *)array;
166:     s->array_allocated = 0;
167:   } else {
168:     PetscInt n         = v->n+nghost;
169:     PetscMalloc(n*sizeof(PetscScalar),&s->array);
170:     s->array_allocated = s->array;
171:     PetscMemzero(s->array,v->n*sizeof(PetscScalar));
172:   }

174:   /* By default parallel vectors do not have local representation */
175:   s->localrep    = 0;
176:   s->localupdate = 0;

178:   v->stash.insertmode  = NOT_SET_VALUES;

180:   if (!v->map) {
181:     if (!map) {
182:       PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);
183:     } else {
184:       v->map = map;
185:       PetscObjectReference((PetscObject)map);
186:     }
187:   }
188:   /* create the stashes. The block-size for bstash is set later when 
189:      VecSetValuesBlocked is called.
190:   */
191:   VecStashCreate_Private(v->comm,1,&v->stash);
192:   VecStashCreate_Private(v->comm,v->bs,&v->bstash);
193: 
194: #if defined(PETSC_HAVE_MATLAB)
195:   PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
196:   PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
197: #endif
198:   PetscObjectChangeTypeName((PetscObject)v,VECMPI);
199:   PetscPublishAll(v);
200:   return(0);
201: }

203: /*MC
204:    VECMPI - VECMPI = "mpi" - The basic parallel vector

206:    Options Database Keys:
207: . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

209:   Level: beginner

211: .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMpiWithArray(), VECMPI, VecType, VecCreateMPI(), VecCreateMpi()
212: M*/

217: PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
218: {

222:   if (vv->bs > 0) {
223:     PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);
224:   } else {
225:     PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
226:   }
227:   VecCreate_MPI_Private(vv,0,0,PETSC_NULL);
228:   return(0);
229: }

234: /*@C
235:    VecCreateMPIWithArray - Creates a parallel, array-style vector,
236:    where the user provides the array space to store the vector values.

238:    Collective on MPI_Comm

240:    Input Parameters:
241: +  comm  - the MPI communicator to use
242: .  n     - local vector length, cannot be PETSC_DECIDE
243: .  N     - global vector length (or PETSC_DECIDE to have calculated)
244: -  array - the user provided array to store the vector values

246:    Output Parameter:
247: .  vv - the vector
248:  
249:    Notes:
250:    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
251:    same type as an existing vector.

253:    If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
254:    at a later stage to SET the array for storing the vector values.

256:    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
257:    The user should not free the array until the vector is destroyed.

259:    Level: intermediate

261:    Concepts: vectors^creating with array

263: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
264:           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

266: @*/
267: PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
268: {

272:   if (n == PETSC_DECIDE) {
273:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
274:   }
275:   PetscSplitOwnership(comm,&n,&N);
276:   VecCreate(comm,vv);
277:   VecSetSizes(*vv,n,N);
278:   VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);
279:   return(0);
280: }

284: /*@C
285:     VecGhostGetLocalForm - Obtains the local ghosted representation of 
286:     a parallel vector created with VecCreateGhost().

288:     Not Collective

290:     Input Parameter:
291: .   g - the global vector. Vector must be have been obtained with either
292:         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

294:     Output Parameter:
295: .   l - the local (ghosted) representation

297:     Notes:
298:     This routine does not actually update the ghost values, but rather it
299:     returns a sequential vector that includes the locations for the ghost
300:     values and their current values. The returned vector and the original
301:     vector passed in share the same array that contains the actual vector data.

303:     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
304:     finished using the object.

306:     Level: advanced

308:    Concepts: vectors^ghost point access

310: .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

312: @*/
313: PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
314: {
316:   PetscTruth     isseq,ismpi;


322:   PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
323:   PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
324:   if (ismpi) {
325:     Vec_MPI *v  = (Vec_MPI*)g->data;
326:     if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
327:     *l = v->localrep;
328:   } else if (isseq) {
329:     *l = g;
330:   } else {
331:     SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
332:   }
333:   PetscObjectReference((PetscObject)*l);
334:   return(0);
335: }

339: /*@C
340:     VecGhostRestoreLocalForm - Restores the local ghosted representation of 
341:     a parallel vector obtained with VecGhostGetLocalForm().

343:     Not Collective

345:     Input Parameter:
346: +   g - the global vector
347: -   l - the local (ghosted) representation

349:     Notes:
350:     This routine does not actually update the ghost values, but rather it
351:     returns a sequential vector that includes the locations for the ghost values
352:     and their current values.

354:     Level: advanced

356: .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
357: @*/
358: PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
359: {
361:   PetscObjectDereference((PetscObject)*l);
362:   return(0);
363: }

367: /*@
368:    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
369:    local representation to global or global representation to local.

371:    Collective on Vec

373:    Input Parameters:
374: +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
375: .  insertmode - one of ADD_VALUES or INSERT_VALUES
376: -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

378:    Notes:
379:    Use the following to update the ghost regions with correct values from the owning process
380: .vb
381:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
382:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
383: .ve

385:    Use the following to accumulate the ghost region values onto the owning processors
386: .vb
387:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
388:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
389: .ve

391:    To accumulate the ghost region values onto the owning processors and then update
392:    the ghost regions correctly, call the later followed by the former, i.e.,
393: .vb
394:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
395:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
396:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
397:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
398: .ve

400:    Level: advanced

402: .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
403:           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

405: @*/
406: PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
407: {
408:   Vec_MPI        *v;


414:   v  = (Vec_MPI*)g->data;
415:   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
416:   if (!v->localupdate) return(0);
417: 
418:   if (scattermode == SCATTER_REVERSE) {
419:     VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
420:   } else {
421:     VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
422:   }
423:   return(0);
424: }

428: /*@
429:    VecGhostUpdateEnd - End the vector scatter to update the vector from
430:    local representation to global or global representation to local.

432:    Collective on Vec

434:    Input Parameters:
435: +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
436: .  insertmode - one of ADD_VALUES or INSERT_VALUES
437: -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

439:    Notes:

441:    Use the following to update the ghost regions with correct values from the owning process
442: .vb
443:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
444:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
445: .ve

447:    Use the following to accumulate the ghost region values onto the owning processors
448: .vb
449:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
450:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
451: .ve

453:    To accumulate the ghost region values onto the owning processors and then update
454:    the ghost regions correctly, call the later followed by the former, i.e.,
455: .vb
456:        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
457:        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
458:        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
459:        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
460: .ve

462:    Level: advanced

464: .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
465:           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

467: @*/
468: PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
469: {
470:   Vec_MPI        *v;


476:   v  = (Vec_MPI*)g->data;
477:   if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
478:   if (!v->localupdate) return(0);

480:   if (scattermode == SCATTER_REVERSE) {
481:     VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
482:   } else {
483:     VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
484:   }
485:   return(0);
486: }

490: /*@C
491:    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
492:    the caller allocates the array space.

494:    Collective on MPI_Comm

496:    Input Parameters:
497: +  comm - the MPI communicator to use
498: .  n - local vector length 
499: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
500: .  nghost - number of local ghost points
501: .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
502: -  array - the space to store the vector values (as long as n + nghost)

504:    Output Parameter:
505: .  vv - the global vector representation (without ghost points as part of vector)
506:  
507:    Notes:
508:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
509:    of the vector.

511:    Level: advanced

513:    Concepts: vectors^creating with array
514:    Concepts: vectors^ghosted

516: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 
517:           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
518:           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

520: @*/
521: PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
522: {
524:   Vec_MPI        *w;
525:   PetscScalar    *larray;
526:   IS             from,to;

529:   *vv = 0;

531:   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
532:   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
533:   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
534:   PetscSplitOwnership(comm,&n,&N);
535:   /* Create global representation */
536:   VecCreate(comm,vv);
537:   VecSetSizes(*vv,n,N);
538:   VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);
539:   w    = (Vec_MPI *)(*vv)->data;
540:   /* Create local representation */
541:   VecGetArray(*vv,&larray);
542:   VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
543:   PetscLogObjectParent(*vv,w->localrep);
544:   VecRestoreArray(*vv,&larray);

546:   /*
547:        Create scatter context for scattering (updating) ghost values 
548:   */
549:   ISCreateGeneral(comm,nghost,ghosts,&from);
550:   ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
551:   VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
552:   PetscLogObjectParent(*vv,w->localupdate);
553:   ISDestroy(to);
554:   ISDestroy(from);

556:   return(0);
557: }

561: /*@C
562:    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

564:    Collective on MPI_Comm

566:    Input Parameters:
567: +  comm - the MPI communicator to use
568: .  n - local vector length 
569: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
570: .  nghost - number of local ghost points
571: -  ghosts - global indices of ghost points

573:    Output Parameter:
574: .  vv - the global vector representation (without ghost points as part of vector)
575:  
576:    Notes:
577:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
578:    of the vector.

580:    Level: advanced

582:    Concepts: vectors^ghosted

584: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
585:           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
586:           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
587:           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

589: @*/
590: PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
591: {

595:   VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
596:   return(0);
597: }

601: PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
602: {
604:   Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
605:   PetscScalar    *array;

608:   VecCreate(win->comm,v);
609:   VecSetSizes(*v,win->n,win->N);
610:   VecCreate_MPI_Private(*v,w->nghost,0,win->map);
611:   vw   = (Vec_MPI *)(*v)->data;
612:   PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

614:   /* save local representation of the parallel vector (and scatter) if it exists */
615:   if (w->localrep) {
616:     VecGetArray(*v,&array);
617:     VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);
618:     PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
619:     VecRestoreArray(*v,&array);
620:     PetscLogObjectParent(*v,vw->localrep);
621:     vw->localupdate = w->localupdate;
622:     if (vw->localupdate) {
623:       PetscObjectReference((PetscObject)vw->localupdate);
624:     }
625:   }

627:   /* New vector should inherit stashing property of parent */
628:   (*v)->stash.donotstash = win->stash.donotstash;
629: 
630:   PetscOListDuplicate(win->olist,&(*v)->olist);
631:   PetscFListDuplicate(win->qlist,&(*v)->qlist);
632:   if (win->mapping) {
633:     (*v)->mapping = win->mapping;
634:     PetscObjectReference((PetscObject)win->mapping);
635:   }
636:   if (win->bmapping) {
637:     (*v)->bmapping = win->bmapping;
638:     PetscObjectReference((PetscObject)win->bmapping);
639:   }
640:   (*v)->bs        = win->bs;
641:   (*v)->bstash.bs = win->bstash.bs;

643:   return(0);
644: }

646: /* ------------------------------------------------------------------------------------------*/
649: /*@C
650:    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
651:    the caller allocates the array space. Indices in the ghost region are based on blocks.

653:    Collective on MPI_Comm

655:    Input Parameters:
656: +  comm - the MPI communicator to use
657: .  bs - block size
658: .  n - local vector length 
659: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
660: .  nghost - number of local ghost blocks
661: .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
662: -  array - the space to store the vector values (as long as n + nghost*bs)

664:    Output Parameter:
665: .  vv - the global vector representation (without ghost points as part of vector)
666:  
667:    Notes:
668:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
669:    of the vector.

671:    n is the local vector size (total local size not the number of blocks) while nghost
672:    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
673:    portion is bs*nghost

675:    Level: advanced

677:    Concepts: vectors^creating ghosted
678:    Concepts: vectors^creating with array

680: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), 
681:           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
682:           VecCreateGhostWithArray(), VecCreateGhostBlocked()

684: @*/
685: PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
686: {
688:   Vec_MPI        *w;
689:   PetscScalar    *larray;
690:   IS             from,to;

693:   *vv = 0;

695:   if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
696:   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
697:   if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
698:   PetscSplitOwnership(comm,&n,&N);
699:   /* Create global representation */
700:   VecCreate(comm,vv);
701:   VecSetSizes(*vv,n,N);
702:   VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);
703:   VecSetBlockSize(*vv,bs);
704:   w    = (Vec_MPI *)(*vv)->data;
705:   /* Create local representation */
706:   VecGetArray(*vv,&larray);
707:   VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
708:   VecSetBlockSize(w->localrep,bs);
709:   PetscLogObjectParent(*vv,w->localrep);
710:   VecRestoreArray(*vv,&larray);

712:   /*
713:        Create scatter context for scattering (updating) ghost values 
714:   */
715:   ISCreateBlock(comm,bs,nghost,ghosts,&from);
716:   ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
717:   VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
718:   PetscLogObjectParent(*vv,w->localupdate);
719:   ISDestroy(to);
720:   ISDestroy(from);

722:   return(0);
723: }

727: /*@C
728:    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
729:         The indicing of the ghost points is done with blocks.

731:    Collective on MPI_Comm

733:    Input Parameters:
734: +  comm - the MPI communicator to use
735: .  bs - the block size
736: .  n - local vector length 
737: .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
738: .  nghost - number of local ghost blocks
739: -  ghosts - global indices of ghost blocks

741:    Output Parameter:
742: .  vv - the global vector representation (without ghost points as part of vector)
743:  
744:    Notes:
745:    Use VecGhostGetLocalForm() to access the local, ghosted representation 
746:    of the vector.

748:    n is the local vector size (total local size not the number of blocks) while nghost
749:    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
750:    portion is bs*nghost

752:    Level: advanced

754:    Concepts: vectors^ghosted

756: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
757:           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
758:           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

760: @*/
761: PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
762: {

766:   VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
767:   return(0);
768: }

770: /*
771:     These introduce a ghosted vector where the ghosting is determined by the call to 
772:   VecSetLocalToGlobalMapping()
773: */

777: PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
778: {
780:   Vec_MPI        *v = (Vec_MPI *)vv->data;

783:   v->nghost = map->n - vv->n;

785:   /* we need to make longer the array space that was allocated when the vector was created */
786:   PetscFree(v->array_allocated);
787:   PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
788:   v->array = v->array_allocated;
789: 
790:   /* Create local representation */
791:   VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
792:   PetscLogObjectParent(vv,v->localrep);
793:   return(0);
794: }


799: PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
800: {
802:   Vec_MPI        *v = (Vec_MPI *)vv->data;

805:   VecSetValues(v->localrep,n,ix,values,mode);
806:   return(0);
807: }

812: PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
813: {

817:   VecSetType(vv,VECMPI);
818: 
819:   /* overwrite the functions to handle setting values locally */
820:   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
821:   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
822:   vv->ops->assemblybegin           = 0;
823:   vv->ops->assemblyend             = 0;
824:   vv->ops->setvaluesblocked        = 0;
825:   vv->ops->setvaluesblocked        = 0;

827:   return(0);
828: }