Actual source code: pbvec.c
1: /*$Id: pbvec.c,v 1.173 2001/09/12 03:26:59 bsmith Exp $*/
3: /*
4:    This file contains routines for parallel vector operations.
5: */
6: #include "src/vec/impls/mpi/pvecimpl.h"
8: /*
9: Note this code is very similar to VecPublish_Seq()
10: */
11: static int VecPublish_MPI(PetscObject obj)
12: {
13: #if defined(PETSC_HAVE_AMS)
14: Vec v = (Vec) obj;
15: Vec_MPI *s = (Vec_MPI*)v->data;
16: int ierr,(*f)(AMS_Memory,char *,Vec);
17: #endif
20: #if defined(PETSC_HAVE_AMS)
21: /* if it is already published then return */
22: if (v->amem >=0) return(0);
24: PetscObjectPublishBaseBegin(obj);
25: AMS_Memory_add_field((AMS_Memory)v->amem,"values",s->array,v->n,AMS_DOUBLE,AMS_READ,
26: AMS_DISTRIBUTED,AMS_REDUCT_UNDEF);
28: /*
29: If the vector knows its "layout" let it set it, otherwise it defaults
30: to correct 1d distribution
31: */
32: PetscObjectQueryFunction(obj,"AMSSetFieldBlock_C",(void (**)(void))&f);
33: if (f) {
34: (*f)((AMS_Memory)v->amem,"values",v);
35: }
36: PetscObjectPublishBaseEnd(obj);
37: #endif
38: return(0);
39: }
41: int VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
42: {
43: PetscScalar sum,work;
44: int ierr;
47: VecDot_Seq(xin,yin,&work);
48: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
49: *z = sum;
50: return(0);
51: }
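/*
   Note: VecDot_MPI() and VecTDot_MPI() above follow the standard pattern for
   parallel reductions: each process applies the sequential kernel to its
   locally owned entries and a single MPI_Allreduce() combines the partial
   results. A minimal sketch of the same idea for plain C arrays (nlocal, x,
   y and comm are placeholders; the real code uses MPIU_SCALAR so that
   complex builds also work):

     double localdot = 0.0,globaldot;
     int    i;
     for (i=0; i<nlocal; i++) localdot += x[i]*y[i];
     MPI_Allreduce(&localdot,&globaldot,1,MPI_DOUBLE,MPI_SUM,comm);
*/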
53: int VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
54: {
55: PetscScalar sum,work;
56: int ierr;
59: VecTDot_Seq(xin,yin,&work);
60: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
61: *z = sum;
62: return(0);
63: }
65: int VecSetOption_MPI(Vec v,VecOption op)
66: {
68: if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
69: v->stash.donotstash = PETSC_TRUE;
70: } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
71: v->stash.donotstash = PETSC_FALSE;
72: }
73: return(0);
74: }
75:
76: EXTERN int VecDuplicate_MPI(Vec,Vec *);
77: EXTERN_C_BEGIN
78: EXTERN int VecView_MPI_Draw(Vec,PetscViewer);
79: EXTERN_C_END
81: static struct _VecOps DvOps = { VecDuplicate_MPI,
82: VecDuplicateVecs_Default,
83: VecDestroyVecs_Default,
84: VecDot_MPI,
85: VecMDot_MPI,
86: VecNorm_MPI,
87: VecTDot_MPI,
88: VecMTDot_MPI,
89: VecScale_Seq,
90: VecCopy_Seq,
91: VecSet_Seq,
92: VecSwap_Seq,
93: VecAXPY_Seq,
94: VecAXPBY_Seq,
95: VecMAXPY_Seq,
96: VecAYPX_Seq,
97: VecWAXPY_Seq,
98: VecPointwiseMult_Seq,
99: VecPointwiseDivide_Seq,
100: VecSetValues_MPI,
101: VecAssemblyBegin_MPI,
102: VecAssemblyEnd_MPI,
103: VecGetArray_Seq,
104: VecGetSize_MPI,
105: VecGetSize_Seq,
106: VecRestoreArray_Seq,
107: VecMax_MPI,
108: VecMin_MPI,
109: VecSetRandom_Seq,
110: VecSetOption_MPI,
111: VecSetValuesBlocked_MPI,
112: VecDestroy_MPI,
113: VecView_MPI,
114: VecPlaceArray_Seq,
115: VecReplaceArray_Seq,
116: VecDot_Seq,
117: VecTDot_Seq,
118: VecNorm_Seq,
119: VecLoadIntoVector_Default,
120: VecReciprocal_Default,
121: 0, /* VecViewNative... */
122: VecConjugate_Seq,
123: 0,
124: 0,
125: VecResetArray_Seq};
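/*
   Note: in the operations table above only the operations that need
   interprocess communication or knowledge of the global layout (dot
   products, norms, max/min, setting and assembling off-process values,
   duplicate, destroy, view, global size) use the _MPI implementations;
   the purely entrywise operations (scale, copy, set, axpy, pointwise
   products, array access, ...) reuse the _Seq kernels, which act only on
   the locally owned array.
*/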
127: /*
128: VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
129: VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
130:     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
131: */
132: int VecCreate_MPI_Private(Vec v,int nghost,const PetscScalar array[],PetscMap map)
133: {
134: Vec_MPI *s;
135: int ierr,size,rank;
138: MPI_Comm_size(v->comm,&size);
139: MPI_Comm_rank(v->comm,&rank);
141: v->bops->publish = VecPublish_MPI;
142: PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
143: ierr = PetscNew(Vec_MPI,&s);
144: ierr = PetscMemzero(s,sizeof(Vec_MPI));
145: ierr = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
146: v->data = (void*)s;
147: s->nghost = nghost;
148: v->mapping = 0;
149: v->bmapping = 0;
150: v->petscnative = PETSC_TRUE;
152: if (array) {
153: s->array = (PetscScalar *)array;
154: s->array_allocated = 0;
155: } else {
156: ierr = PetscMalloc((v->n+nghost+1)*sizeof(PetscScalar),&s->array);
157: s->array_allocated = s->array;
158: ierr = PetscMemzero(s->array,v->n*sizeof(PetscScalar));
159: }
161: /* By default parallel vectors do not have local representation */
162: s->localrep = 0;
163: s->localupdate = 0;
165: v->stash.insertmode = NOT_SET_VALUES;
167: if (!v->map) {
168: if (!map) {
169: PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);
170: } else {
171: v->map = map;
172: PetscObjectReference((PetscObject)map);
173: }
174: }
175: /* create the stashes. The block-size for bstash is set later when
176: VecSetValuesBlocked is called.
177: */
178: VecStashCreate_Private(v->comm,1,&v->stash);
179: VecStashCreate_Private(v->comm,v->bs,&v->bstash);
180:
181: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
182: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
183: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
184: #endif
185: PetscObjectChangeTypeName((PetscObject)v,VECMPI);
186: PetscPublishAll(v);
187: return(0);
188: }
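/*
   Note: the storage set up above is one contiguous array with the v->n owned
   entries first and the nghost ghost entries immediately after them (the
   extra +1 in the allocation presumably just avoids a zero-length malloc);
   only the owned part is zeroed here. The ghosted "local form" built by
   VecCreateGhostWithArray() and VecDuplicate_MPI() below is a sequential
   vector wrapped around this same array.
*/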
190: EXTERN_C_BEGIN
191: int VecCreate_MPI(Vec vv)
192: {
196: if (vv->bs > 0) {
197: PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);
198: } else {
199: PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
200: }
201: VecCreate_MPI_Private(vv,0,0,PETSC_NULL);
202: VecSetSerializeType(vv,VEC_SER_MPI_BINARY);
203: return(0);
204: }
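/*
   Usage sketch (illustrative only; comm and the sizes are placeholders):
   VecCreate_MPI() is normally reached through VecCreateMPI() or through
   VecSetType(), e.g.

     Vec v;
     VecCreate(comm,&v);
     VecSetSizes(v,PETSC_DECIDE,100);
     VecSetType(v,VECMPI);
     ... VecSetValues(), VecAssemblyBegin(), VecAssemblyEnd(), use the vector ...
     VecDestroy(v);
*/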
206: int VecSerialize_MPI(MPI_Comm comm, Vec *vec, PetscViewer viewer, PetscTruth store)
207: {
208: Vec v;
209: Vec_MPI *x;
210: PetscScalar *array;
211: int fd;
212: int vars, locVars, ghostVars;
213: int size;
214: int ierr;
217: PetscViewerBinaryGetDescriptor(viewer, &fd);
218: if (store) {
219: v = *vec;
220: x = (Vec_MPI *) v->data;
221: PetscBinaryWrite(fd, &v->N, 1, PETSC_INT, 0);
222: PetscBinaryWrite(fd, &v->n, 1, PETSC_INT, 0);
223: PetscBinaryWrite(fd, &x->nghost, 1, PETSC_INT, 0);
224: PetscBinaryWrite(fd, x->array, v->n + x->nghost, PETSC_SCALAR, 0);
225: } else {
226: PetscBinaryRead(fd, &vars, 1, PETSC_INT);
227: PetscBinaryRead(fd, &locVars, 1, PETSC_INT);
228: PetscBinaryRead(fd, &ghostVars, 1, PETSC_INT);
229: MPI_Allreduce(&locVars, &size, 1, MPI_INT, MPI_SUM, comm);
230: if (size != vars) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid row partition");
231: VecCreate(comm, &v);
232: VecSetSizes(v, locVars, vars);
233: if (locVars + ghostVars > 0) {
234: PetscMalloc((locVars + ghostVars) * sizeof(PetscScalar), &array);
235: PetscBinaryRead(fd, array, locVars + ghostVars, PETSC_SCALAR);
236: VecCreate_MPI_Private(v, ghostVars, array, PETSC_NULL);
237: ((Vec_MPI *) v->data)->array_allocated = array;
238: } else {
239: VecCreate_MPI_Private(v, ghostVars, PETSC_NULL, PETSC_NULL);
240: }
242: VecAssemblyBegin(v);
243: VecAssemblyEnd(v);
244: *vec = v;
245: }
247: return(0);
248: }
249: EXTERN_C_END
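/*
   Note: VecSerialize_MPI() stores, in order, the global size, the local
   size, the ghost count and then the n+nghost local (owned plus ghost)
   values; on loading it reads the same fields back, checks that the local
   sizes sum to the global size, rebuilds the vector through
   VecCreate_MPI_Private() and assembles it.
*/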
251: /*@C
252: VecCreateMPIWithArray - Creates a parallel, array-style vector,
253: where the user provides the array space to store the vector values.
255: Collective on MPI_Comm
257: Input Parameters:
258: + comm - the MPI communicator to use
259: . n - local vector length, cannot be PETSC_DECIDE
260: .  N - global vector length (or PETSC_DECIDE to have it calculated)
261: - array - the user provided array to store the vector values
263: Output Parameter:
264: . vv - the vector
265:
266: Notes:
267: Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
268: same type as an existing vector.
270: If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
271: at a later stage to SET the array for storing the vector values.
273: PETSc does NOT free the array when the vector is destroyed via VecDestroy().
274: The user should not free the array until the vector is destroyed.
276: Level: intermediate
278: Concepts: vectors^creating with array
280: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
281: VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
283: @*/
284: int VecCreateMPIWithArray(MPI_Comm comm,int n,int N,const PetscScalar array[],Vec *vv)
285: {
289: if (n == PETSC_DECIDE) {
290: SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
291: }
292: PetscSplitOwnership(comm,&n,&N);
293: VecCreate(comm,vv);
294: VecSetSizes(*vv,n,N);
295: VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);
296: return(0);
297: }
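/*
   Usage sketch (illustrative only; comm and nlocal are placeholders): the
   user-supplied array must have at least nlocal entries per process and,
   as noted above, is not freed by VecDestroy(), e.g.

     PetscScalar *a;
     Vec         v;
     PetscMalloc(nlocal*sizeof(PetscScalar),&a);
     VecCreateMPIWithArray(comm,nlocal,PETSC_DECIDE,a,&v);
     ... use the vector ...
     VecDestroy(v);
     PetscFree(a);
*/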
299: /*@C
300: VecGhostGetLocalForm - Obtains the local ghosted representation of
301: a parallel vector created with VecCreateGhost().
303: Not Collective
305: Input Parameter:
306: .  g - the global vector. Vector must have been obtained with either
307: VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
309: Output Parameter:
310: . l - the local (ghosted) representation
312: Notes:
313: This routine does not actually update the ghost values, but rather it
314: returns a sequential vector that includes the locations for the ghost
315: values and their current values. The returned vector and the original
316: vector passed in share the same array that contains the actual vector data.
318: One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
319: finished using the object.
321: Level: advanced
323: Concepts: vectors^ghost point access
325: .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
327: @*/
328: int VecGhostGetLocalForm(Vec g,Vec *l)
329: {
330: int ierr;
331: PetscTruth isseq,ismpi;
337: PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
338: PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
339: if (ismpi) {
340: Vec_MPI *v = (Vec_MPI*)g->data;
341: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
342: *l = v->localrep;
343: } else if (isseq) {
344: *l = g;
345: } else {
346: SETERRQ1(1,"Vector type %s does not have local representation",g->type_name);
347: }
348: PetscObjectReference((PetscObject)*l);
349: return(0);
350: }
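/*
   Usage sketch (illustrative only; v is a vector from VecCreateGhost() and
   nlocal/nghost are placeholders): the local form exposes the owned entries
   followed by the ghost entries through a single sequential vector, e.g.

     Vec         lv;
     PetscScalar *a;
     VecGhostGetLocalForm(v,&lv);
     VecGetArray(lv,&a);
     ... a[0..nlocal-1] are the owned values, a[nlocal..nlocal+nghost-1] the ghosts ...
     VecRestoreArray(lv,&a);
     VecGhostRestoreLocalForm(v,&lv);
*/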
352: /*@C
353: VecGhostRestoreLocalForm - Restores the local ghosted representation of
354: a parallel vector obtained with VecGhostGetLocalForm().
356: Not Collective
358:     Input Parameters:
359: + g - the global vector
360: - l - the local (ghosted) representation
362: Notes:
363:     This routine does not update the ghost values; it simply restores the
364:     local (ghosted) representation obtained with VecGhostGetLocalForm().
365:     Use VecGhostUpdateBegin()/VecGhostUpdateEnd() to update the ghost values.
367: Level: advanced
369: .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
370: @*/
371: int VecGhostRestoreLocalForm(Vec g,Vec *l)
372: {
374: PetscObjectDereference((PetscObject)*l);
375: return(0);
376: }
378: /*@
379: VecGhostUpdateBegin - Begins the vector scatter to update the vector from
380: local representation to global or global representation to local.
382: Collective on Vec
384: Input Parameters:
385: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
386: . insertmode - one of ADD_VALUES or INSERT_VALUES
387: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
389: Notes:
390: Use the following to update the ghost regions with correct values from the owning process
391: .vb
392: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
393: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
394: .ve
396: Use the following to accumulate the ghost region values onto the owning processors
397: .vb
398: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
399: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
400: .ve
402: To accumulate the ghost region values onto the owning processors and then update
403:     the ghost regions correctly, call the latter followed by the former, i.e.,
404: .vb
405: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
406: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
407: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
408: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
409: .ve
411: Level: advanced
413: .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
414: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
416: @*/
417: int VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
418: {
419: Vec_MPI *v;
420: int ierr;
425: v = (Vec_MPI*)g->data;
426: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
427: if (!v->localupdate) return(0);
428:
429: if (scattermode == SCATTER_REVERSE) {
430: VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
431: } else {
432: VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
433: }
434: return(0);
435: }
437: /*@
438:    VecGhostUpdateEnd - Ends the vector scatter to update the vector from
439: local representation to global or global representation to local.
441: Collective on Vec
443: Input Parameters:
444: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
445: . insertmode - one of ADD_VALUES or INSERT_VALUES
446: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
448: Notes:
450: Use the following to update the ghost regions with correct values from the owning process
451: .vb
452: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
453: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
454: .ve
456: Use the following to accumulate the ghost region values onto the owning processors
457: .vb
458: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
459: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
460: .ve
462: To accumulate the ghost region values onto the owning processors and then update
463:     the ghost regions correctly, call the latter followed by the former, i.e.,
464: .vb
465: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
466: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
467: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
468: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
469: .ve
471: Level: advanced
473: .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
474: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
476: @*/
477: int VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
478: {
479: Vec_MPI *v;
480: int ierr;
485: v = (Vec_MPI*)g->data;
486: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
487: if (!v->localupdate) return(0);
489: if (scattermode == SCATTER_REVERSE) {
490: VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
491: } else {
492: VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
493: }
494: return(0);
495: }
497: /*@C
498: VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
499: the caller allocates the array space.
501: Collective on MPI_Comm
503: Input Parameters:
504: + comm - the MPI communicator to use
505: . n - local vector length
506: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
507: . nghost - number of local ghost points
508: . ghosts - global indices of ghost points (or PETSC_NULL if not needed)
509: -  array - the space to store the vector values (must be of length at least n + nghost)
511: Output Parameter:
512: . vv - the global vector representation (without ghost points as part of vector)
513:
514: Notes:
515: Use VecGhostGetLocalForm() to access the local, ghosted representation
516: of the vector.
518: Level: advanced
520: Concepts: vectors^creating with array
521: Concepts: vectors^ghosted
523: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
524: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
525: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
527: @*/
528: int VecCreateGhostWithArray(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
529: {
530: int ierr;
531: Vec_MPI *w;
532: PetscScalar *larray;
533: IS from,to;
536: *vv = 0;
538: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
539: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
540: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
541: PetscSplitOwnership(comm,&n,&N);
542: /* Create global representation */
543: VecCreate(comm,vv);
544: VecSetSizes(*vv,n,N);
545: VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);
546: w = (Vec_MPI *)(*vv)->data;
547: /* Create local representation */
548: VecGetArray(*vv,&larray);
549: VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
550: PetscLogObjectParent(*vv,w->localrep);
551: VecRestoreArray(*vv,&larray);
553: /*
554: Create scatter context for scattering (updating) ghost values
555: */
556: ISCreateGeneral(comm,nghost,ghosts,&from);
557: ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
558: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
559: PetscLogObjectParent(*vv,w->localupdate);
560: ISDestroy(to);
561: ISDestroy(from);
563: return(0);
564: }
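/*
   Note: the scatter created above moves the entries whose global indices are
   listed in ghosts[] (the general "from" index set on the parallel vector)
   into positions n,...,n+nghost-1 of the local form (the stride "to" index
   set), so a forward scatter fills the ghost slots at the end of the local
   array and a reverse scatter accumulates them back onto their owners.
*/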
566: /*@C
567: VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
569: Collective on MPI_Comm
571: Input Parameters:
572: + comm - the MPI communicator to use
573: . n - local vector length
574: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
575: . nghost - number of local ghost points
576: - ghosts - global indices of ghost points
578: Output Parameter:
579: . vv - the global vector representation (without ghost points as part of vector)
580:
581: Notes:
582: Use VecGhostGetLocalForm() to access the local, ghosted representation
583: of the vector.
585: Level: advanced
587: Concepts: vectors^ghosted
589: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
590: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
591: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
592: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
594: @*/
595: int VecCreateGhost(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],Vec *vv)
596: {
600: VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
601: return(0);
602: }
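/*
   Usage sketch (illustrative only; comm, nlocal and the ghost indices are
   placeholders): a typical life cycle of a ghosted vector is

     int ghosts[2],nghost = 2;
     Vec v,lv;
     ... fill ghosts[] with the global indices of the needed off-process entries ...
     VecCreateGhost(comm,nlocal,PETSC_DECIDE,nghost,ghosts,&v);
     ... VecSetValues(), VecAssemblyBegin(), VecAssemblyEnd() ...
     VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostGetLocalForm(v,&lv);
     ... read owned and ghost values through lv ...
     VecGhostRestoreLocalForm(v,&lv);
     VecDestroy(v);
*/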
604: int VecDuplicate_MPI(Vec win,Vec *v)
605: {
606: int ierr;
607: Vec_MPI *vw,*w = (Vec_MPI *)win->data;
608: PetscScalar *array;
609: #if defined(PETSC_HAVE_AMS)
610: int (*f)(AMS_Memory,char *,Vec);
611: #endif
614: VecCreate(win->comm,v);
615: VecSetSizes(*v,win->n,win->N);
616: VecCreate_MPI_Private(*v,w->nghost,0,win->map);
617: vw = (Vec_MPI *)(*v)->data;
618: PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));
620: /* save local representation of the parallel vector (and scatter) if it exists */
621: if (w->localrep) {
622: VecGetArray(*v,&array);
623: VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);
624: PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
625: VecRestoreArray(*v,&array);
626: PetscLogObjectParent(*v,vw->localrep);
627: vw->localupdate = w->localupdate;
628: if (vw->localupdate) {
629: PetscObjectReference((PetscObject)vw->localupdate);
630: }
631: }
633: /* New vector should inherit stashing property of parent */
634: (*v)->stash.donotstash = win->stash.donotstash;
635:
636: PetscOListDuplicate(win->olist,&(*v)->olist);
637: PetscFListDuplicate(win->qlist,&(*v)->qlist);
638: if (win->mapping) {
639: (*v)->mapping = win->mapping;
640: PetscObjectReference((PetscObject)win->mapping);
641: }
642: if (win->bmapping) {
643: (*v)->bmapping = win->bmapping;
644: PetscObjectReference((PetscObject)win->bmapping);
645: }
646: (*v)->bs = win->bs;
647: (*v)->bstash.bs = win->bstash.bs;
649: #if defined(PETSC_HAVE_AMS)
650: /*
651: If the vector knows its "layout" let it set it, otherwise it defaults
652: to correct 1d distribution
653: */
654: PetscObjectQueryFunction((PetscObject)(*v),"AMSSetFieldBlock_C",(void (**)(void))&f);
655: if (f) {
656: (*f)((AMS_Memory)(*v)->amem,"values",*v);
657: }
658: #endif
659: return(0);
660: }
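/*
   Note: a duplicate created above gets fresh storage for its values (the
   values themselves are not copied) but shares, with reference counting,
   the parent's layout map, ghost-update scatter and local-to-global
   mappings, and inherits the operation table, block size and stashing
   behavior.
*/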
662: /* ------------------------------------------------------------------------------------------*/
663: /*@C
664: VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
665: the caller allocates the array space. Indices in the ghost region are based on blocks.
667: Collective on MPI_Comm
669: Input Parameters:
670: + comm - the MPI communicator to use
671: . bs - block size
672: . n - local vector length
673: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
674: . nghost - number of local ghost blocks
675: . ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
676: -  array - the space to store the vector values (must be of length at least n + nghost*bs)
678: Output Parameter:
679: . vv - the global vector representation (without ghost points as part of vector)
680:
681: Notes:
682: Use VecGhostGetLocalForm() to access the local, ghosted representation
683: of the vector.
685:    n is the local vector size (total local size, not the number of blocks) while nghost
686: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
687: portion is bs*nghost
689: Level: advanced
691: Concepts: vectors^creating ghosted
692: Concepts: vectors^creating with array
694: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
695: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
696:            VecCreateGhostWithArray(), VecCreateGhostBlock()
698: @*/
699: int VecCreateGhostBlockWithArray(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
700: {
701: int ierr;
702: Vec_MPI *w;
703: PetscScalar *larray;
704: IS from,to;
707: *vv = 0;
709: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
710: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
711: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
712: PetscSplitOwnership(comm,&n,&N);
713: /* Create global representation */
714: VecCreate(comm,vv);
715: VecSetSizes(*vv,n,N);
716: VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);
717: VecSetBlockSize(*vv,bs);
718: w = (Vec_MPI *)(*vv)->data;
719: /* Create local representation */
720: VecGetArray(*vv,&larray);
721: VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
722: VecSetBlockSize(w->localrep,bs);
723: PetscLogObjectParent(*vv,w->localrep);
724: VecRestoreArray(*vv,&larray);
726: /*
727: Create scatter context for scattering (updating) ghost values
728: */
729: ISCreateBlock(comm,bs,nghost,ghosts,&from);
730: ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
731: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
732: PetscLogObjectParent(*vv,w->localupdate);
733: ISDestroy(to);
734: ISDestroy(from);
736: return(0);
737: }
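/*
   Note: this is the blocked analogue of VecCreateGhostWithArray(): ghosts[]
   lists block indices, ISCreateBlock() expands each block into bs
   consecutive entries, and the local form holds the n owned entries followed
   by bs*nghost ghost entries.
*/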
739: /*@C
740: VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
741:         The indexing of the ghost points is done with blocks.
743: Collective on MPI_Comm
745: Input Parameters:
746: + comm - the MPI communicator to use
747: . bs - the block size
748: . n - local vector length
749: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
750: . nghost - number of local ghost blocks
751: - ghosts - global indices of ghost blocks
753: Output Parameter:
754: . vv - the global vector representation (without ghost points as part of vector)
755:
756: Notes:
757: Use VecGhostGetLocalForm() to access the local, ghosted representation
758: of the vector.
760:    n is the local vector size (total local size, not the number of blocks) while nghost
761: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
762: portion is bs*nghost
764: Level: advanced
766: Concepts: vectors^ghosted
768: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
769: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
770: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
772: @*/
773: int VecCreateGhostBlock(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],Vec *vv)
774: {
778: VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
779: return(0);
780: }
782: /*
783: These introduce a ghosted vector where the ghosting is determined by the call to
784: VecSetLocalToGlobalMapping()
785: */
787: int VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
788: {
789: int ierr;
790: Vec_MPI *v = (Vec_MPI *)vv->data;
793: v->nghost = map->n - vv->n;
795:   /* we need to enlarge the array space that was allocated when the vector was created */
796: ierr = PetscFree(v->array_allocated);
797: ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
798: v->array = v->array_allocated;
799:
800: /* Create local representation */
801: VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
802: PetscLogObjectParent(vv,v->localrep);
804: return(0);
805: }
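/*
   Note: for the FETI variant the local-to-global mapping itself defines the
   ghosting: the array is reallocated to map->n entries and wrapped in a
   sequential local representation, and VecSetValuesLocal_FETI() below writes
   straight into that local vector, so no stash/assembly communication is
   needed (VecCreate_FETI() sets assemblybegin/assemblyend to 0).
*/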
808: int VecSetValuesLocal_FETI(Vec vv,int n,const int *ix,const PetscScalar *values,InsertMode mode)
809: {
810: int ierr;
811: Vec_MPI *v = (Vec_MPI *)vv->data;
814: VecSetValues(v->localrep,n,ix,values,mode);
815: return(0);
816: }
818: EXTERN_C_BEGIN
819: int VecCreate_FETI(Vec vv)
820: {
824: VecSetType(vv,VECMPI);
825:
826: /* overwrite the functions to handle setting values locally */
827: vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
828: vv->ops->setvalueslocal = VecSetValuesLocal_FETI;
829: vv->ops->assemblybegin = 0;
830: vv->ops->assemblyend = 0;
831:   vv->ops->setvaluesblocked        = 0;
834: return(0);
835: }
836: EXTERN_C_END