Actual source code: da1.c
/*$Id: da1.c,v 1.129 2001/09/07 20:12:17 bsmith Exp $*/

/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell 6/30/95
*/
#include "src/dm/da/daimpl.h"
#if defined(PETSC_HAVE_AMS)
EXTERN_C_BEGIN
EXTERN int AMSSetFieldBlock_DA(AMS_Memory,char *,Vec);
EXTERN_C_END
#endif
int DAView_1d(DA da,PetscViewer viewer)
{
  int        rank,ierr;
  PetscTruth isascii,isdraw,isbinary;

  ierr = MPI_Comm_rank(da->comm,&rank);CHKERRQ(ierr);

  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %d m %d w %d s %d\n",rank,da->M,
                                              da->m,da->w,da->s);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %d %d\n",da->xs,da->xe);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  } else if (isdraw) {
    PetscDraw  draw;
    double     ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
    int        base;
    char       node[10];
    PetscTruth isnull;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) return(0);

    ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
    ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);

    /* first processor draws all node lines */
    if (!rank) {
      int xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      /* ADIC doesn't like doubles in a for loop */
      for (xmin_tmp=0; xmin_tmp < (int)da->M; xmin_tmp++) {
        ierr = PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      }

      xmin = 0.0; xmax = da->M - 1;
      ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
      ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
    ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
    ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);

    /* Put in index numbers */
    base = da->base / da->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",base++);
      ierr = PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);CHKERRQ(ierr);
    }

    ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
    ierr = PetscDrawPause(draw);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = DAView_Binary(da,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
  }
  return(0);
}
EXTERN int DAPublish_Petsc(PetscObject);
/*@C
   DACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  wrap - type of periodicity the array should have, if any. Use
          either DA_NONPERIODIC or DA_XPERIODIC
.  M - global dimension of the array
.  dof - number of degrees of freedom per node
.  lc - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must be of length equal to the number of
        processors in comm
-  s - stencil width

   Output Parameter:
.  inra - the resulting distributed array object

   Options Database Key:
+  -da_view - Calls DAView() at the conclusion of DACreate1d()
-  -da_grid_x <nx> - number of grid points in x direction; can be set if M < 0

   Level: beginner

   Notes:
   If you are having problems with running out of memory, then run with the option -da_noao.

   The array data itself is NOT stored in the DA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
   and DACreateLocalVector(), and calls to VecDuplicate() if more are needed.

.keywords: distributed array, create, one-dimensional

.seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(),
          DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(),
          DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView()

@*/
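/*
   A minimal usage sketch, assuming a periodic grid of 8 nodes with one degree
   of freedom and stencil width 1; the names da, g, and l are illustrative only:

       DA  da;
       Vec g,l;
       int ierr;

       ierr = DACreate1d(PETSC_COMM_WORLD,DA_XPERIODIC,8,1,1,PETSC_NULL,&da);CHKERRQ(ierr);
       ierr = DACreateGlobalVector(da,&g);CHKERRQ(ierr);
       ierr = DACreateLocalVector(da,&l);CHKERRQ(ierr);
       ... use the vectors, e.g. with DAGlobalToLocalBegin()/DAGlobalToLocalEnd() ...
       ierr = VecDestroy(g);CHKERRQ(ierr);
       ierr = VecDestroy(l);CHKERRQ(ierr);
       ierr = DADestroy(da);CHKERRQ(ierr);
*/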
int DACreate1d(MPI_Comm comm,DAPeriodicType wrap,int M,int dof,int s,int *lc,DA *inra)
{
  int        rank,size,xs,xe,x,Xs,Xe,ierr,start,end,m;
  int        i,*idx,nn,left,refine_x = 2,tM = M;
  PetscTruth flg1,flg2;
  DA         da;
  Vec        local,global;
  VecScatter ltog,gtol;
  IS         to,from;
  *inra = 0;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %d",dof);
  if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %d",s);

  ierr = PetscOptionsBegin(comm,PETSC_NULL,"1d DA Options","DA");CHKERRQ(ierr);
  if (M < 0) {
    tM   = -M;
    ierr = PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate1d",tM,&tM,PETSC_NULL);CHKERRQ(ierr);
  }
  ierr = PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DACreate1d",refine_x,&refine_x,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  M = tM;
  ierr = PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);CHKERRQ(ierr);
  PetscLogObjectCreate(da);
  da->bops->publish           = DAPublish_Petsc;
  da->ops->createglobalvector = DACreateGlobalVector;
  da->ops->getinterpolation   = DAGetInterpolation;
  da->ops->getcoloring        = DAGetColoring;
  da->ops->getmatrix          = DAGetMatrix;
  da->ops->refine             = DARefine;
  PetscLogObjectMemory(da,sizeof(struct _p_DA));
  da->dim        = 1;
  da->interptype = DA_Q1;
  da->refine_x   = refine_x;
  ierr = PetscMalloc(dof*sizeof(char*),&da->fieldname);CHKERRQ(ierr);
  ierr = PetscMemzero(da->fieldname,dof*sizeof(char*));CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  m = size;

  if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %d %d",m,M);
  if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %d %d",M-1,s);
  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lc) {
    ierr = PetscOptionsHasName(PETSC_NULL,"-da_partition_blockcomm",&flg1);CHKERRQ(ierr);
    ierr = PetscOptionsHasName(PETSC_NULL,"-da_partition_nodes_at_end",&flg2);CHKERRQ(ierr);
    if (flg1) {        /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across the last nodes */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else {           /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (int)(M/m) + M % m);}
      else                 {xs = rank * (int)(M/m) + rank;}
    }
  } else {
    x  = lc[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lc[i];
    }
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lc[i];
    }
    if (left != M) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lc across processors not equal to M %d %d",left,M);
    }
  }
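  /*
     A worked example of the default ("Regular PETSc") distribution above,
     with illustrative numbers: M = 10 nodes on m = 3 processors gives
     M/m = 3 and M % m = 1, so rank 0 gets x = 4 nodes starting at xs = 0,
     while ranks 1 and 2 each get x = 3 nodes starting at xs = 4 and xs = 7.
  */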
  /* From now on x,s,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  s  *= dof;  /* NOTE: here change s to be absolute stencil distance */
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region */
  if (wrap == DA_XPERIODIC) {
    Xs = xs - s;
    Xe = xe + s;
  } else {
    if ((xs-s) >= 0)     Xs = xs-s; else Xs = 0;
    if ((xe+s) <= M*dof) Xe = xe+s; else Xe = M*dof;
  }
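  /*
     For example (illustrative numbers): with dof = 1, s = 1, M = 10 and the
     nonperiodic layout above, rank 0 owns [xs,xe) = [0,4) and gets the ghost
     range [Xs,Xe) = [0,5), since the left extension is clamped at 0, while
     rank 1 owns [4,7) and gets [3,8). With DA_XPERIODIC the range is always
     [xs-s,xe+s), even when it runs past the ends of the array.
  */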
  /* allocate the base parallel and sequential vectors */
  ierr = VecCreateMPI(comm,x,PETSC_DECIDE,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,(Xe-Xs),&local);CHKERRQ(ierr);
  ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,start,1,&to);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x,xs-Xs,1,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,ltog);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
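  /*
     The "from" index set starts at xs-Xs because, within the ghosted local
     vector, the owned points sit after the left ghost points. Continuing the
     illustrative nonperiodic example above (dof = 1, s = 1): rank 1 has
     Xs = 3 and xs = 4, so its owned values begin at local position 1 and are
     scattered to global positions start through start+x-1.
  */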
  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ierr = ISCreateStride(comm,(Xe-Xs),0,1,&to);CHKERRQ(ierr);

  ierr = PetscMalloc((x+2*s)*sizeof(int),&idx);CHKERRQ(ierr);
  PetscLogObjectMemory(da,(x+2*s)*sizeof(int));

  nn = 0;
  if (wrap == DA_XPERIODIC) { /* Handle all cases with wrap first */

    for (i=0; i<s; i++) {  /* Left ghost points */
      if ((xs-s+i) >= 0) { idx[nn++] = xs-s+i; }
      else               { idx[nn++] = M*dof+(xs-s+i); }
    }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }  /* Non-ghost points */

    for (i=0; i<s; i++) { /* Right ghost points */
      if ((xe+i) < M*dof) { idx[nn++] = xe+i; }
      else                { idx[nn++] = (xe+i) - M*dof; }
    }
  } else {                  /* Now do all cases with no wrapping */

    if (s <= xs) { for (i=0; i<s;  i++) { idx[nn++] = xs - s + i; } }
    else         { for (i=0; i<xs; i++) { idx[nn++] = i; } }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }

    if ((xe+s) <= M*dof) { for (i=0; i<s;        i++) { idx[nn++] = xe+i; } }
    else                 { for (i=xe; i<(M*dof); i++) { idx[nn++] = i; } }
  }
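  /*
     For example (illustrative numbers): with M = 4, dof = 1, s = 1 and
     DA_XPERIODIC on 2 processors, rank 0 owns [0,2), so its left ghost
     index -1 wraps to 3 and idx becomes {3, 0, 1, 2}; rank 1 owns [2,4),
     so its right ghost index 4 wraps to 0 and idx becomes {1, 2, 3, 0}.
  */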
  ierr = ISCreateGeneral(comm,nn,idx,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,gtol);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  da->M  = M;  da->N  = 1;  da->m  = m;   da->n  = 1;
  da->xs = xs; da->xe = xe; da->ys = 0;   da->ye = 1; da->zs = 0; da->ze = 1;
  da->Xs = Xs; da->Xe = Xe; da->Ys = 0;   da->Ye = 1; da->Zs = 0; da->Ze = 1;
  da->P  = 1;  da->p  = 1;  da->w  = dof; da->s  = s/dof;

  PetscLogObjectParent(da,global);
  PetscLogObjectParent(da,local);

  da->global = global;
  da->local  = local;
  da->gtol   = gtol;
  da->ltog   = ltog;
  da->idx    = idx;
  da->Nl     = nn;
  da->base   = xs;
  da->ops->view    = DAView_1d;
  da->wrap         = wrap;
  da->stencil_type = DA_STENCIL_STAR;
  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  ierr = ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(da->global,da->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(da->global,da->ltogmapb);CHKERRQ(ierr);
  PetscLogObjectParent(da,da->ltogmap);
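  /*
     A minimal sketch of what the mapping enables (illustrative only): each
     process can set entries of the global vector using its ghosted local
     numbering, e.g.

         PetscScalar one  = 1.0;
         int         lidx = 0;   -- first ghosted local point on this process
         ierr = VecSetValuesLocal(da->global,1,&lidx,&one,INSERT_VALUES);CHKERRQ(ierr);
         ierr = VecAssemblyBegin(da->global);CHKERRQ(ierr);
         ierr = VecAssemblyEnd(da->global);CHKERRQ(ierr);
  */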
  da->ltol = PETSC_NULL;
  da->ao   = PETSC_NULL;

  ierr = PetscOptionsHasName(PETSC_NULL,"-da_view",&flg1);CHKERRQ(ierr);
  if (flg1) {ierr = DAView(da,PETSC_VIEWER_STDOUT_(da->comm));CHKERRQ(ierr);}
  ierr = PetscOptionsHasName(PETSC_NULL,"-da_view_draw",&flg1);CHKERRQ(ierr);
  if (flg1) {ierr = DAView(da,PETSC_VIEWER_DRAW_(da->comm));CHKERRQ(ierr);}
  ierr = PetscOptionsHasName(PETSC_NULL,"-help",&flg1);CHKERRQ(ierr);
  if (flg1) {ierr = DAPrintHelp(da);CHKERRQ(ierr);}
  *inra = da;
  ierr = PetscPublishAll(da);CHKERRQ(ierr);
#if defined(PETSC_HAVE_AMS)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)global,"AMSSetFieldBlock_C",
                                           "AMSSetFieldBlock_DA",AMSSetFieldBlock_DA);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)local,"AMSSetFieldBlock_C",
                                           "AMSSetFieldBlock_DA",AMSSetFieldBlock_DA);CHKERRQ(ierr);
  if (((PetscObject)global)->amem > -1) {
    ierr = AMSSetFieldBlock_DA(((PetscObject)global)->amem,"values",global);CHKERRQ(ierr);
  }
#endif
  ierr = VecSetOperation(global,VECOP_VIEW,(void(*)(void))VecView_MPI_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(global,VECOP_LOADINTOVECTOR,(void(*)(void))VecLoadIntoVector_Binary_DA);CHKERRQ(ierr);
  return(0);
}