Actual source code: gridBC.c
1: #ifdef PETSC_RCS_HEADER
2: static char vcid[] = "$Id: gridBC.c,v 1.3 2000/01/30 18:27:13 huangp Exp $";
3: #endif
5: #include "src/grid/gridimpl.h" /*I "grid.h" I*//*I "gvec.h" I*/
6: #include "src/vec/impls/mpi/pvecimpl.h" /* For GridCalcBCValues(), soon will not be needed */
8: /*------------------------------------------------ Standard Functions -----------------------------------------------*/
9: /*@
10: GridDuplicateBC - Duplicates the boundary conditions of one grid in another.
12: Collective on Grid
14: Input Parameter:
15: . grid - The grid
17: Output Parameter:
18: . newGrid - The altered grid
20: Level: intermediate
22: .keywords: grid, duplicate, BC
23: .seealso: GridDuplicate()
24: @*/
25: int GridDuplicateBC(Grid grid, Grid newGrid)
26: {
27: int bc;
33: for(bc = 0; bc < grid->numBC; bc++) {
34: GridAddBC(newGrid, grid->bc[bc].boundary, grid->bc[bc].field, grid->bc[bc].func, grid->bc[bc].reduce);
35: }
36: for(bc = 0; bc < grid->numPointBC; bc++) {
37: GridAddPointBC(newGrid, grid->pointBC[bc].point[0], grid->pointBC[bc].point[1], grid->pointBC[bc].point[2], grid->pointBC[bc].field,
38: grid->pointBC[bc].func, grid->pointBC[bc].reduce);
39: }
40: return(0);
41: }
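/* Usage sketch (hypothetical objects): copy the conditions of an existing grid
   onto a second grid built over a compatible mesh. "oldGrid" and "newGrid" are
   assumed to be valid Grid objects; both the boundary conditions and the point
   conditions are carried over.

     GridDuplicateBC(oldGrid, newGrid);
*/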
43: /*@
44: GridFinalizeBC - Destroys all structures associated with explicit system
45: reduction using boundary conditions. This should be called after all
46: calculations are finished, prior to GridDestroy().
48: Collective on Grid
50: Input Parameter:
51: . grid - The grid
53: Level: beginner
55: .keywords: grid, boundary conditions
56: .seealso: GridSetBC(), GridAddBC()
57: @*/
58: int GridFinalizeBC(Grid grid)
59: {
64: if (grid->bdReduceVec) {
65: GVecDestroy(grid->bdReduceVec);
66: }
67: if (grid->bdReduceVecOld) {
68: GVecDestroy(grid->bdReduceVecOld);
69: }
70: if (grid->bdReduceVecDiff) {
71: GVecDestroy(grid->bdReduceVecDiff);
72: }
73: if (grid->bdReduceMat) {
74: GMatDestroy(grid->bdReduceMat);
75: }
76: if (grid->reduceVec) {
77: GVecDestroy(grid->reduceVec);
78: }
80: return(0);
81: }
83: /*----------------------------------------------- Database Functions ------------------------------------------------*/
84: /*@C GridSetBC
85: This function sets the boundary condition to use for the problem, replacing any previously set conditions.
87: Collective on Grid
89: Input Parameters:
90: + grid - The grid
91: . bd - The marker for the boundary along which conditions are applied
92: . field - The field to which the boundary condition is applied
93: . f - The function which defines the boundary condition
94: - reduce - The flag for explicit reduction of the system
96: Level: intermediate
98: .keywords grid, boundary conditions
99: .seealso GridAddBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
100: @*/
101: int GridSetBC(Grid grid, int bd, int field, PointFunction f, PetscTruth reduce)
102: {
107: GridValidField(grid, field);
108: grid->numBC = 0;
109: GridAddBC(grid, bd, field, f, reduce);
110: return(0);
111: }
113: /*@C GridAddBC
114: This function adds a boundary condition to use for the problem.
116: Collective on Grid
118: Input Parameters:
119: + grid - The grid
120: . bd - The marker for the boundary along which conditions are applied
121: . field - The field to which the boundary condition is applied
122: . f - The function which defines the boundary condition
123: - reduce - The flag for explicit reduction of the system
125: Level: intermediate
127: .keywords grid, boundary conditions
128: .seealso GridSetBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
129: @*/
130: int GridAddBC(Grid grid, int bd, int field, PointFunction f, PetscTruth reduce)
131: {
132: GridBC *tempBC;
133: int bdIndex;
134: int ierr;
138: GridValidField(grid, field);
139: while (grid->numBC >= grid->maxBC) {
140: PetscMalloc(grid->maxBC*2 * sizeof(GridBC), &tempBC);
141: PetscLogObjectMemory(grid, grid->maxBC * sizeof(GridBC));
142: PetscMemcpy(tempBC, grid->bc, grid->maxBC * sizeof(GridBC));
143: PetscFree(grid->bc);
144: grid->bc = tempBC;
145: grid->maxBC *= 2;
146: }
147: /* Make sure boundary is legal */
148: MeshGetBoundaryIndex(grid->mesh, bd, &bdIndex);
149: grid->bc[grid->numBC].boundary = bd;
150: grid->bc[grid->numBC].field = field;
151: grid->bc[grid->numBC].func = f;
152: grid->bc[grid->numBC].reduce = reduce;
153: grid->bc[grid->numBC].node = -1;
154: grid->numBC++;
155: /* Check whether to reduce system */
156: if (reduce == PETSC_TRUE) grid->reduceSystem = PETSC_TRUE;
157: return(0);
158: }
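/* Usage sketch (illustrative markers, field, and functions): constrain field 0
   along two boundaries, reducing the first condition out of the system. The
   markers 1 and 2 and the PointFunctions "wallBC" and "inletBC" are assumptions,
   not part of this file.

     GridAddBC(grid, 1, 0, wallBC,  PETSC_TRUE);
     GridAddBC(grid, 2, 0, inletBC, PETSC_FALSE);

   Unlike GridSetBC(), repeated calls accumulate conditions. */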
160: /*@C GridSetPointBC
161: This function sets the boundary condition to use for the problem at a point, replacing any previously set point conditions.
163: Collective on Grid
165: Input Parameters:
166: + grid - The grid
167: . x,y,z - The point at which conditions are applied
168: . field - The field to which the boundary condition is applied
169: . f - The function which defines the boundary condition
170: - reduce - The flag for explicit reduction of the system
172: Level: intermediate
174: .keywords grid, boundary conditions
175: .seealso GridAddBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
176: @*/
177: int GridSetPointBC(Grid grid, double x, double y, double z, int field, PointFunction f, PetscTruth reduce)
178: {
183: grid->numPointBC = 0;
184: GridAddPointBC(grid, x, y, z, field, f, reduce);
185: return(0);
186: }
188: /*@C GridAddPointBC
189: This function adds a boundary condition to use for the problem at a point.
191: Collective on Grid
193: Input Parameters:
194: + grid - The grid
195: . x,y,z - The point at which conditions are applied
196: . field - The field to which the boundary condition is applied
197: . f - The function which defines the boundary condition
198: - reduce - The flag for explicit reduction of the system
200: Level: intermediate
202: .keywords grid, boundary conditions
203: .seealso GridSetBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
204: @*/
205: int GridAddPointBC(Grid grid, double x, double y, double z, int field, PointFunction f, PetscTruth reduce)
206: {
207: GridBC *tempBC;
208: int ierr;
212: GridValidField(grid, field);
213: while (grid->numPointBC >= grid->maxPointBC) {
214: PetscMalloc(grid->maxPointBC*2 * sizeof(GridBC), &tempBC);
215: PetscLogObjectMemory(grid, grid->maxPointBC * sizeof(GridBC));
216: PetscMemcpy(tempBC, grid->pointBC, grid->maxPointBC * sizeof(GridBC));
217: PetscFree(grid->pointBC);
218: grid->pointBC = tempBC;
219: grid->maxPointBC *= 2;
220: }
221: if (GridGetNearestBdNode(grid, field, x, y, z, &grid->pointBC[grid->numPointBC].node)) {
222: SETERRQ3(PETSC_ERR_ARG_WRONG, "Invalid point {%g,%g,%g} specified for boundary condition", x, y, z);
223: }
224: grid->pointBC[grid->numPointBC].point[0] = x;
225: grid->pointBC[grid->numPointBC].point[1] = y;
226: grid->pointBC[grid->numPointBC].point[2] = z;
227: grid->pointBC[grid->numPointBC].field = field;
228: grid->pointBC[grid->numPointBC].func = f;
229: grid->pointBC[grid->numPointBC].reduce = reduce;
230: grid->pointBC[grid->numPointBC].boundary = -1;
231: grid->numPointBC++;
232: /* Check whether to reduce system */
233: if (reduce == PETSC_TRUE) grid->reduceSystem = PETSC_TRUE;
234: return(0);
235: }
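/* Usage sketch: pin a single value at a point, a common way to remove the null
   space of a pressure-like field. The coordinates, the field number 1, and the
   PointFunction "zeroFunc" are illustrative assumptions.

     GridAddPointBC(grid, 0.0, 0.0, 0.0, 1, zeroFunc, PETSC_TRUE);

   The condition attaches to the boundary node nearest {0,0,0}. */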
237: /*@
238: GridSetBCMultiplier - This sets the scalar multiplier used for reduction components on the rhs.
240: Collective on Grid
242: Input Parameters:
243: + grid - The grid
244: - alpha - The scalar multiplier
246: Note:
247: For example, this should be -1 in a nonlinear iteration. The default is 1.
249: Level: developer
251: .keywords: grid, reduction, boundary conditions
252: .seealso: GridGetBCMultiplier(), GridSetBC(), GridAddBC()
253: @*/
254: int GridSetBCMultiplier(Grid grid, PetscScalar alpha)
255: {
258: grid->reduceAlpha = alpha;
259: return(0);
260: }
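/* Usage sketch: in a nonlinear iteration the reduction components enter the
   rhs with the opposite sign, so the multiplier is flipped around assembly.

     GridSetBCMultiplier(grid, -1.0);
     ...assemble the residual...
     GridSetBCMultiplier(grid, 1.0);
*/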
262: /*@
263: GridGetBCMultiplier - This gets the scalar multiplier used for reduction components on the rhs.
265: Not collective
267: Input Parameter:
268: . grid - The grid
270: Output Parameter:
271: . alpha - The scalar multiplier
273: Level: developer
275: .keywords: grid, reduction, boundary conditions
276: .seealso: GridSetBCMultiplier(), GridSetBC(), GridAddBC()
277: @*/
278: int GridGetBCMultiplier(Grid grid, PetscScalar *alpha)
279: {
283: *alpha = grid->reduceAlpha;
284: return(0);
285: }
287: /*@
288: GridSetBCContext - This sets the optional user context passed to all
289: routines which assemble boundary reduction information. Must be called
290: before GridSetUp().
292: Collective on Grid
294: Input Parameters:
295: + grid - The grid
296: - ctx - The context
298: Level: intermediate
300: .keywords: grid, reduction, boundary conditions
301: .seealso: GridGetBCContext(), GridSetBC(), GridAddBC()
302: @*/
303: int GridSetBCContext(Grid grid, void *ctx)
304: {
307: grid->reduceContext = ctx;
308: return(0);
309: }
311: /*@
312: GridGetBCContext - This gets the optional user context passed to all
313: routines which assemble boundary reduction information.
315: Not collective
317: Input Parameter:
318: . grid - The grid
320: Output Parameter:
321: . ctx - The context
323: Level: intermediate
325: .keywords: grid, reduction, boundary conditions
326: .seealso: GridSetBCContext(), GridSetBC(), GridAddBC()
327: @*/
328: int GridGetBCContext(Grid grid, void **ctx)
329: {
333: *ctx = grid->reduceContext;
334: return(0);
335: }
337: /*@
338: GridSetBCValuesType - This determines which boundary values are used to reduce
339: the system. It is intended to allow time dependent boundary conditions to be
340: used, and also supports the difference of two sets of values.
342: Collective on Grid
344: Input Parameters:
345: + grid - The grid
346: - type - Which set of boundary values to use (BC_VALUES, BC_VALUES_OLD, or BC_VALUES_DIFF)
347: Level: intermediate
349: .keywords: grid, reduction, boundary conditions
350: .seealso: GridSetBC(), GridAddBC()
351: @*/
352: int GridSetBCValuesType(Grid grid, BCValuesType type)
353: {
356: if (grid->reduceSystem == PETSC_FALSE)
357: return(0);
359: switch(type) {
360: case BC_VALUES:
361: grid->bdReduceVecCur = grid->bdReduceVec;
362: break;
363: case BC_VALUES_OLD:
364: if (grid->bdReduceVecOld == PETSC_NULL) {
365: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Old boundary values not stored");
366: }
367: grid->bdReduceVecCur = grid->bdReduceVecOld;
368: break;
369: case BC_VALUES_DIFF:
370: if (grid->bdReduceVecDiff == PETSC_NULL) {
371: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Difference of boundary values not stored");
372: }
373: grid->bdReduceVecCur = grid->bdReduceVecDiff;
374: break;
375: default:
376: SETERRQ1(PETSC_ERR_ARG_WRONG, "Invalid type %d for boundary value calculation", type);
377: }
378: return(0);
379: }
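/* Usage sketch: once GridCalcBCValuesDifference() has stored the change in
   boundary values, the difference can be selected for reduction and the
   current values restored afterwards.

     GridSetBCValuesType(grid, BC_VALUES_DIFF);
     ...assemble the terms that need the difference...
     GridSetBCValuesType(grid, BC_VALUES);
*/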
381: /*---------------------------------------------- Calculation Functions ----------------------------------------------*/
382: /*@C GridCalcPointBCNodes
383: This function recalculates the nodes used for point boundary conditions.
385: Collective on Grid
387: Input Parameter:
388: . grid - The grid
390: Notes:
391: This function is called by GridReform() after the mesh is recalculated.
393: Level: advanced
395: .keywords grid, point BC, node
396: .seealso GridSetBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
397: @*/
398: int GridCalcPointBCNodes(Grid grid)
399: {
400: double x, y, z;
401: int bc;
404: for(bc = 0; bc < grid->numPointBC; bc++) {
405: x = grid->pointBC[bc].point[0];
406: y = grid->pointBC[bc].point[1];
407: z = grid->pointBC[bc].point[2];
408: if (GridGetNearestBdNode(grid, grid->pointBC[bc].field, x, y, z, &grid->pointBC[bc].node)) {
409: SETERRQ3(PETSC_ERR_ARG_WRONG, "Invalid point {%g,%g,%g} specified for boundary condition", x, y, z);
410: }
411: }
412: return(0);
413: }
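/* GridSaveBCValues_Private - Copies the current boundary values into
   grid->bdReduceVecOld, (re)allocating it if the ghost layout has changed. */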
415: int GridSaveBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec) {
416: PetscScalar *array, *arrayOld;
417: int ierr;
420: /* Create storage for reduction of Rhs */
421: if (grid->bdReduceVecOld == PETSC_NULL) {
422: GVecDuplicate(reduceVec, &grid->bdReduceVecOld);
423: } else if (((Vec_MPI *) grid->bdReduceVecOld->data)->nghost != ((Vec_MPI *) reduceVec->data)->nghost) {
424: GVecDestroy(grid->bdReduceVecOld);
425: GVecDuplicate(reduceVec, &grid->bdReduceVecOld);
426: }
427: /* VecCopy(grid->bdReduceVec, grid->bdReduceVecOld); */
428: VecGetArray(reduceVec, &array);
429: VecGetArray(grid->bdReduceVecOld, &arrayOld);
430: PetscMemcpy(arrayOld, array, reduceOrder->numOverlapVars * sizeof(PetscScalar));
431: VecRestoreArray(reduceVec, &array);
432: VecRestoreArray(grid->bdReduceVecOld, &arrayOld);
433: return(0);
434: }
436: int GridCalcGridBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec, void *ctx) {
437: GridBC *gBC = grid->bc;
438: VarOrdering bcOrder;
439: int bc;
440: int ierr;
443: /* Evaluate the vector of boundary values --
444: If order->localStart[field] is NULL, this means the field is not present in the ordering. This is
445: a better check than seeing if the field is active, since we might want to pass in an order on that
446: field to make boundary values for an inactive field.
447: */
448: for(bc = 0; bc < grid->numBC; bc++) {
449: if (gBC[bc].reduce != PETSC_TRUE) continue;
450: if (reduceOrder->localStart[gBC[bc].field] == PETSC_NULL) continue;
451: VarOrderingCreateSubset(reduceOrder, 1, &gBC[bc].field, PETSC_FALSE, &bcOrder);
452: (*grid->ops->gvecevaluatefunctionboundary)(grid, reduceVec, gBC[bc].boundary, bcOrder, gBC[bc].func, 1.0, ctx);
453: VarOrderingDestroy(bcOrder);
456: }
457: return(0);
458: }
460: int GridCalcPointBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec, void *ctx) {
461: GridBC *pBC = grid->pointBC;
462: int **localStart = reduceOrder->localStart;
463: int *offsets = reduceOrder->offsets;
464: int *localOffsets = reduceOrder->localOffsets;
465: FieldClassMap map;
466: VarOrdering bcOrder;
467: PetscScalar *array;
468: double x, y, z;
469: int numNodes, firstVar, rank;
470: int bc, field, numComp, node, nclass, row;
471: int ierr;
474: MPI_Comm_rank(grid->comm, &rank);
475: VarOrderingGetClassMap(reduceOrder, &map);
476: numNodes = map->numNodes;
477: firstVar = reduceOrder->firstVar[rank];
478: /* Evaluate the vector of boundary values --
479: If order->localStart[field] is NULL, this means the field is not present in the ordering. This is
480: a better check than seeing if the field is active, since we might want to pass in an order on that
481: field to make boundary values for an inactive field.
482: */
483: VecGetArray(reduceVec, &array);
484: for(bc = 0; bc < grid->numPointBC; bc++) {
485: /* BC is not reduced out of the system */
486: if (pBC[bc].reduce != PETSC_TRUE) continue;
487: /* Field is not present in the ordering */
488: if (reduceOrder->localStart[pBC[bc].field] == PETSC_NULL) continue;
490: field = pBC[bc].field;
491: numComp = grid->fields[field].numComp;
492: /* Point is in another domain */
493: if (pBC[bc].node < 0) continue;
494: VarOrderingCreateSubset(reduceOrder, 1, &field, PETSC_FALSE, &bcOrder);
496: node = pBC[bc].node;
497: nclass = map->classes[node];
499: if (node >= numNodes) {
500: row = localOffsets[node-numNodes];
501: } else {
502: row = offsets[node] - firstVar + localStart[field][nclass];
503: }
504: x = pBC[bc].point[0];
505: y = pBC[bc].point[1];
506: z = pBC[bc].point[2];
507: (*pBC[bc].func)(1, numComp, &x, &y, &z, &array[row], ctx);
509: VarOrderingDestroy(bcOrder);
512: }
513: VecRestoreArray(reduceVec, &array);
514: return(0);
515: }
517: int GridCalcBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec, PetscTruth save, void *ctx) {
518: PetscScalar *array;
519: int numGhostVars;
520: int ierr;
523: numGhostVars = reduceOrder->numOverlapVars - reduceOrder->numLocVars;
524: if (((Vec_MPI *) reduceVec->data)->nghost != numGhostVars) {
525: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid reduce vector size %d should be %d", ((Vec_MPI *) reduceVec->data)->nghost, numGhostVars);
526: }
528: if (save == PETSC_TRUE) {
529: GridSaveBCValues_Private(grid, reduceOrder, reduceVec);
530: }
532: /* Initialize vector */
533: /* VecSet(&zero, reduceVec); */
534: VecGetArray(reduceVec, &array);
535: PetscMemzero(array, reduceOrder->numOverlapVars * sizeof(PetscScalar));
536: VecRestoreArray(reduceVec, &array);
538: GridCalcGridBCValues_Private(grid, reduceOrder, reduceVec, ctx);
539: GridCalcPointBCValues_Private(grid, reduceOrder, reduceVec, ctx);
541: return(0);
542: }
544: /*@
545: GridCalcBCValues - This function calculates the boundary values. It
546: is normally called once per timestep when using time dependent boundary
547: conditions.
549: Collective on Grid
551: Input Parameters:
552: + grid - The grid
553: . save - A flag used to store old values, usually for timestepping
554: - ctx - The context
556: Level: advanced
558: .keywords: grid, reduction, boundary conditions
559: .seealso: GridSetBCContext(), GridSetBC(), GridAddBC()
560: @*/
561: int GridCalcBCValues(Grid grid, PetscTruth save, void *ctx)
562: {
568: if (grid->reduceSystem == PETSC_TRUE) {
570: GridCalcBCValues_Private(grid, grid->reduceOrder, grid->bdReduceVec, save, ctx);
571: }
573: return(0);
574: }
576: /*@
577: GridCalcBCValuesDifference - This function calculates the difference of the
578: last two sets of boundary values and puts it in an internal vector. This is
579: normally used to implement the Rhs time derivative in a GTS.
581: Collective on Grid
583: Input Parameter:
584: . grid - The grid
586: Level: advanced
588: .keywords: grid, reduction, boundary conditions
589: .seealso: GridSetBCContext(), GridSetBC(), GridAddBC()
590: @*/
591: int GridCalcBCValuesDifference(Grid grid)
592: {
593: PetscScalar *array, *arrayOld, *arrayDiff;
594: int numGhostVars;
595: register int i, n;
596: int ierr;
600: if (grid->reduceSystem == PETSC_TRUE) {
601: numGhostVars = grid->reduceOrder->numOverlapVars - grid->reduceOrder->numLocVars;
602: if (((Vec_MPI *) grid->bdReduceVec->data)->nghost != numGhostVars) {
603: SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid reduce vector size");
604: }
605: if (grid->bdReduceVecOld == PETSC_NULL) {
606: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "No previous boundary values");
607: }
608: /* Create storage for reduction of Rhs */
609: if (grid->bdReduceVecDiff == PETSC_NULL) {
610: GVecDuplicate(grid->bdReduceVec, &grid->bdReduceVecDiff);
611: } else if (((Vec_MPI *) grid->bdReduceVecDiff->data)->nghost != ((Vec_MPI *) grid->bdReduceVec->data)->nghost) {
612: GVecDestroy(grid->bdReduceVecDiff);
613: GVecDuplicate(grid->bdReduceVec, &grid->bdReduceVecDiff);
614: }
615: /* VecWAXPY(&minusOne, grid->bdReduceVecOld, grid->bdReduceVec, grid->bdReduceVecDiff); */
616: VecGetArray(grid->bdReduceVec, &array);
617: VecGetArray(grid->bdReduceVecOld, &arrayOld);
618: VecGetArray(grid->bdReduceVecDiff, &arrayDiff);
619: n = grid->reduceOrder->numOverlapVars;
620: PetscLogFlops(n);
621: for(i = 0; i < n; i++)
622: arrayDiff[i] = array[i] - arrayOld[i];
623: VecRestoreArray(grid->bdReduceVec, &array);
624: VecRestoreArray(grid->bdReduceVecOld, &arrayOld);
625: VecRestoreArray(grid->bdReduceVecDiff, &arrayDiff);
626: }
630: return(0);
631: }
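/* Usage sketch for time dependent boundary conditions, executed once per
   timestep ("userCtx" is an assumed user context): save the previous values,
   then store their difference for later use with BC_VALUES_DIFF.

     GridCalcBCValues(grid, PETSC_TRUE, userCtx);
     GridCalcBCValuesDifference(grid);
*/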
633: /*----------------------------------------------- Reduction Functions -----------------------------------------------*/
634: /*@C
635: GridSetReduceSystem - This function determines whether unknowns associated
636: with boundary conditions are eliminated from the system.
638: Collective on Grid
640: Input Parameters:
641: + grid - The grid
642: - reduce - The flag for explicit reduction of the system
644: Level: intermediate
646: .keywords grid, boundary condition, reduce
647: .seealso GridGetReduceSystem(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
648: @*/
649: int GridSetReduceSystem(Grid grid, PetscTruth reduce)
650: {
653: grid->reduceSystem = reduce;
654: return(0);
655: }
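/* Usage sketch: reduction can be disabled globally during setup, overriding
   the reduce flags passed to GridSetBC() and GridAddBC().

     GridSetReduceSystem(grid, PETSC_FALSE);
*/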
657: /*@C
658: GridGetReduceSystem - This function reveals whether unknowns associated
659: with boundary conditions are eliminated from the system.
661: Not collective
663: Input Parameter:
664: . grid - The grid
666: Output Parameter:
667: . reduce - The flag for explicit reduction of the system
669: Level: intermediate
671: .keywords grid, boundary condition, reduce
672: .seealso GridSetReduceSystem(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
673: @*/
674: int GridGetReduceSystem(Grid grid, PetscTruth *reduce)
675: {
679: *reduce = grid->reduceSystem;
680: return(0);
681: }
683: /*@C
684: GridSetReduceElement - This function determines whether element matrices and vectors
685: are reduced on the fly, or if boundary operators are stored and applied.
687: Collective on Grid
689: Input Parameters:
690: + grid - The grid
691: - reduce - The flag for explicit reduction of the system
693: Level: intermediate
695: .keywords grid, boundary condition, reduce, element
696: .seealso GridGetReduceElement(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
697: @*/
698: int GridSetReduceElement(Grid grid, PetscTruth reduce)
699: {
702: grid->reduceElement = reduce;
703: return(0);
704: }
706: /*@C
707: GridGetReduceElement - This function indicates whether element matrices and vectors
708: are reduced on the fly, or if boundary operators are stored and applied.
710: Not collective
712: Input Parameter:
713: . grid - The grid
715: Output Parameter:
716: . reduce - The flag for explicit reduction of the system
718: Level: intermediate
720: .keywords grid, boundary condition, reduce, element
721: .seealso GridSetReduceElement(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
722: @*/
723: int GridGetReduceElement(Grid grid, PetscTruth *reduce)
724: {
728: *reduce = grid->reduceElement;
729: return(0);
730: }
732: /*---------------------------------------------- Application Functions ----------------------------------------------*/
733: /*
734: GridResetConstrainedMultiply_Private - This function resets the multiplication routine for constrained matrices
736: Input Parameters:
737: + grid - The Grid
738: - A - The GMat
740: Level: developer
742: .keywords Grid, GMat, reset, constrained, multiply
743: .seealso GridEvaluateRhs
744: */
745: int GridResetConstrainedMultiply_Private(Grid grid, GMat A) {
746: PetscTruth isConstrained, explicitConstraints;
747: void (*oldMult)(void);
748: int ierr;
751: GridIsConstrained(grid, &isConstrained);
752: GridGetExplicitConstraints(grid, &explicitConstraints);
753: if (isConstrained == PETSC_TRUE) {
754: if (explicitConstraints == PETSC_FALSE) {
755: MatShellGetOperation(A, MATOP_MULT, &oldMult);
756: if (oldMult != (void (*)(void)) GMatMatMultConstrained) {
757: MatShellSetOperation(A, MATOP_MULT_CONSTRAINED, oldMult);
758: }
759: MatShellSetOperation(A, MATOP_MULT, (void (*)(void)) GMatMatMultConstrained);
761: MatShellGetOperation(A, MATOP_MULT_TRANSPOSE, &oldMult);
762: if (oldMult != (void (*)(void)) GMatMatMultTransposeConstrained) {
763: MatShellSetOperation(A, MATOP_MULT_TRANSPOSE_CONSTRAINED, oldMult);
764: }
765: MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void)) GMatMatMultTransposeConstrained);
766: } else {
767: MatShellGetOperation(A, MATOP_MULT_CONSTRAINED, &oldMult);
768: if (oldMult != PETSC_NULL) {
769: MatShellSetOperation(A, MATOP_MULT, oldMult);
770: }
772: MatShellGetOperation(A, MATOP_MULT_TRANSPOSE_CONSTRAINED, &oldMult);
773: if (oldMult != PETSC_NULL) {
774: MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, oldMult);
775: }
776: }
777: }
778: return(0);
779: }
781: /*@C GridSetBoundary
782: This function sets Dirichlet boundary conditions on the linear problem arising
783: from the underlying grid.
785: Collective on Grid
787: Input Parameters:
788: + bd - The marker for the boundary to apply conditions along
789: . field - The field to which the conditions apply
790: . diag - The scalar to be placed on the diagonal
791: . f - The function which defines the boundary condition
792: - ctx - The user-supplied context
794: Output Parameters:
795: + A - The system matrix
796: - b - The Rhs vector
798: Level: intermediate
800: .keywords boundary conditions, finite element
801: .seealso MeshGetBoundaryStart
802: @*/
803: int GridSetBoundary(int bd, int field, PetscScalar diag, PointFunction f, GMat A, GVec b, void *ctx)
804: {
805: Grid grid;
806: int ierr;
810: GMatGetGrid(A, &grid);
811: GridSetBoundaryRectangular(bd, field, diag, f, grid->order, A, b, ctx);
812: return(0);
813: }
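/* Usage sketch (illustrative marker, field, and function): impose Dirichlet
   values on an assembled system, placing 1.0 on the diagonal of each boundary
   row of A and the boundary values in b. "bcFunc" is an assumption.

     GridSetBoundary(1, 0, 1.0, bcFunc, A, b, ctx);
*/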
815: /*@C GridSetBoundaryRectangular
816: This function sets Dirichlet boundary conditions on the linear problem arising
817: from the underlying grid, and the default variable ordering can be overridden.
819: Collective on Grid
821: Input Parameters:
822: + bd - The marker for the boundary to apply conditions along
823: . field - The field to which the conditions apply
824: . diag - The scalar to be placed on the diagonal
825: . f - The function which defines the boundary condition
826: . order - The test variable ordering
827: - ctx - The user-supplied context
829: Output Parameters:
830: + A - The system matrix
831: - b - The Rhs vector
833: Level: advanced
835: .keywords boundary conditions, finite element
836: .seealso MeshGetBoundaryStart
837: @*/
838: int GridSetBoundaryRectangular(int bd, int field, PetscScalar diag, PointFunction f, VarOrdering order, GMat A, GVec b, void *ctx)
839: {
840: Grid grid, grid2;
841: Mesh mesh;
842: int comp; /* The number of field components */
843: int size; /* The number of nodes in the boundary */
844: int *localStart; /* The offset of this field on a node of a given class */
845: int node; /* The canonical node number of the current boundary node */
846: int nclass; /* The class of the current boundary node */
847: double *x, *y, *z; /* Coordinates of the boundary nodes */
848: int vars; /* The number of variables affected (var/node * size) */
849: int *offsets; /* The canonical variable number for the first variable on each node */
850: int *rows; /* Rows corresponding to boundary nodes */
851: PetscScalar *values; /* Boundary values */
852: PetscScalar elem = diag;
853: IS is;
854: int rank;
855: int i, j, count;
856: #ifdef PETSC_USE_BOPT_g
857: PetscTruth opt;
858: #endif
859: int ierr;
865: GMatGetGrid(A, &grid);
866: GVecGetGrid(b, &grid2);
867: if (grid != grid2) SETERRQ(PETSC_ERR_ARG_INCOMP, "Matrix and vector have different underlying grids");
868: GridValidField(grid, field);
869: MPI_Comm_rank(grid->comm, &rank);
870: mesh = grid->mesh;
871: comp = grid->fields[field].disc->comp;
872: offsets = order->offsets;
873: localStart = order->localStart[field];
875: /* Support for constrained problems */
876: VecGetSize(b, &size);
877: if (grid->isConstrained) {
878: if (size != grid->constraintOrder->numVars) SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid vector size");
879: offsets = grid->constraintOrder->offsets;
880: } else {
881: if (size != grid->order->numVars) SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid vector size");
882: }
884: /* Allocate memory */
885: GridGetBoundarySize(grid, bd, field, &size);
886: if (size == 0) {
887: #ifdef PETSC_USE_BOPT_g
888: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
889: if (opt == PETSC_TRUE) {
890: PetscSynchronizedFlush(grid->comm);
891: PetscSynchronizedFlush(grid->comm);
892: }
893: #endif
894: VecAssemblyBegin(b);
895: VecAssemblyEnd(b);
896: ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &is);
897: MatZeroRows(A, is, &elem);
898: ISDestroy(is);
899: return(0);
900: }
901: vars = size*comp;
902: PetscMalloc(size * sizeof(double), &x);
903: PetscMalloc(size * sizeof(double), &y);
904: PetscMalloc(size * sizeof(double), &z);
905: PetscMalloc(vars * sizeof(PetscScalar), &values);
906: PetscMalloc(vars * sizeof(int), &rows);
908: /* Loop over boundary nodes */
909: GridGetBoundaryStart(grid, bd, field, PETSC_FALSE, &node, &nclass);
910: for(i = 0, count = 0; node >= 0; i++) {
911: for(j = 0; j < comp; j++, count++) {
912: rows[count] = offsets[node] + j + localStart[nclass];
913: #ifdef PETSC_USE_BOPT_g
914: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
915: if (opt == PETSC_TRUE) {
916: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
917: rank, bd, field, node, rows[count], nclass);
918: }
919: #endif
920: }
921: MeshGetNodeCoords(mesh, node, &x[i], &y[i], &z[i]);
922: GridGetBoundaryNext(grid, bd, field, PETSC_FALSE, &node, &nclass);
923: }
924: #ifdef PETSC_USE_BOPT_g
925: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
926: if (opt == PETSC_TRUE) {
927: PetscSynchronizedFlush(grid->comm);
928: }
929: #endif
930: /* Get boundary values */
931: (*f)(size, comp, x, y, z, values, ctx);
932: /* Put values in Rhs */
933: #ifdef PETSC_USE_BOPT_g
934: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
935: if (opt == PETSC_TRUE) {
936: PetscPrintf(grid->comm, "Setting boundary values in rhs bd %d field %d\n", bd, field);
937: for(i = 0; i < vars; i++) PetscSynchronizedPrintf(grid->comm, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
938: PetscSynchronizedFlush(grid->comm);
939: }
940: #endif
941: VecSetValues(b, vars, rows, values, INSERT_VALUES);
942: VecAssemblyBegin(b);
943: VecAssemblyEnd(b);
944: /* Set rows of A to the identity */
945: ISCreateGeneral(PETSC_COMM_SELF, vars, rows, &is);
946: MatZeroRows(A, is, &elem);
947: ISDestroy(is);
949: GridResetConstrainedMultiply_Private(grid, A);
951: PetscFree(x);
952: PetscFree(y);
953: PetscFree(z);
954: PetscFree(values);
955: PetscFree(rows);
956: return(0);
957: }
959: /*------------------------------------------------- Matrix Functions ------------------------------------------------*/
960: /*@C GridSetMatBoundary
961: This function sets Dirichlet boundary conditions on the linear system matrix arising
962: from the underlying grid.
964: Collective on GMat
966: Input Parameters:
967: + bd - The marker for the boundary to apply conditions along
968: . field - The field to which the conditions apply
969: . diag - The scalar to be placed on the diagonal
970: - ctx - The user-supplied context
972: Output Parameter:
973: . A - The system matrix
975: Level: advanced
977: .keywords boundary conditions, finite element
978: .seealso MeshGetBoundaryStart
979: @*/
980: int GridSetMatBoundary(int bd, int field, PetscScalar diag, GMat A, void *ctx)
981: {
982: Grid grid;
983: int ierr;
987: GMatGetGrid(A, &grid);
988: GridSetMatBoundaryRectangular(1, &bd, &field, diag, grid->order, A, ctx);
989: return(0);
990: }
992: /*@C GridSetMatBoundaryRectangular
993: This function sets Dirichlet boundary conditions on the linear system matrix arising
994: from the underlying grid, and the default variable ordering can be overridden.
996: Collective on GMat
998: Input Parameters:
999: + num - The number of boundary conditions
1000: . bd - The markers for each boundary to apply conditions along
1001: . field - The fields to which the conditions apply
1002: . diag - The scalar to be placed on the diagonal
1003: . order - The test variable ordering
1004: - ctx - The user-supplied context
1006: Output Parameter:
1007: . A - The system matrix
1009: Level: advanced
1011: .keywords boundary conditions, finite element
1012: .seealso MeshGetBoundaryStart
1013: @*/
1014: int GridSetMatBoundaryRectangular(int num, int *bd, int *field, PetscScalar diag, VarOrdering order, GMat A, void *ctx)
1015: {
1016: Grid grid;
1017: int comp; /* The number of field components */
1018: int size; /* The number of nodes in the boundary */
1019: int totSize; /* The number of nodes in all boundaries */
1020: int *localStart; /* The offset of this field on a node of a given class */
1021: int node; /* The canonical node number of the current boundary node */
1022: int nclass; /* The class of the current boundary node */
1023: int vars; /* The number of variables affected (var/node * size) */
1024: int *offsets; /* The canonical variable number for the first variable on each node */
1025: int *rows; /* Rows corresponding to boundary nodes */
1026: PetscScalar elem = diag;
1027: IS is;
1028: int rank;
1029: int b, i, j, count;
1030: #ifdef PETSC_USE_BOPT_g
1031: PetscTruth opt;
1032: #endif
1033: int ierr;
1038: GMatGetGrid(A, &grid);
1039: offsets = order->offsets;
1040: MPI_Comm_rank(grid->comm, &rank);
1042: /* Allocate memory */
1043: for(b = 0, totSize = 0, vars = 0; b < num; b++) {
1044: GridValidField(grid, field[b]);
1045: GridGetBoundarySize(grid, bd[b], field[b], &size);
1046: totSize += size;
1047: vars += size*grid->fields[field[b]].disc->comp;
1048: }
1049: if (totSize == 0) {
1050: #ifdef PETSC_USE_BOPT_g
1051: PetscSynchronizedFlush(grid->comm);
1052: #endif
1053: ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &is);
1054: MatZeroRows(A, is, &elem);
1055: ISDestroy(is);
1056: return(0);
1057: }
1058: PetscMalloc(vars * sizeof(int), &rows);
1060: /* Loop over boundaries */
1061: for(b = 0, count = 0; b < num; b++) {
1062: comp = grid->fields[field[b]].disc->comp;
1063: localStart = order->localStart[field[b]];
1064: /* Loop over boundary nodes */
1065: GridGetBoundaryStart(grid, bd[b], field[b], PETSC_FALSE, &node, &nclass);
1066: for(i = 0; node >= 0; i++) {
1067: for(j = 0; j < comp; j++, count++) {
1068: rows[count] = offsets[node] + j + localStart[nclass];
1069: #ifdef PETSC_USE_BOPT_g
1070: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1071: if (opt == PETSC_TRUE) {
1072: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
1073: rank, bd[b], field[b], node, rows[count], nclass);
1074: }
1075: #endif
1076: }
1077: GridGetBoundaryNext(grid, bd[b], field[b], PETSC_FALSE, &node, &nclass);
1078: }
1079: }
1080: #ifdef PETSC_USE_BOPT_g
1081: PetscSynchronizedFlush(grid->comm);
1082: if (count != vars) SETERRQ2(PETSC_ERR_PLIB, "Boundary size %d should be %d", count, vars);
1083: #endif
1084: /* Set rows of A to the identity */
1085: ISCreateGeneral(PETSC_COMM_SELF, vars, rows, &is);
1086: MatZeroRows(A, is, &elem);
1087: ISDestroy(is);
1089: GridResetConstrainedMultiply_Private(grid, A);
1091: PetscFree(rows);
1092: return(0);
1093: }
1095: /*@C GridSetMatPointBoundary
1096: This function sets Dirichlet boundary conditions on the linear system matrix arising
1097: from the underlying grid.
1099: Collective on GMat
1101: Input Parameters:
1102: + node - The constrained node
1103: . field - The field to which the conditions apply
1104: . diag - The scalar to be placed on the diagonal
1105: - ctx - The user-supplied context
1107: Output Parameter:
1108: . A - The system matrix
1110: Level: advanced
1112: .keywords boundary conditions, finite element
1113: .seealso MeshGetBoundaryStart
1114: @*/
1115: int GridSetMatPointBoundary(int node, int field, PetscScalar diag, GMat A, void *ctx)
1116: {
1117: Grid grid;
1118: int ierr;
1122: GMatGetGrid(A, &grid);
1123: GridSetMatPointBoundaryRectangular(node, field, diag, grid->order, A, ctx);
1124: return(0);
1125: }
1127: /*@C GridSetMatPointBoundaryRectangular
1128: This function sets Dirichlet boundary conditions on the linear system matrix arising
1129: from the underlying grid, and the default variable ordering can be overridden.
1131: Collective on GMat
1133: Input Parameters:
1134: + node - The constrained node
1135: . field - The field to which the conditions apply
1136: . diag - The scalar to be placed on the diagonal
1137: . order - The test variable ordering
1138: - ctx - The user-supplied context
1140: Output Parameter:
1141: . A - The system matrix
1143: Level: advanced
1145: .keywords boundary conditions, finite element
1146: .seealso MeshGetBoundaryStart
1147: @*/
1148: int GridSetMatPointBoundaryRectangular(int node, int field, PetscScalar diag, VarOrdering order, GMat A, void *ctx)
1149: {
1150: Grid grid;
1151: int comp; /* The number of field components */
1152: int *localStart; /* The offset of this field on a node of a given class */
1153: int nclass; /* The class of the current boundary node */
1154: int *offsets; /* The canonical variable number for the first variable on each node */
1155: int *rows; /* Rows corresponding to boundary nodes */
1156: PetscScalar elem = diag;
1157: IS is;
1158: int rank;
1159: int j;
1160: #ifdef PETSC_USE_BOPT_g
1161: PetscTruth opt;
1162: #endif
1163: int ierr;
1166: if (node < 0) {
1167: ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &is);
1168: MatZeroRows(A, is, &elem);
1169: ISDestroy(is);
1170: return(0);
1171: }
1174: GMatGetGrid(A, &grid);
1175: GridValidField(grid, field);
1176: MPI_Comm_rank(grid->comm, &rank);
1177: comp = grid->fields[field].disc->comp;
1178: offsets = order->offsets;
1179: localStart = order->localStart[field];
1181: /* Allocate memory */
1182: PetscMalloc(comp * sizeof(int), &rows);
1184: GridGetNodeClass(grid, node, &nclass);
1185: for(j = 0; j < comp; j++) {
1186: rows[j] = offsets[node] + j + localStart[nclass];
1187: #ifdef PETSC_USE_BOPT_g
1188: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1189: if (opt == PETSC_TRUE) {
1190: PetscPrintf(PETSC_COMM_SELF, "[%d]field: %d node: %d row: %d class: %d\n", rank, field, node, rows[j], nclass);
1191: }
1192: #endif
1193: }
1196: /* Set rows of A to the identity */
1197: ISCreateGeneral(PETSC_COMM_SELF, comp, rows, &is);
1198: MatZeroRows(A, is, &elem);
1199: ISDestroy(is);
1201: GridResetConstrainedMultiply_Private(grid, A);
1203: PetscFree(rows);
1204: return(0);
1205: }
1207: /*------------------------------------------------- Vector Functions ------------------------------------------------*/
1208: /*@C GridSetVecBoundary
1209: This function sets Dirichlet boundary conditions on the linear Rhs arising
1210: from the underlying grid.
1212: Collective on GVec
1214: Input Parameters:
1215: + bd - The marker for the boundary to apply conditions along
1216: . field - The field to which the conditions apply
1217: . f - The function which defines the boundary condition
1218: - ctx - The user-supplied context
1220: Output Parameter:
1221: . b - The Rhs vector
1223: Level: advanced
1225: .keywords boundary conditions, finite element
1226: .seealso MeshGetBoundaryStart
1227: @*/
1228: int GridSetVecBoundary(int bd, int field, PointFunction f, GVec b, void *ctx)
1229: {
1230: Grid grid;
1231: int ierr;
1235: GVecGetGrid(b, &grid);
1236: GridSetVecBoundaryRectangular(1, &bd, &field, &f, grid->order, b, ctx);
1237: return(0);
1238: }
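/* Usage sketch: when the matrix and vector are assembled separately, the same
   condition is applied with the matrix and vector variants. The marker, field,
   and PointFunction "bcFunc" are illustrative.

     GridSetMatBoundary(1, 0, 1.0, A, ctx);
     GridSetVecBoundary(1, 0, bcFunc, b, ctx);
*/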
1240: /*@C GridSetVecBoundaryRectangular
1241: This function sets Dirichlet boundary conditions on the linear Rhs arising
1242: from the underlying grid, and the default variable ordering can be overridden.
1244: Collective on GVec
1246: Input Parameters:
1247: + num - The number of boundary conditions
1248: . bd - The markers for each boundary to apply conditions along
1249: . field - The fields to which the conditions apply
1250: . f - The functions which define the boundary conditions
1251: . order - The test variable ordering
1252: - ctx - The user-supplied context
1254: Output Parameter:
1255: . b - The Rhs vector
1257: Level: advanced
1259: .keywords boundary conditions, finite element
1260: .seealso MeshGetBoundaryStart
1261: @*/
1262: int GridSetVecBoundaryRectangular(int num, int *bd, int *field, PointFunction *f, VarOrdering order, GVec b, void *ctx)
1263: {
1264: Grid grid;
1265: Mesh mesh;
1266: int comp; /* The number of field components */
1267: int *sizes; /* The number of nodes in each boundary */
1268: int totSize; /* The number of nodes in all boundaries */
1269: int maxSize; /* The maximum number of nodes in any boundary */
1270: int *localStart; /* The offset of this field on a node of a given class */
1271: int node; /* The canonical node number of the current boundary node */
1272: int nclass; /* The class of the current boundary node */
1273: double *x, *y, *z; /* Coordinates of the boundary nodes */
1274: int vars; /* The number of variables affected (var/node * size) */
1275: int *offsets; /* The canonical variable number for the first variable on each node */
1276: int *rows; /* Rows corresponding to boundary nodes */
1277: PetscScalar *values; /* Boundary values */
1278: int size, rank;
1279: int c, i, j, count, off;
1280: #ifdef PETSC_USE_BOPT_g
1281: PetscTruth opt;
1282: #endif
1283: int ierr;
1288: GVecGetGrid(b, &grid);
1289: mesh = grid->mesh;
1290: offsets = order->offsets;
1291: MPI_Comm_rank(grid->comm, &rank);
1293: /* Support for constrained problems */
1294: VecGetSize(b, &size);
1295: if (grid->isConstrained) {
1296: if (size != grid->constraintOrder->numVars) {
1297: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1298: }
1299: offsets = grid->constraintOrder->offsets;
1300: } else {
1301: if (size != grid->order->numVars) {
1302: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1303: }
1304: }
1306: /* Allocate memory */
1307: PetscMalloc(num * sizeof(int), &sizes);
1308: for(c = 0, totSize = 0, maxSize = 0, vars = 0; c < num; c++) {
1309: GridValidField(grid, field[c]);
1310: GridGetBoundarySize(grid, bd[c], field[c], &sizes[c]);
1311: totSize += sizes[c];
1312: maxSize = PetscMax(maxSize, sizes[c]);
1313: vars += sizes[c]*grid->fields[field[c]].disc->comp;
1314: }
1315: if (totSize == 0) {
1316: #ifdef PETSC_USE_BOPT_g
1317: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1318: if (opt == PETSC_TRUE) {
1319: PetscSynchronizedFlush(grid->comm);
1320: PetscSynchronizedFlush(grid->comm);
1321: }
1322: #endif
1323: VecAssemblyBegin(b);
1324: VecAssemblyEnd(b);
1325: return(0);
1326: }
1327: PetscMalloc(maxSize * sizeof(double), &x);
1328: PetscMalloc(maxSize * sizeof(double), &y);
1329: PetscMalloc(maxSize * sizeof(double), &z);
1330: PetscMalloc(vars * sizeof(PetscScalar), &values);
1331: PetscMalloc(vars * sizeof(int), &rows);
1333: /* Loop over boundaries */
1334: for(c = 0, count = 0, off = 0; c < num; c++, off = count) {
1335: if (sizes[c] == 0) {
1336: #ifdef PETSC_USE_BOPT_g
1337: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1338: if (opt == PETSC_TRUE) {
1339: PetscSynchronizedFlush(grid->comm);
1340: PetscSynchronizedFlush(grid->comm);
1341: }
1342: #endif
1343: continue;
1344: }
1345: comp = grid->fields[field[c]].disc->comp;
1346: localStart = order->localStart[field[c]];
1347: /* Loop over boundary nodes */
1348: GridGetBoundaryStart(grid, bd[c], field[c], PETSC_FALSE, &node, &nclass);
1349: for(i = 0; node >= 0; i++) {
1350: for(j = 0; j < comp; j++, count++) {
1351: rows[count] = offsets[node] + j + localStart[nclass];
1352: #ifdef PETSC_USE_BOPT_g
1353: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1354: if (opt == PETSC_TRUE) {
1355: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
1356: rank, bd[c], field[c], node, rows[count], nclass);
1357: }
1358: #endif
1359: }
1360: MeshGetNodeCoords(mesh, node, &x[i], &y[i], &z[i]);
1361: GridGetBoundaryNext(grid, bd[c], field[c], PETSC_FALSE, &node, &nclass);
1362: }
1363: #ifdef PETSC_USE_BOPT_g
1364: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1365: if (opt == PETSC_TRUE) {
1366: PetscSynchronizedFlush(grid->comm);
1367: }
1368: #endif
1369: /* Get boundary values */
1370: (*(f[c]))(sizes[c], comp, x, y, z, &values[off], ctx);
1371: #ifdef PETSC_USE_BOPT_g
1372: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1373: if (opt == PETSC_TRUE) {
1374: PetscPrintf(grid->comm, "Setting boundary values in rhs bd %d field %d\n", bd[c], field[c]);
1375: for(i = off; i < count; i++) PetscSynchronizedPrintf(grid->comm, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
1376: PetscSynchronizedFlush(grid->comm);
1377: }
1378: #endif
1379: }
1380: if (count != vars) SETERRQ2(PETSC_ERR_PLIB, "Boundary size %d should be %d", count, vars);
1381: /* Put values in Rhs */
1382: VecSetValues(b, vars, rows, values, INSERT_VALUES);
1383: VecAssemblyBegin(b);
1384: VecAssemblyEnd(b);
1386: PetscFree(sizes);
1387: PetscFree(x);
1388: PetscFree(y);
1389: PetscFree(z);
1390: PetscFree(values);
1391: PetscFree(rows);
1392: return(0);
1393: }
1395: /*@C GridSetVecPointBoundary
1396: This function sets Dirichlet boundary conditions on the linear Rhs arising
1397: from the underlying grid.
1399: Collective on GVec
1401: Input Parameters:
1402: + node - The constrained node
1403: . field - The field to which the conditions apply
1404: . f - The function which defines the boundary condition
1405: - ctx - The user-supplied context
1407: Output Parameter:
1408: . b - The Rhs vector
1410: Level: advanced
1412: .keywords boundary conditions, finite element
1413: .seealso MeshGetBoundaryStart
1414: @*/
1415: int GridSetVecPointBoundary(int node, int field, PointFunction f, GVec b, void *ctx)
1416: {
1417: Grid grid;
1418: int ierr;
1422: GVecGetGrid(b, &grid);
1423: GridSetVecPointBoundaryRectangular(node, field, f, grid->order, b, ctx);
1424: return(0);
1425: }
1427: /*@C GridSetVecPointBoundaryRectangular
1428: This function sets Dirichlet boundary conditions on the linear Rhs arising
1429: from the underlying grid, and the default variable ordering can be overridden.
1431: Collective on GVec
1433: Input Parameters:
1434: + node - The constrained node
1435: . field - The field to which the conditions apply
1436: . f - The function which defines the boundary condition
1437: . order - The test variable ordering
1438: - ctx - The user-supplied context
1440: Output Parameter:
1441: . b - The Rhs vector
1443: Level: advanced
1445: .keywords boundary conditions, finite element
1446: .seealso MeshGetBoundaryStart
1447: @*/
1448: int GridSetVecPointBoundaryRectangular(int node, int field, PointFunction f, VarOrdering order, GVec b, void *ctx)
1449: {
1450: Grid grid;
1451: Mesh mesh;
1452: int comp; /* The number of field components */
1453: int size; /* The number of nodes in the boundary */
1454: int *localStart; /* The offset of this field on a node of a given class */
1455: int nclass; /* The class of the current boundary node */
1456: double x, y, z; /* Coordinates of the boundary nodes */
1457: int *offsets; /* The canonical variable number for the first variable on each node */
1458: int *rows; /* Rows corresponding to boundary nodes */
1459: PetscScalar *values; /* Boundary values */
1460: int rank;
1461: int c;
1462: #ifdef PETSC_USE_BOPT_g
1463: PetscTruth opt;
1464: #endif
1465: int ierr;
1468: if (node < 0) {
1469: VecAssemblyBegin(b);
1470: VecAssemblyEnd(b);
1471: return(0);
1472: }
1475: GVecGetGrid(b, &grid);
1476: GridValidField(grid, field);
1477: MPI_Comm_rank(grid->comm, &rank);
1478: mesh = grid->mesh;
1479: comp = grid->fields[field].disc->comp;
1480: offsets = order->offsets;
1481: localStart = order->localStart[field];
1484: /* Support for constrained problems */
1485: VecGetSize(b, &size);
1486: if (grid->isConstrained) {
1487: if (size != grid->constraintOrder->numVars) {
1488: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1489: }
1490: offsets = grid->constraintOrder->offsets;
1491: } else {
1492: if (size != grid->order->numVars) {
1493: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1494: }
1495: }
1497: /* Allocate memory */
1498: size = 1;
1499: PetscMalloc(comp * sizeof(PetscScalar), &values);
1500: PetscMalloc(comp * sizeof(int), &rows);
1502: MeshGetNodeCoords(mesh, node, &x, &y, &z);
1503: GridGetNodeClass(grid, node, &nclass);
1504: for(c = 0; c < comp; c++) {
1505: rows[c] = offsets[node] + c + localStart[nclass];
1506: #ifdef PETSC_USE_BOPT_g
1507: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1508: if (opt == PETSC_TRUE) {
1509: PetscPrintf(PETSC_COMM_SELF, "[%d]field: %d node: %d row: %d class: %d\n", rank, field, node, rows[c], nclass);
1510: }
1511: #endif
1512: }
1513: /* Get boundary values */
1514: (*f)(size, comp, &x, &y, &z, values, ctx);
1515: /* Put values in Rhs */
1516: #ifdef PETSC_USE_BOPT_g
1517: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1518: if (opt == PETSC_TRUE) {
1519: PetscPrintf(PETSC_COMM_SELF, "Setting boundary values on rhs node %d field %d\n", node, field);
1520: for(c = 0; c < comp; c++) PetscPrintf(PETSC_COMM_SELF, " row: %d val: %g\n", rows[c], PetscRealPart(values[c]));
1521: }
1522: #endif
1523: VecSetValues(b, comp, rows, values, INSERT_VALUES);
1524: VecAssemblyBegin(b);
1525: VecAssemblyEnd(b);
1527: PetscFree(values);
1528: PetscFree(rows);
1529: return(0);
1530: }
1532: /*@C GridSetVecBoundaryDifference
1533: This function sets Dirichlet boundary conditions on the linear Rhs arising
1534: from the underlying grid, but actually sets it to the difference of the
1535: function value and the value in the given vector. This is commonly used in
1536: a time dependent, nonlinear problem for which we would like the rhs boundary
1537: values to be:
1539: U^{n+1}_k - U^{n+1}_{k+1}
1541: where n is the time iteration index, and k is the Newton iteration index. This
1542: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1543: identity for that row. This is very useful for time dependent boundary conditions
1544: for which the traditional method of letting the rhs value be zero does not work.
1546: Collective on GVec
1548: Input Parameters:
1549: + bd - The marker for the boundary to apply conditions along
1550: . u - A grid vector, usually the previous solution
1551: . field - The field to which the conditions apply
1552: . f - The function which defines the boundary condition
1553: - ctx - The user-supplied context
1555: Output Parameter:
1556: . b - The Rhs vector
1558: Level: advanced
1560: .keywords boundary conditions, finite element
1561: .seealso MeshGetBoundaryStart
1562: @*/
1563: int GridSetVecBoundaryDifference(int bd, int field, GVec u, PointFunction f, GVec b, void *ctx)
1564: {
1565: Grid grid;
1566: int ierr;
1570: GVecGetGrid(b, &grid);
1571: GridSetVecBoundaryDifferenceRectangular(bd, field, u, f, grid->order, b, ctx);
1572: return(0);
1573: }
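/* Usage sketch inside a Newton loop ("u" holds the current iterate): each rhs
   boundary row receives u - g, so an identity Jacobian row drives the update
   toward the boundary value g. Marker, field, and "bcFunc" are illustrative.

     GridSetVecBoundaryDifference(1, 0, u, bcFunc, b, ctx);
*/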
1575: /*@C GridSetVecBoundaryDifferenceRectangular
1576: This function sets Dirichlet boundary conditions on the linear Rhs arising
1577: from the underlying grid, but actually sets it to the difference of the
1578: function value and the value in the given vector. This is commonly used in
1579: a time dependent, nonlinear problem for which we would like the rhs boundary
1580: values to be:
1582: U^{n+1}_k - U^{n+1}_{k+1}
1584: where n is the time iteration index, and k is the Newton iteration index. This
1585: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1586: identity for that row. This is very useful for time dependent boundary conditions
1587: for which the traditional method of letting the rhs value be zero does not work.
1589: Collective on GVec
1591: Input Parameters:
1592: + bd - The marker for the boundary to apply conditions along
1593: . u - A grid vector, usually the previous solution
1594: . field - The field to which the conditions apply
1595: . f - The function which defines the boundary condition
1596: . order - The test variable ordering
1597: - ctx - The user-supplied context
1599: Output Parameter:
1600: . b - The Rhs vector
1602: Level: advanced
1604: .keywords boundary conditions, finite element
1605: .seealso MeshGetBoundaryStart
1606: @*/
1607: int GridSetVecBoundaryDifferenceRectangular(int bd, int field, GVec u, PointFunction f, VarOrdering order, GVec b, void *ctx)
1608: {
1609: Grid grid;
1610: Mesh mesh;
1611: int comp; /* The number of field components */
1612: int size; /* The number of nodes in the boundary */
1613: int *localStart; /* The offset of this field on a node of a given class */
1614: int node; /* The canonical node number of the current boundary node */
1615: int nclass; /* The class of the current boundary node */
1616: double *x, *y, *z; /* Coordinates of the boundary nodes */
1617: int vars; /* The number of variables affected (var/node * size) */
1618: int *offsets; /* The canonical variable number for the first variable on each node */
1619: int *rows; /* Rows corresponding to boundary nodes */
1620: PetscScalar *values; /* Boundary values */
1621: PetscScalar *uArray; /* The values in the vector u */
1622: int firstVar; /* The canonical number of the first variable in this domain */
1623: int rank;
1624: int i, j, count;
1625: #ifdef PETSC_USE_BOPT_g
1626: PetscTruth opt;
1627: #endif
1628: int ierr;
1633: GVecGetGrid(b, &grid);
1634: GridValidField(grid, field);
1635: MPI_Comm_rank(grid->comm, &rank);
1636: mesh = grid->mesh;
1637: comp = grid->fields[field].disc->comp;
1638: firstVar = order->firstVar[rank];
1639: offsets = order->offsets;
1640: localStart = order->localStart[field];
1642: /* Support for constrained problems */
1643: VecGetSize(b, &size);
1644: if (grid->isConstrained) {
1645: if (size != grid->constraintOrder->numVars) {
1646: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1647: }
1648: offsets = grid->constraintOrder->offsets;
1649: } else {
1650: if (size != grid->order->numVars) {
1651: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1652: }
1653: }
1655: /* Allocate memory */
1656: GridGetBoundarySize(grid, bd, field, &size);
1657: if (size == 0) {
1658: #ifdef PETSC_USE_BOPT_g
1659: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1660: if (opt == PETSC_TRUE) {
1661: PetscSynchronizedFlush(grid->comm);
1662: PetscSynchronizedFlush(grid->comm);
1663: }
1664: #endif
1665: VecAssemblyBegin(b);
1666: VecAssemblyEnd(b);
1667: return(0);
1668: }
1669: vars = size*comp;
1670: PetscMalloc(size * sizeof(double), &x);
1671: PetscMalloc(size * sizeof(double), &y);
1672: PetscMalloc(size * sizeof(double), &z);
1673: PetscMalloc(vars * sizeof(PetscScalar), &values);
1674: PetscMalloc(vars * sizeof(int), &rows);
1676: /* Loop over boundary nodes */
1677: GridGetBoundaryStart(grid, bd, field, PETSC_FALSE, &node, &nclass);
1678: for(i = 0, count = 0; node >= 0; i++) {
1679: for(j = 0; j < comp; j++, count++) {
1680: rows[count] = offsets[node] + j + localStart[nclass];
1681: #ifdef PETSC_USE_BOPT_g
1682: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1683: if (opt == PETSC_TRUE) {
1684: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
1685: rank, bd, field, node, rows[count], nclass);
1686: }
1687: #endif
1688: }
1689: MeshGetNodeCoords(mesh, node, &x[i], &y[i], &z[i]);
1690: GridGetBoundaryNext(grid, bd, field, PETSC_FALSE, &node, &nclass);
1691: }
1692: #ifdef PETSC_USE_BOPT_g
1693: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1694: if (opt == PETSC_TRUE) {
1695: PetscSynchronizedFlush(grid->comm);
1696: }
1697: #endif
1698: /* Get boundary values */
1699: (*f)(size, comp, x, y, z, values, ctx);
1700: /* Taking the difference (we know that no values are off-processor) */
1701: VecGetArray(u, &uArray);
1702: for(i = 0; i < vars; i++)
1703: values[i] = uArray[rows[i]-firstVar] - values[i];
1704: VecRestoreArray(u, &uArray);
1705: /* Put values in Rhs */
1706: #ifdef PETSC_USE_BOPT_g
1707: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1708: if (opt == PETSC_TRUE) {
1709: PetscPrintf(grid->comm, "Setting boundary values in rhs bd %d field %d\n", bd, field);
1710: for(i = 0; i < vars; i++) PetscSynchronizedPrintf(grid->comm, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
1711: PetscSynchronizedFlush(grid->comm);
1712: }
1713: #endif
1714: VecSetValues(b, vars, rows, values, INSERT_VALUES);
1715: VecAssemblyBegin(b);
1716: VecAssemblyEnd(b);
1718: PetscFree(x);
1719: PetscFree(y);
1720: PetscFree(z);
1721: PetscFree(values);
1722: PetscFree(rows);
1723: return(0);
1724: }
1726: /*@C GridSetVecPointBoundaryDifference
1727: This function sets Dirichlet boundary conditions on the linear Rhs arising
1728: from the underlying grid, but actually sets it to the difference of the
1729: function value and the value in the given vector. This is commonly used in
1730: a time dependent, nonlinear problem for which we would like the rhs boundary
1731: values to be:
1733: U^{n+1}_k - U^{n+1}_{k+1}
1735: where n is the time iteration index, and k is the Newton iteration index. This
1736: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1737: identity for that row. This is very useful for time dependent boundary conditions
1738: for which the traditional method of letting the rhs value be zero does not work.
1740: Collective on GVec
1742: Input Parameters:
1743: + node - The constrained node
1744: . u - A grid vector, usually the previous solution
1745: . field - The field to which the conditions apply
1746: . f - The function which defines the boundary condition
1747: - ctx - The user-supplied context
1749: Output Parameter:
1750: . b - The Rhs vector
1752: Level: advanced
1754: .keywords boundary conditions, finite element
1755: .seealso MeshGetBoundaryStart
1756: @*/
1757: int GridSetVecPointBoundaryDifference(int node, int field, GVec u, PointFunction f, GVec b, void *ctx)
1758: {
1759: Grid grid;
1760: int ierr;
1764: GVecGetGrid(b, &grid);
1765: GridSetVecPointBoundaryDifferenceRectangular(node, field, u, f, grid->order, b, ctx);
1766: return(0);
1767: }
1769: /*@C GridSetVecPointBoundaryDifferenceRectangular
1770: This function sets Dirichlet boundary conditions on the linear Rhs arising
1771: from the underlying grid, but actually sets it to the difference of the
1772: function value and the value in the given vector. This is commonly used in
1773: a time dependent, nonlinear problem for which we would like the rhs boundary
1774: values to be:
1776: U^{n+1}_k - U^{n+1}_{k+1}
1778: where n is the time iteration index, and k is the Newton iteration index. This
1779: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1780: identity for that row. This is very useful for time dependent boundary conditions
1781: for which the traditional method of letting the rhs value be zero does not work.
1783: Collective on GVec
1785: Input Parameters:
1786: + node - The constrained node
1787: . u - A grid vector, usually the previous solution
1788: . field - The field to which the conditions apply
1789: . f - The function which defines the boundary condition
1790: . order - The test variable ordering
1791: - ctx - The user-supplied context
1793: Output Parameter:
1794: . b - The Rhs vector
1796: Level: advanced
1798: .keywords boundary conditions, finite element
1799: .seealso MeshGetBoundaryStart
1800: @*/
1801: int GridSetVecPointBoundaryDifferenceRectangular(int node, int field, GVec u, PointFunction f, VarOrdering order, GVec b, void *ctx)
1802: {
1803: Grid grid;
1804: Mesh mesh;
1805: int comp; /* The number of field components */
1806: int size; /* The number of nodes in the boundary */
1807: int *localStart; /* The offset of this field on a node of a given class */
1808: int nclass; /* The class of the current boundary node */
1809: double x, y, z; /* Coordinates of the boundary nodes */
1810: int *offsets; /* The canonical variable number for the first variable on each node */
1811: int *rows; /* Rows corresponding to boundary nodes */
1812: PetscScalar *values; /* Boundary values */
1813: PetscScalar *uArray; /* The values in the vector u */
1814: int firstVar; /* The canonical number of the first variable in this domain */
1815: int rank;
1816: int i, j;
1817: #ifdef PETSC_USE_BOPT_g
1818: PetscTruth opt;
1819: #endif
1820: int ierr;
1823: if (node < 0) {
1824: VecAssemblyBegin(b);
1825: VecAssemblyEnd(b);
1826: return(0);
1827: }
1829: GVecGetGrid(b, &grid);
1830: GridGetMesh(grid, &mesh);
1831: GridValidField(grid, field);
1832: MPI_Comm_rank(grid->comm, &rank);
1833: comp = grid->fields[field].disc->comp;
1834: firstVar = order->firstVar[rank];
1835: offsets = order->offsets;
1836: localStart = order->localStart[field];
1838: /* Support for constrained problems */
1839: VecGetSize(b, &size);
1840: if (grid->isConstrained) {
1841: if (size != grid->constraintOrder->numVars) {
1842: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1843: }
1844: offsets = grid->constraintOrder->offsets;
1845: } else {
1846: if (size != grid->order->numVars) {
1847: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1848: }
1849: }
1851: /* Allocate memory */
1852: size = 1;
1853: PetscMalloc(comp * sizeof(PetscScalar), &values);
1854: PetscMalloc(comp * sizeof(int), &rows);
1856: MeshGetNodeCoords(mesh, node, &x, &y, &z);
1857: GridGetNodeClass(grid, node, &nclass);
1858: for(j = 0; j < comp; j++) {
1859: rows[j] = offsets[node] + j + localStart[nclass];
1860: #ifdef PETSC_USE_BOPT_g
1861: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1862: if (opt == PETSC_TRUE) {
1863: PetscPrintf(PETSC_COMM_SELF, "[%d]field: %d node: %d row: %d class: %d\n", rank, field, node, rows[j], nclass);
1864: }
1865: #endif
1866: }
1867: /* Get boundary values */
1868: (*f)(size, comp, &x, &y, &z, values, ctx);
1869: /* Taking the difference (we know that no values are off-processor) */
1870: VecGetArray(u, &uArray);
1871: for(i = 0; i < comp; i++) values[i] = uArray[rows[i]-firstVar] - values[i];
1872: VecRestoreArray(u, &uArray);
1873: /* Put values in Rhs */
1874: #ifdef PETSC_USE_BOPT_g
1875: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1876: if (opt == PETSC_TRUE) {
1877: PetscPrintf(grid->comm, "Setting boundary values on rhs node %d field %d\n", node, field);
1878: for(i = 0; i < comp; i++) PetscPrintf(PETSC_COMM_SELF, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
1879: }
1880: #endif
1881: VecSetValues(b, comp, rows, values, INSERT_VALUES);
1882: VecAssemblyBegin(b);
1883: VecAssemblyEnd(b);
1885: PetscFree(values);
1886: PetscFree(rows);
1887: return(0);
1888: }