Actual source code: gvec2d.c

  1: #ifdef PETSC_RCS_HEADER
  2: static char vcid[] = "$Id: gvec2d.c,v 1.22 2000/10/08 00:27:05 knepley Exp $";
  3: #endif

  5: /* Implements FE vectors derived from 2d triangular grids */
  6: #include "petscsles.h"                 /* For ALE Operators */
  7: #include "src/gvec/gvecimpl.h"         /*I "gvec.h" I*/
  8: #include "src/mesh/impls/triangular/triimpl.h"
  9: #include "gvec2d.h"

 11: int GVecGetLocalGVec_Triangular_2D(GVec g, GVec *gvec) {
 12:   SETERRQ(PETSC_ERR_SUP, "Not yet supported");
 13: }

 15: int GVecRestoreLocalGVec_Triangular_2D(GVec g, GVec *gvec) {
 16:   SETERRQ(PETSC_ERR_SUP, "Not yet supported");
 17: }

 19: int GVecGlobalToLocal_Triangular_2D(GVec g, InsertMode mode, GVec l) {
 20:   SETERRQ(PETSC_ERR_SUP, "Not yet supported");
 21: }

 23: int GVecLocalToGlobal_Triangular_2D(GVec l, InsertMode mode, GVec g) {
 24:   SETERRQ(PETSC_ERR_SUP, "Not yet supported");
 25: }

 27: int GVecEvaluateFunction_Triangular_2D(Grid grid, GVec v, VarOrdering order, PointFunction f, PetscScalar alpha, void *ctx) {
 28:   Mesh          mesh;
 29:   Partition     part;
 30:   int         **localStart = order->localStart;
 31:   FieldClassMap map;
 32:   int           numNodes, numFields;
 33:   int          *fields, **fieldClasses, *classes, *classSizes;
 34:   int           nodeVars;
 35:   PetscScalar  *array;
 36:   double        x, y, z;
 37:   int           size, locSize, overlapSize;
 38:   int           fieldIdx, field, node, nclass, count;
 39:   int           ierr;

 42:   GridGetMesh(grid, &mesh);
 43:   MeshGetPartition(mesh, &part);
 44:   VarOrderingGetClassMap(order, &map);
 45:   numNodes     = map->numNodes;
 46:   numFields    = map->numFields;
 47:   fields       = map->fields;
 48:   fieldClasses = map->fieldClasses;
 49:   classes      = map->classes;
 50:   classSizes   = map->classSizes;
 51:   /* Check for a locally ghosted vector */
 52:   VecGetArray(v, &array);
 53:   VecGetLocalSize(v, &locSize);
 54:   /* VecGetGhostSize(v, &overlapSize); */
 55:   overlapSize = locSize + ((Vec_MPI *) v->data)->nghost;
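  /* A possible alternative to reaching into Vec_MPI here, assuming the vector
     was created with VecCreateGhost(), would be VecGhostGetLocalForm() followed
     by VecGetSize() on the local form; the direct access above simply avoids
     that indirection. */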
 56:   size        = order->numLocVars;
 57: #if 0
 58:   /* This doesn't work for constrained vectors since it conflicts with the check on count,
 59:      as variables generated by constraints are not handled here
 60:   */
 61:   if (locSize     != order->numLocVars)
 62:     SETERRQ2(PETSC_ERR_ARG_WRONG, "Wrong vector size %d should be %d", locSize, order->numLocVars);
 63: #endif
 64:   if (overlapSize > locSize) {
 65:     PartitionGetNumOverlapNodes(part, &numNodes);
 66:     size = order->numOverlapVars;
 67:     if (overlapSize != order->numOverlapVars) SETERRQ(PETSC_ERR_ARG_WRONG, "Wrong size for vector");
 68:   }
 69:   for(node = 0, count = 0; node < numNodes; node++, count += nodeVars) {
 70:     nclass   = classes[node];
 71:     nodeVars = classSizes[nclass];
 72:     for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
 73:       field = fields[fieldIdx];
 74:       if (fieldClasses[fieldIdx][nclass] == 0) continue;
 75:       MeshGetNodeCoords(mesh, node, &x, &y, &z);
 76:       (*f)(1, grid->fields[field].numComp, &x, &y, &z, &array[count+localStart[field][nclass]], ctx);
 77:       array[count+localStart[field][nclass]] *= alpha;
 78:     }
 79:   }
 80:   if (count != size) SETERRQ(PETSC_ERR_PLIB, "Invalid variable offset records");
 81:   VecRestoreArray(v, &array);
 82:   return(0);
 83: }
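The PointFunction argument used throughout these routines can be read off from the call
sites above: it receives a point count, a component count, coordinate arrays, an output
array, and the user context. A minimal sketch of such a callback (the name and body are
hypothetical):

int ExamplePointFunction(int n, int comp, double *x, double *y, double *z, PetscScalar *values, void *ctx)
{
  int i, c;

  /* n == 0 calls are collective no-ops; see the Collective variants below */
  for(i = 0; i < n; i++) {
    for(c = 0; c < comp; c++) {
      values[i*comp+c] = x[i]; /* e.g. a profile linear in x */
    }
  }
  return(0);
}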

 85: int GVecEvaluateFunctionBoundary_Triangular_2D(Grid grid, GVec v, int bd, VarOrdering order, PointFunction func,
 86:                                                PetscScalar alpha, void *ctx)
 87: {
 88:   Mesh          mesh         = grid->mesh;
 89:   int         **localStart   = order->localStart;
 90:   int          *offsets      = order->offsets;
 91:   int          *localOffsets = order->localOffsets;
 92:   int           firstVar     = order->firstVar[mesh->part->rank];
 93:   FieldClassMap map;
 94:   int           numNodes, numFields;
 95:   int          *fields, **fieldClasses;
 96:   PetscScalar  *array;        /* The local vector values */
 97:   double        x, y, z;
 98:   int           f, field, node, nclass, row;
 99:   int           ierr;

102:   VarOrderingGetClassMap(order, &map);
103:   numNodes     = map->numNodes;
104:   numFields    = map->numFields;
105:   fields       = map->fields;
106:   fieldClasses = map->fieldClasses;
107:   /* Loop over boundary nodes */
108:   VecGetArray(v, &array);
109:   for(f = 0; f < numFields; f++) {
110:     field = fields[f];
111:     (*grid->ops->getboundarystart)(grid, bd, f, PETSC_FALSE, map, &node, &nclass);
112:     while(node >= 0) {
113:       if (node >= numNodes) {
114:         row = localOffsets[node-numNodes];
115:       } else {
116:         row = offsets[node] - firstVar + localStart[field][nclass];
117:       }
118:       MeshGetNodeCoords(mesh, node, &x, &y, &z);
119:       if (fieldClasses[f][nclass] != 0) {
120:         (*func)(1, grid->fields[field].numComp, &x, &y, &z, &array[row], ctx);
121:         array[row] *= alpha;
122:       }
123:       (*grid->ops->getboundarynext)(grid, bd, f, PETSC_FALSE, map, &node, &nclass);
126:     }
127:   }
128:   VecRestoreArray(v, &array);
129:   return(0);
130: }

132: int GVecEvaluateFunctionCollective_Triangular_2D(Grid grid, GVec v, VarOrdering order, PointFunction f, PetscScalar alpha,
133:                                                  void *ctx)
134: {
135:   Mesh             mesh;
136:   FieldClassMap    map;
137:   int            **localStart = order->localStart;
138:   int              numNodes, numFields;
139:   int             *fields, **fieldClasses, *classes, *classSizes;
140:   int              nodeVars, comp;
141:   PetscScalar     *array;
142:   double           x, y, z;
143:   int              maxNodes; /* The most nodes in any domain */
144:   int              fieldIdx, field, node, nclass, count;
145:   int              ierr;

148:   GridGetMesh(grid, &mesh);
149:   VarOrderingGetClassMap(order, &map);
150:   numNodes     = map->numNodes;
151:   numFields    = map->numFields;
152:   fields       = map->fields;
153:   fieldClasses = map->fieldClasses;
154:   classes      = map->classes;
155:   classSizes   = map->classSizes;
156:   MPI_Allreduce(&numNodes, &maxNodes, 1, MPI_INT, MPI_MAX, grid->comm);
157:   VecGetArray(v, &array);
158:   for(node = 0, count = 0; node < maxNodes; node++) {
159:     if (node < numNodes) {
160:       nclass     = classes[node];
161:       nodeVars   = classSizes[nclass];
162:       for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
163:         field = fields[fieldIdx];
164:         comp  = grid->fields[field].numComp;
165:         if (fieldClasses[fieldIdx][nclass] == 0) {
166:           /* Every processor must participate in each call to f */
167:           (*f)(0, 0, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, ctx);
168:           continue;
169:         }
170:         MeshGetNodeCoords(mesh, node, &x, &y, &z);
171:         (*f)(1, comp, &x, &y, &z, &array[count+localStart[field][nclass]], ctx);
172:         array[count+localStart[field][nclass]] *= alpha;
173:       }
174:       count += nodeVars;
175:     } else {
176:       /* Every processor must participate in each call to f */
177:       (*f)(0, 0, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, ctx);
178:     }
179:   }
180:   if (count != order->numLocVars) {
181:     SETERRQ2(PETSC_ERR_PLIB, "Invalid number of variables modified %d should be %d", count, order->numLocVars);
182:   }
183:   VecRestoreArray(v, &array);
184:   return(0);
185: }
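The collective variant pads the node loop to the global maximum (via MPI_Allreduce) so
that a PointFunction containing collective operations is invoked the same number of times
on every rank. The pattern in isolation, as a hedged sketch (PadCollectiveCalls and its
callback shape are hypothetical):

#include "mpi.h"

/* Invoke a possibly-collective callback the same number of times on every
   rank by padding the loop to the global maximum item count */
static int PadCollectiveCalls(MPI_Comm comm, int localCount, int (*g)(int n, void *ctx), void *ctx)
{
  int maxCount, i;

  MPI_Allreduce(&localCount, &maxCount, 1, MPI_INT, MPI_MAX, comm);
  for(i = 0; i < maxCount; i++) {
    if (i < localCount) (*g)(1, ctx); /* real work on a local item */
    else                (*g)(0, ctx); /* dummy call keeps collectives inside g matched */
  }
  return(0);
}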

187: int GVecEvaluateFunctionGalerkin_Triangular_2D(Grid grid, GVec v, int numFields, int *fields, LocalVarOrdering locOrder,
188:                                                PointFunction f, PetscScalar alpha, void *ctx)
189: {
190:   Mesh         mesh        = grid->mesh;
191:   int          numElements = mesh->numFaces;
192:   ElementVec   vec         = grid->vec;
193:   int         *elemStart   = locOrder->elemStart;
194:   PetscScalar *array       = vec->array;
195:   int          field, fieldIdx, elem;
196: #ifdef PETSC_USE_BOPT_g
197:   PetscTruth   opt;
198: #endif
199:   int          ierr;

202:   /* Loop over elements */
203:   for(elem = 0; elem < numElements; elem++) {
204:     /* Initialize element vector */
205:     ElementVecZero(vec);

207:     /* Get contribution to the element vector from each discretization */
208:     for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
209:       field = fields[fieldIdx];
210:       ierr  = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc, mesh, f, alpha, elem, &array[elemStart[field]], ctx);CHKERRQ(ierr);
214:     }

216:     /* Setup global row and column indices */
217:     GridCalcElementVecIndices(grid, elem, vec);
218: #ifdef PETSC_USE_BOPT_g
219:     PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
220:     if (opt == PETSC_TRUE) {
221:       int var;

223:       for(var = 0; var < vec->reduceSize; var++)
224:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
225:     }
226: #endif
227:     /* Put values in global vector */
228:     ElementVecSetValues(vec, v, ADD_VALUES);
229:   }

231:   VecAssemblyBegin(v);
232:   VecAssemblyEnd(v);
233:   return(0);
234: }

236: int GVecEvaluateFunctionGalerkinCollective_Triangular_2D(Grid grid, GVec v, int numFields, int *fields,
237:                                                          LocalVarOrdering locOrder, PointFunction f,
238:                                                          PetscScalar alpha, void *ctx)
239: {
240:   Mesh         mesh        = grid->mesh;
241:   int          numElements = mesh->numFaces;
242:   ElementVec   vec         = grid->vec;
243:   int         *elemStart   = locOrder->elemStart;
244:   PetscScalar *array       = vec->array;
245:   int          maxElements;
246:   int          field, fieldIdx, elem;
247: #ifdef PETSC_USE_BOPT_g
248:   PetscTruth   opt;
249: #endif
250:   int          ierr;

253:   MPI_Allreduce(&numElements, &maxElements, 1, MPI_INT, MPI_MAX, grid->comm);
254:   /* Loop over elements */
255:   for(elem = 0; elem < maxElements; elem++) {
256:     if (elem < numElements) {
257:       /* Initialize element vector */
258:       ElementVecZero(vec);

260:       /* Get contribution to the element vector from each discretization */
261:       for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
262:         field = fields[fieldIdx];
263:         ierr  = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc, mesh, f, alpha, elem, &array[elemStart[field]], ctx);CHKERRQ(ierr);
267:       }

269:       /* Setup global row and column indices */
270:       GridCalcElementVecIndices(grid, elem, vec);
271: #ifdef PETSC_USE_BOPT_g
272:       PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
273:       if (opt == PETSC_TRUE) {
274:         int var;

276:         for(var = 0; var < vec->reduceSize; var++)
277:           PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
278:       }
279: #endif
280:       /* Put values in global vector */
281:       ElementVecSetValues(vec, v, ADD_VALUES);
282:     } else {
283:       /* Every processor must participate in each call to f */
284:       for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
285:         field = fields[fieldIdx];
286:         ierr  = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc, mesh, f, 0.0, -1, PETSC_NULL, ctx);CHKERRQ(ierr);
287:       }
288:     }
289:   }

291:   VecAssemblyBegin(v);
292:   VecAssemblyEnd(v);
293:   return(0);
294: }

296: int GVecEvaluateBoundaryFunctionGalerkin_Triangular_2D(Grid grid, GVec v, int numFields, int *fields,
297:                                                        LocalVarOrdering locOrder, PointFunction f, PetscScalar alpha, void *ctx)
298: {
299:   Mesh                     mesh      = grid->mesh;
300:   Partition                part;
301:   Mesh_Triangular         *tri       = (Mesh_Triangular *) mesh->data;
302:   int                      elemSize  = locOrder->elemSize;
303:   int                     *elemStart = locOrder->elemStart;
304:   int                      numEdges  = mesh->numEdges;
305:   int                     *bdEdges   = tri->bdEdges;
306:   int                      firstEdge;
307:   ElementVec               vec;         /* The element vector */
308:   PetscScalar             *array;       /* The values in the element vector */
309:   EdgeContext              bdCtx;       /* A context wrapper to communicate the midnode of an edge */
310:   int                      field, edge, midNode;
311:   int                      fieldIdx, bd, bdEdge;
312: #ifdef PETSC_USE_BOPT_g
313:   PetscTruth               opt;
314: #endif
315:   int                      ierr;

318:   /* Setup element vector for the lower dimensional system */
319:   ierr  = ElementVecCreate(grid->comm, elemSize, &vec);CHKERRQ(ierr);
320:   array = vec->array;

322:   /* Setup user context */
323:   bdCtx.ctx = ctx;

325:   /* Our problem here is that "edges" are not data structures like "elements". The element
326:      holds the midnodes which appear on it, but edges do not. Thus we must pass the midnode
327:      number to the discretization, which we do using a context wrapper. Unfortunately, the
328:      row indices were derived from elements, so we must introduce another numbering function
329:      which operates on nodes alone. The midnode number is found by a search of the elements
330:      (sketched after this function), which could certainly be improved with geometric hints.
331:      We might also assume that it is the node lying between the two endpoints in the
332:      bdNodes[] array. In addition, the boundary variable ordering is relative to boundary
333:      node numbers, so the node number must be converted before calling the numbering
334:      function. This could be sped up by placing boundary node numbers in the bdEdges[] array instead. */

336:   /* Loop over boundary edges */
337:   MeshGetPartition(mesh, &part);
338:   PartitionGetStartEdge(part, &firstEdge);
339:   for(bd = 0, bdEdge = 0; bd < grid->numBd; bd++) {
340:     for(bdEdge = tri->bdEdgeBegin[bd]; bdEdge < tri->bdEdgeBegin[bd+1]; bdEdge++) {
341:       /* Check that edge is on this processor */
342:       edge = bdEdges[bdEdge] - firstEdge;
343:       if ((edge < 0) || (edge >= numEdges)) continue;

345:       /* Search for midnode on edge */
346:       midNode = -1;
347:       MeshGetMidnodeFromEdge(mesh, edge, &midNode);
348:       bdCtx.midnode = midNode;

350:       /* Initialize element matrix */
351:       ElementVecZero(vec);

353:       for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
354:         field = fields[fieldIdx];
355:         DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc->bdDisc, mesh, f, alpha, edge, &array[elemStart[field]], &bdCtx);
356: 
359:       }

361:       /* Setup global row and column indices */
362:       GridCalcBoundaryElementVecIndices(grid, bd, edge, midNode, grid->bdOrder, PETSC_FALSE, vec);
363: #ifdef PETSC_USE_BOPT_g
364:       PetscOptionsHasName(PETSC_NULL, "-trace_vec_bd_assembly", &opt);
365:       if (opt == PETSC_TRUE) {
366:         int var;

368:         for(var = 0; var < vec->reduceSize; var++)
369:           PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
370:       }
371: #endif
372:       /* Put values in global vector */
373:       ElementVecSetValues(vec, v, ADD_VALUES);
374:     }
375:   }
376: #ifdef PETSC_USE_BOPT_g
377:   if (bdEdge != mesh->numBdEdges) SETERRQ(PETSC_ERR_PLIB, "Invalid boundary edge numbering");
378: #endif

380:   VecAssemblyBegin(v);
381:   VecAssemblyEnd(v);

383:   /* Cleanup */
384:   ElementVecDestroy(vec);

386:   return(0);
387: }
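The brute-force midnode search mentioned in the comment above can be pictured as a scan of
the triangle connectivity. A hedged sketch, assuming an illustrative layout of six nodes
per triangle (three corners followed by the three edge midnodes); this is not the actual
Mesh_Triangular layout:

/* faces[6*f+0..2] are corners; faces[6*f+3+i] is the midnode on the edge
   between corners i and (i+1)%3 (hypothetical layout) */
static int FindMidnode(int numFaces, int *faces, int start, int end)
{
  int face, i, c0, c1;

  for(face = 0; face < numFaces; face++) {
    for(i = 0; i < 3; i++) {
      c0 = faces[face*6 + i];
      c1 = faces[face*6 + (i+1)%3];
      if (((c0 == start) && (c1 == end)) || ((c0 == end) && (c1 == start))) {
        return faces[face*6 + 3 + i];
      }
    }
  }
  return -1; /* edge not found on this processor */
}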

389: int GVecEvaluateBoundaryFunctionGalerkinCollective_Triangular_2D(Grid grid, GVec v, int numFields, int *fields,
390:                                                                  LocalVarOrdering locOrder, PointFunction f,
391:                                                                  PetscScalar alpha, void *ctx)
392: {
393:   Mesh                     mesh      = grid->mesh;
394:   Partition                part;
395:   Mesh_Triangular         *tri       = (Mesh_Triangular *) mesh->data;
396:   int                      elemSize  = locOrder->elemSize;
397:   int                     *elemStart = locOrder->elemStart;
398:   int                      numEdges  = mesh->numEdges;
399:   int                     *bdEdges   = tri->bdEdges;
400:   int                      firstEdge;
401:   ElementVec               vec;         /* The element vector */
402:   PetscScalar             *array;       /* The values in the element vector */
403:   EdgeContext              bdCtx;       /* A context wrapper to communicate the midnode of an edge */
404:   int                      field, edge, midnode;
405:   int                      fieldIdx, bd, bdEdge;
406: #ifdef PETSC_USE_BOPT_g
407:   PetscTruth               opt;
408: #endif
409:   int                      ierr;

412:   /* Setup element vector for the lower dimensional system */
413:   ierr  = ElementVecCreate(grid->comm, elemSize, &vec);CHKERRQ(ierr);
414:   array = vec->array;

416:   /* Setup user context */
417:   bdCtx.ctx = ctx;

419:   /* Our problem here is that "edges" are not data structures like "elements". The element
420:      holds the midnodes which appear on it, but edges do not. Thus we must pass the midnode
421:      number to the discretization, which we do using a context wrapper. Unfortunately, the
422:      row indices were derived from elements, so we must introduce another numbering function
423:      which operates on nodes alone. The midnode number is found by a search of the elements,
424:      which could certainly be improved with geometric hints. We might also assume that it
425:      is the node lying between the two endpoints in the bdNodes[] array. In addition, the
426:      boundary variable ordering is relative to boundary node numbers, so the node number
427:      must be converted before calling the numbering function. This could be sped up by
428:      placing boundary node numbers in the bdEdges[] array instead. */

430:   /* Loop over boundary edges */
431:   MeshGetPartition(mesh, &part);
432:   PartitionGetStartEdge(part, &firstEdge);
433:   for(bd = 0, bdEdge = 0; bd < grid->numBd; bd++) {
434:     for(bdEdge = tri->bdEdgeBegin[bd]; bdEdge < tri->bdEdgeBegin[bd+1]; bdEdge++) {
435:       /* Check that edge is on this processor */
436:       edge = bdEdges[bdEdge] - firstEdge;
437:       if ((edge < 0) || (edge >= numEdges)) {
438:         for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
439:           field = fields[fieldIdx];
440:           ierr  = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc->bdDisc, mesh, f, 0.0, -1, PETSC_NULL, &bdCtx);CHKERRQ(ierr);
441: 
442:         }
443:         continue;
444:       }

446:       /* Locate midnode on edge */
447:       midnode = -1;
448:       MeshGetMidnodeFromEdge(mesh, edge, &midnode);
449:       bdCtx.midnode = midnode;
450: #ifdef PETSC_USE_BOPT_g
451:       if (tri->markers[midnode] != tri->bdMarkers[bd])
452:         SETERRQ4(PETSC_ERR_ARG_WRONG, "Invalid midnode %d has marker %d on boundary %d (%d)",
453:                  midnode, tri->markers[midnode], bd, tri->bdMarkers[bd]);
454: #endif

456:       /* Initialize element matrix */
457:       ElementVecZero(vec);

459:       for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
460:         field = fields[fieldIdx];
461:         DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc->bdDisc, mesh, f, alpha, edge, &array[elemStart[field]], &bdCtx);
462: 
465:       }

467:       /* Setup global row and column indices */
468:       GridCalcBoundaryElementVecIndices(grid, bd, edge, midnode, grid->bdOrder, PETSC_FALSE, vec);
469: #ifdef PETSC_USE_BOPT_g
470:       PetscOptionsHasName(PETSC_NULL, "-trace_vec_bd_assembly", &opt);
471:       if (opt == PETSC_TRUE) {
472:         int var;

474:         for(var = 0; var < vec->reduceSize; var++)
475:           PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
476:       }
477: #endif
478:       /* Put values in global vector */
479:       ElementVecSetValues(vec, v, ADD_VALUES);
480:     }
481:   }
482: #ifdef PETSC_USE_BOPT_g
483:   if (bdEdge != mesh->numBdEdges) SETERRQ(PETSC_ERR_PLIB, "Invalid boundary edge numbering");
484: #endif

486:   VecAssemblyBegin(v);
487:   VecAssemblyEnd(v);

489:   /* Cleanup */
490:   ElementVecDestroy(vec);

492:   return(0);
493: }

495: int GVecEvaluateNonlinearOperatorGalerkin_Triangular_2D(Grid grid, GVec v, GVec x, GVec y, int numFields, int *fields,
496:                                                         LocalVarOrdering locOrder, NonlinearOperator op, PetscScalar alpha,
497:                                                         PetscTruth isALE, void *ctx)
498: {
499:   Mesh         mesh;
500:   Partition    part;
501:   MeshMover    mover;
502:   int         *elemStart     = locOrder->elemStart;
503:   ElementVec   vec           = grid->vec;
504:   PetscScalar *array         = vec->array;
505:   ElementVec   ghostVec      = grid->ghostElementVec; /* The local solution vector */
506:   PetscScalar *ghostArray    = ghostVec->array;       /* The values in the ghost element vector */
507:   PetscTruth   reduceElement = grid->reduceElementArgs;
508:   Grid         ALEGrid;                             /* The grid describing the mesh velocity */
509:   Vec          appVec;                              /* The local vec for y */
510:   ElementVec   elemAppVec;                          /* The element vec for y */
511:   PetscScalar *appArray;                            /* The values in elemAppVec */
512:   ElementVec   MeshALEVec;                          /* ALE velocity vector with mesh discretization */
513:   ElementVec   ALEVec;                              /* ALE velocity vector */
514:   PetscScalar *ALEArray;                            /* The values in the ALE element vector */
515:   PetscScalar *nonlinearArgs[2];
516:   PetscTruth   ALEActive;
517:   int          numElements;
518:   int          field, fieldIdx, elem;
519: #ifdef PETSC_USE_BOPT_g
520:   PetscTruth   opt;
521: #endif
522:   int          ierr;

525:   GridGetMesh(grid, &mesh);
526:   MeshGetPartition(mesh, &part);
527:   if (grid->ALEActive && (isALE == PETSC_TRUE)) {
528:     ALEActive = PETSC_TRUE;
529:     MeshGetMover(mesh, &mover);
530:     MeshMoverGetVelocityGrid(mover, &ALEGrid);
531:   } else {
532:     ALEActive = PETSC_FALSE;
533:   }
534:   /* Fill the local solution vectors */
535:   if (x != PETSC_NULL) {
536:     GridGlobalToLocal(grid, INSERT_VALUES, x);
537:   }
538:   VecDuplicate(grid->ghostVec, &appVec);
539:   ElementVecDuplicate(ghostVec, &elemAppVec);
540:   if (y != PETSC_NULL) {
541:     GridGlobalToLocalGeneral(grid, y, appVec, INSERT_VALUES, grid->ghostScatter);
542:   }
543:   appArray = elemAppVec->array;

545:   /* Setup ALE variables */
546:   if (ALEActive == PETSC_TRUE) {
547:     /* Notice that the ALEArray is from this grid, not the mesh velocity grid */
548:     MeshALEVec = ALEGrid->vec;
549:     ierr       = ElementVecDuplicate(grid->vec, &ALEVec);CHKERRQ(ierr);
550:     ALEArray   = ALEVec->array;
551:   } else {
552:     MeshALEVec = PETSC_NULL;
553:     ALEArray   = PETSC_NULL;
554:   }

556:   /* Loop over elements */
557:   PartitionGetNumElements(part, &numElements);
558:   for(elem = 0; elem < numElements; elem++) {
559:     /* Initialize element vector */
560:     ElementVecZero(vec);

562:     /* Setup local row and column indices */
563:     GridCalcLocalElementVecIndices(grid, elem, ghostVec);
564:     ElementVecDuplicateIndices(ghostVec, elemAppVec);

566:     /* Setup local solution vector */
567:     GridLocalToElement(grid, ghostVec);
568:     GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, grid->reduceSystem, reduceElement, elemAppVec);

570:     /* Setup ALE variables */
571:     if (ALEActive == PETSC_TRUE) {
572:       GridCalcLocalElementVecIndices(ALEGrid, elem, MeshALEVec);
573:       GridLocalToElement(ALEGrid, MeshALEVec);
574:     }

576:     /* Get contribution to the element vector from each discretization */
577:     for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
578:       field            = fields[fieldIdx];
579:       nonlinearArgs[0] = &ghostArray[elemStart[field]];
580:       nonlinearArgs[1] = &appArray[elemStart[field]];
581:       if (ALEActive == PETSC_TRUE) {
583:         GridInterpolateElementVec(ALEGrid, 0, MeshALEVec, grid, field, ALEVec);
584:         DiscretizationEvaluateNonlinearALEOperatorGalerkin(grid->fields[field].disc, mesh, op, alpha, elem, 2, nonlinearArgs,
585:                                                                   ALEArray, &array[elemStart[field]], ctx);
586: 
587:       } else {
588:         DiscretizationEvaluateNonlinearOperatorGalerkin(grid->fields[field].disc, mesh, op, alpha, elem, 2, nonlinearArgs,
589:                                                                &array[elemStart[field]], ctx);
590: 
591:       }
594:     }

596:     /* Setup global row and column indices */
597:     GridCalcElementVecIndices(grid, elem, vec);
598: #ifdef PETSC_USE_BOPT_g
599:     PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
600:     if (opt == PETSC_TRUE) {
601:       int var;

603:       for(var = 0; var < vec->reduceSize; var++)
604:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
605:     }
606: #endif
607:     /* Put values in global vector */
608:     ElementVecSetValues(vec, v, ADD_VALUES);
609:   }

611:   /* Cleanup ALE variables */
612:   if (ALEActive == PETSC_TRUE) {
613:     ElementVecDestroy(ALEVec);
614:   }
615:   /* Cleanup additional input vectors */
616:   VecDestroy(appVec);
617:   ElementVecDestroy(elemAppVec);
618:   VecAssemblyBegin(v);
619:   VecAssemblyEnd(v);
620:   return(0);
621: }

623: int GVecEvaluateOperatorGalerkin_Triangular_2D(Grid grid, GVec v, GVec x, GVec y, VarOrdering sOrder, LocalVarOrdering sLocOrder,
624:                                                VarOrdering tOrder, LocalVarOrdering tLocOrder, int op, PetscScalar alpha, void *ctx)
625: {
626:   Mesh             mesh          = grid->mesh;
627:   PetscTruth       reduceSystem  = grid->reduceSystem;
628:   PetscTruth       reduceElement = grid->reduceElement;
629:   int              sElemSize     = sLocOrder->elemSize;
630:   int             *sElemStart    = sLocOrder->elemStart;
631:   int              tElemSize     = tLocOrder->elemSize;
632:   int             *tElemStart    = tLocOrder->elemStart;
633:   FieldClassMap    sMap,         tMap;
634:   int              numSFields,   numTFields;
635:   int             *sFields,     *tFields;
636:   PetscTruth       sConstrained, tConstrained;
637:   Vec              ghostVec;     /* The local ghost vector for x (usually the solution) */
638:   VecScatter       ghostScatter; /* The scatter from x to ghostVec */
639:   Vec              appVec;       /* The local ghost vector for y (usually the application vector) */
640:   VecScatter       appScatter;   /* The scatter from y to appVec */
641:   ElementMat       mat;
642:   ElementVec       elemGhostVec, elemAppVec, vec;
643:   PetscScalar     *ghostArray, *appArray, *matArray, *array;
644:   int              numElements;
645:   int              f, sField, tField, elem;
646: #ifdef PETSC_USE_BOPT_g
647:   PetscTruth       opt;
648: #endif
649:   int              ierr;

652:   MeshGetInfo(mesh, PETSC_NULL, PETSC_NULL, PETSC_NULL, &numElements);
653:   VarOrderingGetClassMap(sOrder, &sMap);
654:   VarOrderingGetClassMap(tOrder, &tMap);
655:   numSFields   = sMap->numFields;
656:   sFields      = sMap->fields;
657:   sConstrained = sMap->isConstrained;
658:   numTFields   = tMap->numFields;
659:   tFields      = tMap->fields;
660:   tConstrained = tMap->isConstrained;
661:   /* Setup reduction */
662:   (*grid->ops->gridsetupghostscatter)(grid, tOrder, &ghostVec, &ghostScatter);
663:   (*grid->ops->gridsetupghostscatter)(grid, sOrder, &appVec,   &appScatter);
664:   /* Setup element vector and matrix */
665:   if (tConstrained == PETSC_TRUE) {
666:     for(f = 0; f < numTFields; f++) {
667:       if (grid->fields[tFields[f]].isConstrained == PETSC_TRUE)
668:         tElemSize += grid->fields[tFields[f]].disc->funcs*grid->fields[tFields[f]].constraintCompDiff;
669:     }
670:   }
671:   if (sConstrained == PETSC_TRUE) {
672:     for(f = 0; f < numSFields; f++) {
673:       if (grid->fields[sFields[f]].isConstrained == PETSC_TRUE)
674:         sElemSize += grid->fields[sFields[f]].disc->funcs*grid->fields[sFields[f]].constraintCompDiff;
675:     }
676:   }
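  /* Worked example (hypothetical numbers): a constrained field with 6 shape
     functions per element and constraintCompDiff = 1 adds 6*1 = 6 entries to
     the element block size computed above. */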
677:   ierr       = ElementVecCreate(grid->comm, tElemSize, &vec);CHKERRQ(ierr);
678:   array      = vec->array;
679:   ierr       = ElementVecDuplicate(vec, &elemGhostVec);CHKERRQ(ierr);
680:   ghostArray = elemGhostVec->array;
681:   ierr       = ElementVecCreate(grid->comm, sElemSize, &elemAppVec);CHKERRQ(ierr);
682:   appArray   = elemAppVec->array;
683:   ierr       = ElementMatCreate(grid->comm, tElemSize, sElemSize, &mat);CHKERRQ(ierr);
684:   matArray   = mat->array;

686:   /* Fill the local solution vectors */
687:   GridGlobalToLocalGeneral(grid, x, ghostVec, INSERT_VALUES, ghostScatter);
688:   GridGlobalToLocalGeneral(grid, y, appVec,   INSERT_VALUES, appScatter);

690:   /* Setup the operator with information about the test function space */
691:   for(f = 0; f < numSFields; f++) {
692:     grid->fields[sFields[f]].disc->operators[op]->test = grid->fields[tFields[f]].disc;
693:   }

695:   /* Loop over elements */
696:   for(elem = 0; elem < numElements; elem++) {
697:     /* Initialize element vector */
698:     ElementVecZero(vec);
699:     vec->reduceSize          = tLocOrder->elemSize;
700:     elemGhostVec->reduceSize = tLocOrder->elemSize;
701:     elemAppVec->reduceSize   = sLocOrder->elemSize;

703:     /* Setup local row indices */
704:     GridCalcGeneralElementVecIndices(grid, elem, tOrder, PETSC_NULL, PETSC_TRUE, elemGhostVec);
705:     GridCalcGeneralElementVecIndices(grid, elem, sOrder, PETSC_NULL, PETSC_TRUE, elemAppVec);
706:     /* Setup local vectors */
707:     GridLocalToElementGeneral(grid, ghostVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemGhostVec);
708:     GridLocalToElementGeneral(grid, appVec,   grid->bdReduceVecCur, reduceSystem, reduceElement, elemAppVec);
709:     /* Must transform to unconstrained variables for element integrals */
710:     GridProjectElementVec(grid, mesh, elem, tOrder, PETSC_FALSE, elemGhostVec);
711:     GridProjectElementVec(grid, mesh, elem, sOrder, PETSC_FALSE, elemAppVec);

713:     for(f = 0; f < numSFields; f++) {
714:       sField = sFields[f];
715:       tField = tFields[f];
716:       /* Get contribution to the element vector from the linear operator */
717:       ElementMatZero(mat);
718:       DiscretizationEvaluateOperatorGalerkinMF(grid->fields[sField].disc, mesh, sElemSize, tElemStart[tField], sElemStart[sField],
719:                                                       op, alpha, elem, &ghostArray[sElemStart[sField]],
720:                                                       &appArray[sElemStart[sField]], array, matArray, ctx);
721: 
724:     }

726:     /* Setup global row indices, with reduction if necessary */
727:     GridCalcGeneralElementVecIndices(grid, elem, tOrder, PETSC_NULL, PETSC_FALSE, vec);
728: #ifdef PETSC_USE_BOPT_g
729:     PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
730:     if (opt == PETSC_TRUE) {
731:       int var;

733:       for(var = 0; var < vec->reduceSize; var++)
734:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
735:     }
736: #endif
737:     /* Put values in global vector */
738:     ElementVecSetValues(vec, v, ADD_VALUES);
739:   }

741:   VecDestroy(ghostVec);
742:   VecScatterDestroy(ghostScatter);
743:   VecDestroy(appVec);
744:   VecScatterDestroy(appScatter);
745:   ElementVecDestroy(elemGhostVec);
746:   ElementVecDestroy(elemAppVec);
747:   ElementVecDestroy(vec);
748:   ElementMatDestroy(mat);
749:   VecAssemblyBegin(v);
750:   VecAssemblyEnd(v);
751:   return(0);
752: }

754: int GVecEvaluateSystemMatrix_Triangular_2D(Grid grid, GVec x, GVec y, GVec f, void *ctx)
755: {
756:   Mesh                  mesh                = grid->mesh;
757:   int                   numElements         = mesh->numFaces;
758:   int                   numMatOps           = grid->numMatOps;       /* The number of operators in the matrix */
759:   GridOp               *matOps              = grid->matOps;          /* The operators in the system matrix */
760:   PetscTruth            reduceSystem        = grid->reduceSystem;
761:   PetscTruth            reduceElement       = grid->reduceElement;
762:   PetscTruth            explicitConstraints = grid->explicitConstraints;
763:   PetscConstraintObject constCtx            = grid->constraintCtx;   /* The constraint object */
764:   int                   numNewFields        = grid->numNewFields;    /* The number of new fields added by constraints */
765:   ElementVec            vec                 = grid->vec;             /* The element vector */
766:   PetscScalar          *array               = vec->array;            /* The values in the element vector */
767:   ElementMat            mat                 = grid->mat;             /* The element matrix */
768:   PetscScalar          *matArray            = mat->array;            /* The values in the element matrix */
769:   Vec                   ghostVec            = grid->ghostVec;        /* The local solution vector */
770:   ElementVec            elemGhostVec        = grid->ghostElementVec; /* Local solution vector */
771:   PetscScalar          *ghostArray          = elemGhostVec->array;   /* The values in the ghost element vector */
772:   int                   numFields           = grid->cm->numFields;   /* The number of fields in the calculation */
773:   int                  *fields              = grid->cm->fields;      /* The fields participating in the calculation */
774:   LocalVarOrdering      locOrder            = grid->locOrder;        /* The default local variable ordering */
775:   int                   elemSize            = locOrder->elemSize;    /* The number of shape functions in the element matrix */
776:   int                  *elemStart           = locOrder->elemStart;   /* The offset of each field in the element matrix */
777:   int                   rank                = mesh->part->rank;      /* The processor rank */
778:   MeshMover             mover;
779:   Grid                  ALEGrid;                                     /* The grid describing the mesh velocity */
780:   VarOrdering           order;                                       /* The default variable ordering */
781:   ElementVec            MeshALEVec;                                  /* ALE velocity vector with mesh discretization */
782:   ElementVec            ALEVec;                                      /* ALE velocity vector */
783:   PetscScalar          *ALEArray;                                    /* The values in the ALE element vector */
784:   Vec                   appVec;                                      /* The local vec for y */
785:   ElementVec            elemAppVec;                                  /* The element vec for y */
786:   PetscScalar          *appArray;                                    /* The values in elemAppVec */
787:   int                   elem, fieldIndex;
788:   int                   newComp = 0;
789:   int                   sField, tField, op, newField, row, col;
790: #ifdef PETSC_USE_BOPT_g
791:   PetscTruth            opt;
792: #endif
793:   int                   ierr;

796:   MeshGetMover(mesh, &mover);
797:   MeshMoverGetVelocityGrid(mover, &ALEGrid);
798:   PetscObjectQuery((PetscObject) x, "Order", (PetscObject *) &order);
799:   /* Right now, we ignore the preconditioner */
800:   /* Fill the local solution vectors */
801:   if (x != PETSC_NULL) {
802:     GridGlobalToLocal(grid, INSERT_VALUES, x);
803:   }
804:   VecDuplicate(ghostVec, &appVec);
805:   GridGlobalToLocalGeneral(grid, y, appVec, INSERT_VALUES, grid->ghostScatter);
806:   ElementVecDuplicate(elemGhostVec, &elemAppVec);
807:   appArray = elemAppVec->array;

809:   /* Setup ALE variables */
810:   if (grid->ALEActive == PETSC_TRUE) {
811:     /* Notice that the ALEArray is from this grid, not the mesh velocity grid */
812:     MeshALEVec = ALEGrid->vec;
813:     ierr       = ElementVecDuplicate(grid->vec, &ALEVec);CHKERRQ(ierr);
814:     ALEArray   = ALEVec->array;
815:   } else {
816:     MeshALEVec = PETSC_NULL;
817:     ALEArray   = PETSC_NULL;
818:   }

820:   /* Loop over elements */
821:   for(elem = 0; elem < numElements; elem++) {
823:     /* Initialize element vector */
824:     ElementVecZero(vec);
825:     vec->reduceSize          = locOrder->elemSize;
826:     elemGhostVec->reduceSize = locOrder->elemSize;
827:     elemAppVec->reduceSize   = locOrder->elemSize;

829:     /* Setup global row and column indices */
830:     GridCalcLocalElementVecIndices(grid, elem, elemGhostVec);
831:     ElementVecDuplicateIndices(elemGhostVec, elemAppVec);
832:     elemAppVec->reduceSize = elemGhostVec->reduceSize;

834:     /* Setup local solution vector */
835:     GridLocalToElementGeneral(grid, ghostVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemGhostVec);
836:     GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemAppVec);

838:     /* Must transform to unconstrained variables for element integrals */
839:     GridProjectElementVec(grid, mesh, elem, order, PETSC_FALSE, elemGhostVec);
840:     GridProjectElementVec(grid, mesh, elem, order, PETSC_FALSE, elemAppVec);

842:     /* Setup ALE variables */
843:     if (grid->ALEActive == PETSC_TRUE) {
844:       GridCalcLocalElementVecIndices(ALEGrid, elem, MeshALEVec);
845:       GridLocalToElement(ALEGrid, MeshALEVec);
846:     }

848:     /* Calculate the contribution to the element matrix from each field */
849:     for(op = 0; op < numMatOps; op++) {
850:       sField = matOps[op].field;
851:       tField = grid->fields[sField].disc->operators[matOps[op].op]->test->field;
852:       if (grid->fields[sField].isActive) {
853:         ElementMatZero(mat);
854:         if (matOps[op].isALE) {
855:           GridInterpolateElementVec(ALEGrid, 0, MeshALEVec, grid, sField, ALEVec);
856:           DiscretizationEvaluateALEOperatorGalerkinMF(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
857:                                                              matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
858:                                                              &appArray[elemStart[sField]], ALEArray, array, matArray, ctx);
859: 
860:         } else {
861:           DiscretizationEvaluateOperatorGalerkinMF(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
862:                                                           matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
863:                                                           &appArray[elemStart[sField]], array, matArray, ctx);
864: 
865:         }
868:       }
869:     }

871:     /* Setup global numbering, with reduction if necessary */
872:     GridCalcGeneralElementVecIndices(grid, elem, order, PETSC_NULL, PETSC_FALSE, vec);
873: #ifdef PETSC_USE_BOPT_g
874:     PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
875:     if (opt == PETSC_TRUE) {
876:       int var;
877:       for(var = 0; var < vec->reduceSize; var++)
878:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
879:     }
880: #endif
881:     /* Put values in global vector */
882:     ElementVecSetValues(vec, f, ADD_VALUES);
883:   }

885:   /* Evaluate self-interaction of new fields created by constraints */
886:   if (explicitConstraints == PETSC_TRUE) {
887:     /* WARNING: This only accommodates 1 constrained field */
888:     /* Get constraint information */
889:     for(fieldIndex = 0; fieldIndex < numFields; fieldIndex++) {
890:       sField = fields[fieldIndex];
891:       if (grid->fields[sField].isConstrained == PETSC_TRUE) {
892:         newComp = grid->fields[sField].numComp + grid->fields[sField].constraintCompDiff;
893:         break;
894:       }
895:     }
896:     /* Calculate self-interaction */
897:     for(newField = 0; newField < numNewFields; newField++) {
898:       /* Initialize element matrix and vector */
899:       ElementMatZero(mat);
900:       ElementVecZero(vec);
901:       mat->reduceRowSize     = newComp;
902:       mat->reduceColSize     = newComp;
903:       elemAppVec->reduceSize = newComp;
904:       vec->reduceSize        = newComp;

906:       /* Calculate the indices and contribution to the element matrix from the new field */
907:       (*constCtx->ops->newelemmat)(constCtx, order, newField, mat);
908: #ifdef PETSC_USE_BOPT_g
909:       PetscOptionsHasName(PETSC_NULL, "-trace_mat_assembly_constrained", &opt);
910:       if (opt == PETSC_TRUE) {
911:         ElementMatView(mat, PETSC_VIEWER_STDOUT_(mat->comm));
912:       }
913: #endif
914:       /* Global vector indices are the same as the matrix indices */
915:       for(row = 0; row < mat->reduceRowSize; row++) {
916:         vec->indices[row] = mat->rowIndices[row];
917:       }
918:       /* Local vector indices can be calculated directly from the field number */
919:       elemAppVec->indices[0] = grid->constraintOrder->firstVar[rank+1] - (numNewFields - newField)*newComp;
920:       for(row = 1; row < elemAppVec->reduceSize; row++) {
921:         elemAppVec->indices[row] = elemAppVec->indices[row-1]+1;
922:       }
923:       /* Retrieve element vector values from y */
924:       GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemAppVec);
925:       /* Multiply element matrix and element vector */
926:       for(row = 0; row < mat->reduceRowSize; row++) {
927:         for(col = 0; col < mat->reduceColSize; col++) {
928:           vec->array[row] += mat->array[row*mat->reduceColSize+col]*elemAppVec->array[col];
929:         }
930:       }
931:       PetscLogFlops(2*mat->reduceRowSize*mat->reduceColSize);
932:       /* Put values in global vector */
933:       ElementVecSetValues(vec, f, ADD_VALUES);
936:     }
937:   }

939:   /* Reset element vector */
940:   elemGhostVec->reduceSize = locOrder->elemSize;

942:   VecDestroy(appVec);
943:   ElementVecDestroy(elemAppVec);
944:   if (grid->ALEActive == PETSC_TRUE) {
945:     ElementVecDestroy(ALEVec);
946:   }
947:   VecAssemblyBegin(f);
948:   VecAssemblyEnd(f);
949:   return(0);
950: }
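Since this routine applies the system matrix to y without ever assembling it, it provides
exactly the action needed by a PETSc shell matrix. A hedged sketch of the hookup
(MyGridCtx and MyShellMult are hypothetical; MatCreateShell, MatShellSetOperation, and
MatShellGetContext are the standard shell-matrix calls, and GVec is used interchangeably
with Vec as in the routines above):

typedef struct { Grid grid; GVec x; void *userCtx; } MyGridCtx;

static int MyShellMult(Mat A, Vec y, Vec f)
{
  MyGridCtx *sctx;
  int        ierr;

  ierr = MatShellGetContext(A, (void **) &sctx);CHKERRQ(ierr);
  /* f = J(x) y, evaluated element by element as above */
  ierr = GVecEvaluateSystemMatrix_Triangular_2D(sctx->grid, sctx->x, y, f, sctx->userCtx);CHKERRQ(ierr);
  return(0);
}

/* At setup time (m, n local and M, N global sizes):
     MatCreateShell(comm, m, n, M, N, (void *) &sctx, &A);
     MatShellSetOperation(A, MATOP_MULT, (void (*)(void)) MyShellMult);  */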

952: int GVecEvaluateSystemMatrixDiagonal_Triangular_2D(Grid grid, GVec x, GVec d, void *ctx)
953: {
954:   Mesh                  mesh          = grid->mesh;
955:   int                   numNewFields  = grid->numNewFields;     /* The number of new fields added by constraints */
956:   int                   numMatOps     = grid->numMatOps;        /* The number of operators in the matrix */
957:   GridOp               *matOps        = grid->matOps;           /* The operators in the system matrix */
958:   VarOrdering           constOrder    = grid->constraintOrder;  /* The constrained variable ordering */
959:   PetscTruth            reduceSystem  = grid->reduceSystem;
960:   PetscTruth            reduceElement = grid->reduceElement;
961:   PetscTruth            expConst      = grid->explicitConstraints;
962:   PetscConstraintObject constCtx      = grid->constraintCtx;    /* The constraint object */
963:   int                   numFields     = grid->cm->numFields;    /* The number of fields in the calculation */
964:   int                  *fields        = grid->cm->fields;       /* The fields participating in the calculation */
965:   LocalVarOrdering      locOrder      = grid->locOrder;         /* The default local variable ordering */
966:   int                   elemSize      = locOrder->elemSize;     /* The number of shape functions in the element matrix */
967:   int                  *elemStart     = locOrder->elemStart;    /* The offset of each field in the element matrix */
968:   ElementMat            mat           = grid->mat;              /* The element matrix */
969:   PetscScalar          *array         = mat->array;             /* The values in the element matrix */
970:   Vec                   ghostVec      = grid->ghostVec;         /* The local solution vector */
971:   ElementVec            elemGhostVec  = grid->ghostElementVec;  /* The element vector from ghostVec */
972:   PetscScalar          *ghostArray    = elemGhostVec->array;    /* The values in elemGhostVec */
973:   MeshMover             mover;
974:   Grid                  ALEGrid;                                /* The grid describing the mesh velocity */
975:   VarOrdering           order;                                  /* The default variable ordering */
976:   ElementVec            MeshALEVec;                             /* ALE velocity vector with mesh discretization */
977:   ElementVec            ALEVec;                                 /* ALE velocity vector */
978:   PetscScalar          *ALEArray;                               /* The values in the ALE element vector */
979:   int                   newComp = 0;
980:   int                   numElements;
981:   int                   elem, f, sField, tField, op, newField;
982: #ifdef PETSC_USE_BOPT_g
983:   PetscTruth            opt;
984: #endif
985:   int                   ierr;

988:   MeshGetMover(mesh, &mover);
989:   MeshMoverGetVelocityGrid(mover, &ALEGrid);
990:   MeshGetInfo(mesh, PETSC_NULL, PETSC_NULL, PETSC_NULL, &numElements);
991:   if (expConst == PETSC_TRUE) {
992:     order = grid->constraintOrder;
993:   } else {
994:     order = grid->order;
995:   }
996:   /* Fill the local solution vectors */
997:   if (x != PETSC_NULL) {
998:     GridGlobalToLocal(grid, INSERT_VALUES, x);
999:   }

1001:   /* Setup ALE variables -- No new variables should be ALE so ALEVec is not recalculated */
1002:   if (grid->ALEActive == PETSC_TRUE) {
1003:     /* Notice that the ALEArray is from this grid, not the mesh velocity grid */
1004:     MeshALEVec = ALEGrid->vec;
1005:     ierr       = ElementVecDuplicate(grid->vec, &ALEVec);CHKERRQ(ierr);
1006:     ALEArray   = ALEVec->array;
1007:   } else {
1008:     MeshALEVec = PETSC_NULL;
1009:     ALEArray   = PETSC_NULL;
1010:   }

1012:   /* Loop over elements */
1013:   for(elem = 0; elem < numElements; elem++) {
1014:     /* Initialize element matrix */
1015:     ElementMatZero(mat);
1016:     mat->reduceRowSize       = locOrder->elemSize;
1017:     mat->reduceColSize       = locOrder->elemSize;
1018:     elemGhostVec->reduceSize = locOrder->elemSize;

1020:     /* Setup local row indices for the ghost vector */
1021:     GridCalcLocalElementVecIndices(grid, elem, elemGhostVec);
1022:     /* Setup local solution vector */
1023:     GridLocalToElementGeneral(grid, ghostVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemGhostVec);
1024:     /* Must transform to unconstrained variables for element integrals */
1025:     GridProjectElementVec(grid, mesh, elem, order, PETSC_FALSE, elemGhostVec);

1027:     /* Setup ALE variables */
1028:     if (grid->ALEActive == PETSC_TRUE) {
1029:       GridCalcLocalElementVecIndices(ALEGrid, elem, MeshALEVec);
1030:       GridLocalToElement(ALEGrid, MeshALEVec);
1031:     }

1033:     /* Calculate the contribution to the element matrix from each field */
1034:     for(op = 0; op < numMatOps; op++) {
1035:       sField = matOps[op].field;
1036:       tField = grid->fields[sField].disc->operators[matOps[op].op]->test->field;
1037:       if (grid->fields[sField].isActive) {
1038:         if (matOps[op].isALE) {
1039:           GridInterpolateElementVec(ALEGrid, 0, MeshALEVec, grid, sField, ALEVec);
1040:           DiscretizationEvaluateALEOperatorGalerkin(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
1041:                                                            matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
1042:                                                            ALEArray, array, ctx);
1043: 
1044:         } else {
1045:           DiscretizationEvaluateOperatorGalerkin(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
1046:                                                         matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
1047:                                                         array, ctx);
1048: 
1049:         }
1052:       }
1053:     }

1055:     /* Setup global numbering, with reduction if necessary */
1056:     GridCalcGeneralElementMatIndices(grid, elem, order, order, PETSC_FALSE, mat);
1057: #ifdef PETSC_USE_BOPT_g
1058:     PetscOptionsHasName(PETSC_NULL, "-trace_mat_assembly", &opt);
1059:     if (opt == PETSC_TRUE) {
1060:       ElementMatView(mat, PETSC_VIEWER_STDOUT_(mat->comm));
1061:     }
1062: #endif
1063:     /* Put diagonal values in the global vector */
1064:     ElementMatSetDiagonalValues(mat, d, ADD_VALUES);
1065:   }

1067:   /* Evaluate self-interaction of new fields created by constraints */
1068:   if (expConst == PETSC_TRUE) {
1069:     /* WARNING: This only accommodates 1 constrained field */
1070:     /* Get constraint information */
1071:     for(f = 0; f < numFields; f++) {
1072:       sField = fields[f];
1073:       if (grid->fields[sField].isConstrained == PETSC_TRUE) {
1074:         newComp = grid->fields[sField].numComp + grid->fields[sField].constraintCompDiff;
1075:         break;
1076:       }
1077:     }
1078:     /* Calculate self-interaction */
1079:     for(newField = 0; newField < numNewFields; newField++) {
1080:       /* Initialize element matrix */
1081:       ElementMatZero(mat);
1082:       mat->reduceRowSize = newComp;
1083:       mat->reduceColSize = newComp;

1085:       /* Calculate the indices and contribution to the element matrix from the new field */
1086:       (*constCtx->ops->newelemmat)(constCtx, constOrder, newField, mat);
1087: #ifdef PETSC_USE_BOPT_g
1088:       PetscOptionsHasName(PETSC_NULL, "-trace_mat_assembly_constrained", &opt);
1089:       if (opt == PETSC_TRUE) {
1090:         ElementMatView(mat, PETSC_VIEWER_STDOUT_(mat->comm));
1091:       }
1092: #endif
1093:       /* Put diagonal values in the global vector */
1094:       ElementMatSetDiagonalValues(mat, d, ADD_VALUES);
1097:     }
1098:   }

1100:   /* Assemble the diagonal vector */
1101:   VecAssemblyBegin(d);
1102:   VecAssemblyEnd(d);

1104:   /* Reset element matrix and vector */
1105:   mat->reduceRowSize       = locOrder->elemSize;
1106:   mat->reduceColSize       = locOrder->elemSize;
1107:   elemGhostVec->reduceSize = locOrder->elemSize;

1109:   /* Cleanup */
1110:   if (grid->ALEActive == PETSC_TRUE) {
1111:     ElementVecDestroy(ALEVec);
1112:   }

1114:   return(0);
1115: }