Actual source code: ex8.c
/*$Id: ex8.c,v 1.24 2001/09/11 16:32:18 bsmith Exp $*/

static char help[] = "Demonstrates using a local ordering to set values into a parallel vector.\n\n";

/*T
   Concepts: vectors^assembling vectors with local ordering;
   Processors: n
T*/

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscis.h     - index sets
     petscsys.h    - system routines       petscviewer.h - viewers
*/
#include "petscvec.h"

int main(int argc,char **argv)
{
  int         i,N,rank,ng,*gindices,rstart,rend;
  PetscScalar one = 1.0;
  Vec         x;

  PetscInitialize(&argc,&argv,(char *)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size.  Alternatively,
        PETSc could determine the vector's distribution if we specify
        just the global size.
  */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,rank+1,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetSize(x,&N);
  VecSet(&one,x);
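
  /*
     A variation (sketch, not executed): specify only the global size and
     let PETSc decide each processor's share.  Here y is a hypothetical
     vector and 16 an arbitrary global size; neither is part of this example.
  */
#if 0
  VecCreate(PETSC_COMM_WORLD,&y);
  VecSetSizes(y,PETSC_DECIDE,16);
  VecSetFromOptions(y);
#endif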

  /*
     Set the local-to-global ordering for the vector.  Each processor
     generates a list of the global indices for each local index.  Note
     that the local indices are just whatever is convenient for a
     particular application.  In this case we treat the vector as lying
     on a one-dimensional grid and have one ghost point on each end of
     the blocks owned by each processor.
  */
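
  /*
     For example, with two processors the local sizes are 1 and 2 (global
     size N = 3): processor 0 gets ng = 3 and, after the periodic wrap
     below, gindices = {2,0,1}; processor 1 gets ng = 4 and
     gindices = {0,1,2,0}.
  */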

  VecGetOwnershipRange(x,&rstart,&rend);
  ng = rend - rstart + 2;
  PetscMalloc(ng*sizeof(int),&gindices);
  gindices[0] = rstart - 1;
  for (i=0; i<ng-1; i++) {
    gindices[i+1] = gindices[i] + 1;
  }
  /* map the first and last points as periodic */
  if (gindices[0]    == -1) gindices[0]    = N - 1;
  if (gindices[ng-1] ==  N) gindices[ng-1] = 0;
  {
    ISLocalToGlobalMapping ltog;
    ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,ng,gindices,&ltog);
    VecSetLocalToGlobalMapping(x,ltog);
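    /*
       Optional check (sketch, not executed): view each processor's
       mapping to verify the ghost layout before it is destroyed.
    */
#if 0
    ISLocalToGlobalMappingView(ltog,PETSC_VIEWER_STDOUT_SELF);
#endif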
    ISLocalToGlobalMappingDestroy(ltog);
  }
  PetscFree(gindices);

  /*
     Set the vector elements.
      - In this case, we set the values using the local ordering.
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i=0; i<ng; i++) {
    VecSetValuesLocal(x,1,&i,&one,ADD_VALUES);
  }
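
  /*
     For comparison, a sketch (not executed): the same contribution could
     be made in the global numbering with VecSetValues().  The variable
     gidx below is a hypothetical global index, not defined in this
     example; the mapping installed above lets us use local indices
     instead, as in the loop just before this.
  */
#if 0
  VecSetValues(x,1,&gidx,&one,ADD_VALUES);
#endif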

  /*
     Assemble the vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  VecAssemblyBegin(x);
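  /* local work that does not touch x could go here, overlapping with
     the communication started by VecAssemblyBegin() */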
  VecAssemblyEnd(x);

  /*
     View the vector; then destroy it.
  */
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
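
  /*
     A variation (sketch, not executed): VecView() accepts other viewers,
     e.g. a graphical one.
  */
#if 0
  VecView(x,PETSC_VIEWER_DRAW_WORLD);
#endif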
  VecDestroy(x);

  PetscFinalize();
  return 0;
}