Actual source code: ex13.c
/*$Id: ex13.c,v 1.15 2001/08/07 03:02:34 balay Exp $*/

static char help[] = "Tests PetscObjectPublish().\n\n";

/*T
   Concepts: vectors^assembling vectors;
   Processors: n
T*/

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscis.h     - index sets
     petscsys.h    - system routines       petscviewer.h - viewers
*/
#include "petscvec.h"

int main(int argc,char **argv)
{
  int         i,n,ierr,rank;
  PetscScalar one = 1.0,*array;
  Vec         x,xlocal;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size. Alternatively,
        if we pass the global size and use PETSC_DECIDE for the
        local size, PETSc will choose a reasonable partition, trying
        to put nearly an equal number of elements on each processor.
  */
  ierr = VecCreateMPI(PETSC_COMM_WORLD,rank+4,PETSC_DECIDE,&x);CHKERRQ(ierr);
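  /*
     For illustration only, the alternative just described would read
     as below; the global size of 20 is an arbitrary value chosen for
     this sketch, not part of the original example:

        VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,20,&x);
  */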
  ierr = PetscObjectPublish((PetscObject)x);CHKERRQ(ierr);
  ierr = VecGetLocalSize(x,&n);CHKERRQ(ierr);
  ierr = VecSet(&one,x);CHKERRQ(ierr);

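  /*
     Create a sequential vector on each process. Its length, rank+4,
     matches the local portion of the parallel vector x above, so the
     same loop bound n can be used for both vectors below.
  */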
  ierr = VecCreateSeq(PETSC_COMM_SELF,rank+4,&xlocal);CHKERRQ(ierr);
  ierr = PetscObjectPublish((PetscObject)xlocal);CHKERRQ(ierr);
  ierr = VecSet(&one,xlocal);CHKERRQ(ierr);

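  /*
     Loop forever, repeatedly incrementing the entries of both vectors.
     Because the loop never exits, the program stays alive, presumably
     so that an external tool attached to the objects published with
     PetscObjectPublish() can observe the changing values.
  */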
  while (1) {
    /*
       Access the parallel vector's entries and add to them
    */
    ierr = PetscBarrier((PetscObject)x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&array);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      array[i]++;
    }
    ierr = VecRestoreArray(x,&array);CHKERRQ(ierr);

    /*
       Do the same for the sequential vector; its length equals the
       local size n of the parallel vector
    */
    ierr = VecGetArray(xlocal,&array);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      array[i]++;
    }
    ierr = VecRestoreArray(xlocal,&array);CHKERRQ(ierr);
  }

  /*
     Destroy the vectors (never reached, since the loop above does not
     terminate)
  */
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = VecDestroy(xlocal);CHKERRQ(ierr);

  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}