/* Actual source code: is.c */
/*$Id: is.c,v 1.15 2001/08/06 21:15:46 bsmith Exp $*/
/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix vector
    product is handled "implicitly".

    We provide:
         MatMult()

    Currently this allows for only one subdomain per processor.
*/
#include "src/mat/impls/is/is.h"
17: int MatDestroy_IS(Mat A)
18: {
19: int ierr;
20: Mat_IS *b = (Mat_IS*)A->data;
23: if (b->A) {
24: MatDestroy(b->A);
25: }
26: if (b->ctx) {
27: VecScatterDestroy(b->ctx);
28: }
29: if (b->x) {
30: VecDestroy(b->x);
31: }
32: if (b->y) {
33: VecDestroy(b->y);
34: }
35: if (b->mapping) {
36: ISLocalToGlobalMappingDestroy(b->mapping);
37: }
38: PetscFree(b);
39: return(0);
40: }
42: int MatMult_IS(Mat A,Vec x,Vec y)
43: {
44: int ierr;
45: Mat_IS *is = (Mat_IS*)A->data;
46: PetscScalar zero = 0.0;
49: /* scatter the global vector x into the local work vector */
50: VecScatterBegin(x,is->x,INSERT_VALUES,SCATTER_FORWARD,is->ctx);
51: VecScatterEnd(x,is->x,INSERT_VALUES,SCATTER_FORWARD,is->ctx);
53: /* multiply the local matrix */
54: MatMult(is->A,is->x,is->y);
56: /* scatter product back into global memory */
57: VecSet(&zero,y);
58: VecScatterBegin(is->y,y,ADD_VALUES,SCATTER_REVERSE,is->ctx);
59: VecScatterEnd(is->y,y,ADD_VALUES,SCATTER_REVERSE,is->ctx);
61: return(0);
62: }
64: int MatView_IS(Mat A,PetscViewer viewer)
65: {
66: Mat_IS *a = (Mat_IS*)A->data;
67: int ierr;
68: PetscViewer sviewer;
71: PetscViewerGetSingleton(viewer,&sviewer);
72: MatView(a->A,sviewer);
73: PetscViewerRestoreSingleton(viewer,&sviewer);
74: return(0);
75: }
77: int MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping mapping)
78: {
79: int ierr,n;
80: Mat_IS *is = (Mat_IS*)A->data;
81: IS from,to;
82: Vec global;
85: is->mapping = mapping;
86: PetscObjectReference((PetscObject)mapping);
88: /* Create the local matrix A */
89: ISLocalToGlobalMappingGetSize(mapping,&n);
90: MatCreate(PETSC_COMM_SELF,n,n,n,n,&is->A);
91: PetscObjectSetOptionsPrefix((PetscObject)is->A,"is");
92: MatSetFromOptions(is->A);
94: /* Create the local work vectors */
95: VecCreateSeq(PETSC_COMM_SELF,n,&is->x);
96: VecDuplicate(is->x,&is->y);
98: /* setup the global to local scatter */
99: ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);
100: ISLocalToGlobalMappingApplyIS(mapping,to,&from);
101: VecCreateMPI(A->comm,A->n,A->N,&global);
102: VecScatterCreate(global,from,is->x,to,&is->ctx);
103: VecDestroy(global);
104: ISDestroy(to);
105: ISDestroy(from);
106: return(0);
107: }
110: int MatSetValuesLocal_IS(Mat A,int m,int *rows,int n,int *cols,PetscScalar *values,InsertMode addv)
111: {
112: int ierr;
113: Mat_IS *is = (Mat_IS*)A->data;
116: MatSetValues(is->A,m,rows,n,cols,values,addv);
117: return(0);
118: }
120: int MatZeroRowsLocal_IS(Mat A,IS isrows,PetscScalar *diag)
121: {
122: Mat_IS *is = (Mat_IS*)A->data;
123: int ierr,i,n,*rows;
124: PetscScalar *array;
128: {
129: /*
130: Set up is->x as a "counting vector". This is in order to MatMult_IS
131: work properly in the interface nodes.
132: */
133: Vec counter;
134: PetscScalar one=1.0, zero=0.0;
135: VecCreateMPI(A->comm,A->n,A->N,&counter);
136: VecSet(&zero,counter);
137: VecSet(&one,is->x);
138: VecScatterBegin(is->x,counter,ADD_VALUES,SCATTER_REVERSE,is->ctx);
139: VecScatterEnd (is->x,counter,ADD_VALUES,SCATTER_REVERSE,is->ctx);
140: VecScatterBegin(counter,is->x,INSERT_VALUES,SCATTER_FORWARD,is->ctx);
141: VecScatterEnd (counter,is->x,INSERT_VALUES,SCATTER_FORWARD,is->ctx);
142: VecDestroy(counter);
143: }
144: ISGetLocalSize(isrows,&n);
145: if (n == 0) {
146: is->pure_neumann = PETSC_TRUE;
147: } else {
148: is->pure_neumann = PETSC_FALSE;
149: ISGetIndices(isrows,&rows);
150: VecGetArray(is->x,&array);
151: MatZeroRows(is->A,isrows,diag);
152: for (i=0; i<n; i++) {
153: MatSetValue(is->A,rows[i],rows[i],(*diag)/(array[rows[i]]),INSERT_VALUES);
154: }
155: MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
156: MatAssemblyEnd (is->A,MAT_FINAL_ASSEMBLY);
157: VecRestoreArray(is->x,&array);
158: ISRestoreIndices(isrows,&rows);
159: }
161: return(0);
162: }
164: int MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
165: {
166: Mat_IS *is = (Mat_IS*)A->data;
167: int ierr;
170: MatAssemblyBegin(is->A,type);
171: return(0);
172: }
174: int MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
175: {
176: Mat_IS *is = (Mat_IS*)A->data;
177: int ierr;
180: MatAssemblyEnd(is->A,type);
181: return(0);
182: }
184: EXTERN_C_BEGIN
185: int MatCreate_IS(Mat A)
186: {
187: int ierr;
188: Mat_IS *b;
191: ierr = PetscNew(Mat_IS,&b);
192: A->data = (void*)b;
193: PetscMemzero(b,sizeof(Mat_IS));
194: PetscMemzero(A->ops,sizeof(struct _MatOps));
195: A->factor = 0;
196: A->mapping = 0;
198: A->ops->mult = MatMult_IS;
199: A->ops->destroy = MatDestroy_IS;
200: A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
201: A->ops->setvalueslocal = MatSetValuesLocal_IS;
202: A->ops->zerorowslocal = MatZeroRowsLocal_IS;
203: A->ops->assemblybegin = MatAssemblyBegin_IS;
204: A->ops->assemblyend = MatAssemblyEnd_IS;
205: A->ops->view = MatView_IS;
207: PetscSplitOwnership(A->comm,&A->m,&A->M);
208: PetscSplitOwnership(A->comm,&A->n,&A->N);
209: MPI_Scan(&A->m,&b->rend,1,MPI_INT,MPI_SUM,A->comm);
210: b->rstart = b->rend - A->m;
212: b->A = 0;
213: b->ctx = 0;
214: b->x = 0;
215: b->y = 0;
217: return(0);
218: }
219: EXTERN_C_END