Actual source code: mpirowbs.h
#if !defined(__MPIROWBS_H) /* include guard; macro name assumed, matches the final #endif */
#define __MPIROWBS_H

#include "src/mat/matimpl.h"
#include "BSsparse.h"
#include "BSprivate.h"
/*
   Mat_MPIRowbs - Parallel, compressed-row storage format that serves as
   the interface to BlockSolve.
*/
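/*
   Illustrative creation sketch (an assumption, not part of this header:
   the exact MatCreateMPIRowbs() calling sequence varies between PETSc
   releases; this sketch assumes the 2.x form with local rows m, global
   rows M, and preallocation parameters nz/nnz):

      Mat A;
      ierr = MatCreateMPIRowbs(PETSC_COMM_WORLD,m,M,nz,PETSC_NULL,&A);CHKERRQ(ierr);
*/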
typedef struct {
  int          *rowners;              /* range of rows owned by each proc */
  int          rstart,rend;           /* starting and ending owned rows */
  int          size;                  /* size of communicator */
  int          rank;                  /* rank of proc in communicator */
  int          sorted;                /* if true, rows are sorted by increasing cols */
  PetscTruth   roworiented;           /* if true, row-oriented storage */
  int          nonew;                 /* if true, no new elements allowed */
  int          nz,maxnz;              /* total nonzeros stored, allocated */
  int          *imax;                 /* allocated matrix space per row */

  /* The following variables are used in matrix assembly */
  PetscTruth   donotstash;            /* if true, off-processor entries are dropped */
  MPI_Request  *send_waits;           /* array of send requests */
  MPI_Request  *recv_waits;           /* array of receive requests */
  int          nsends,nrecvs;         /* numbers of sends and receives */
  PetscScalar  *svalues,*rvalues;     /* sending and receiving data */
  int          rmax;                  /* maximum message length */
  PetscTruth   vecs_permscale;        /* if true, vectors are permuted and scaled */
  int          factor;
  int          bs_color_single;       /* if true, BlockSolve bypasses cliques in coloring */
  int          reallocs;              /* number of mallocs during MatSetValues() */
  PetscTruth   keepzeroedrows;        /* if true, MatZeroRows() leaves the matrix structure unchanged */

  /* BlockSolve data */
  MPI_Comm     comm_mpirowbs;         /* separate communicator for BlockSolve */
  BSprocinfo   *procinfo;             /* BlockSolve processor context */
  BSmapping    *bsmap;                /* BlockSolve mapping context */
  BSspmat      *A;                    /* initial matrix */
  BSpar_mat    *pA;                   /* permuted matrix */
  BScomm       *comm_pA;              /* communication info for triangular solves */
  BSpar_mat    *fpA;                  /* factored permuted matrix */
  BScomm       *comm_fpA;             /* communication info for factorization */
  Vec          diag;                  /* scaling vector (stores the inverse of the square
                                         root of the permuted diagonal of the original matrix) */
  Vec          xwork;                 /* work space for matrix-vector products */

  /* Cholesky factorization data */
  double       alpha;                 /* restart value for a failed factorization */
  int          ierr;                  /* BlockSolve factorization error code */
  int          failures;              /* number of BlockSolve factorization failures */
  int          blocksolveassembly;    /* if true, the matrix has been assembled for BlockSolve */
  int          assembled_icc_storage; /* if true, BlockSolve assembly used the ICC storage format */
} Mat_MPIRowbs;
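/*
   Usage sketch (illustrative; the variable bsif is hypothetical): this
   context follows the standard PETSc convention of hanging off the
   generic Mat object's data pointer, so implementation routines recover
   it with a cast, e.g.

      Mat_MPIRowbs *bsif = (Mat_MPIRowbs*)mat->data;
      if (!bsif->blocksolveassembly) {
        ierr = MatAssemblyEnd_MPIRowbs_ForBlockSolve(mat);CHKERRQ(ierr);
      }
*/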
EXTERN PetscErrorCode MatCholeskyFactorNumeric_MPIRowbs(Mat,MatFactorInfo*,Mat*);
EXTERN PetscErrorCode MatIncompleteCholeskyFactorSymbolic_MPIRowbs(Mat,IS,MatFactorInfo*,Mat*);
EXTERN PetscErrorCode MatLUFactorNumeric_MPIRowbs(Mat,MatFactorInfo*,Mat*);
EXTERN PetscErrorCode MatILUFactorSymbolic_MPIRowbs(Mat,IS,IS,MatFactorInfo*,Mat*);
EXTERN PetscErrorCode MatSolve_MPIRowbs(Mat,Vec,Vec);
EXTERN PetscErrorCode MatForwardSolve_MPIRowbs(Mat,Vec,Vec);
EXTERN PetscErrorCode MatBackwardSolve_MPIRowbs(Mat,Vec,Vec);
EXTERN PetscErrorCode MatScaleSystem_MPIRowbs(Mat,Vec,Vec);
EXTERN PetscErrorCode MatUnScaleSystem_MPIRowbs(Mat,Vec,Vec);
EXTERN PetscErrorCode MatUseScaledForm_MPIRowbs(Mat,PetscTruth);
EXTERN PetscErrorCode MatGetSubMatrices_MPIRowbs(Mat,int,const IS[],const IS[],MatReuse,Mat**);
EXTERN PetscErrorCode MatGetSubMatrix_MPIRowbs(Mat,IS,IS,int,MatReuse,Mat*);
EXTERN PetscErrorCode MatAssemblyEnd_MPIRowbs_ForBlockSolve(Mat);
EXTERN PetscErrorCode MatGetSubMatrices_MPIRowbs_Local(Mat,int,const IS[],const IS[],MatReuse,Mat*);
EXTERN PetscErrorCode MatLoad_MPIRowbs(PetscViewer,const MatType,Mat*);
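/*
   Sketch of how the scaled-form routines above are intended to compose
   (a sketch only, assuming the usual PETSc scaled-system protocol; the
   vectors b and x are hypothetical and error checking is omitted):

      MatUseScaledForm_MPIRowbs(mat,PETSC_TRUE);
      MatScaleSystem_MPIRowbs(mat,b,x);       scale the RHS and initial guess
      MatSolve_MPIRowbs(mat,b,x);             solve with the factored matrix
      MatUnScaleSystem_MPIRowbs(mat,b,x);     map the solution back
      MatUseScaledForm_MPIRowbs(mat,PETSC_FALSE);
*/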
#define CHKERRBS(a) {if (__BSERROR_STATUS) {(*PetscErrorPrintf)(\
  "BlockSolve95 Error Code %d\n",__BSERROR_STATUS);CHKERRQ(1);}}
#if defined(PETSC_USE_LOG) /* turn on BlockSolve logging */
#define MAINLOG
#endif

#endif /* __MPIROWBS_H */