Actual source code: dscpack.c
#define PETSCMAT_DLL

/*
   Provides an interface to the DSCPACK (Domain-Separator Codes) sparse direct solver
*/

#include "src/mat/impls/baij/seq/baij.h"
#include "src/mat/impls/baij/mpi/mpibaij.h"

#include "dscmain.h"
typedef struct {
  DSC_Solver My_DSC_Solver;
  PetscInt   num_local_strucs, *local_struc_old_num,
             num_local_cols, num_local_nonz,
             *global_struc_new_col_num,
             *global_struc_new_num, *global_struc_owner,
             dsc_id, bs, *local_cols_old_num, *replication;
  PetscInt   order_code, scheme_code, factor_type, stat,
             LBLASLevel, DBLASLevel, max_mem_allowed;
  MatStructure flg;
  IS         my_cols, iden, iden_dsc;
  Vec        vec_dsc;
  VecScatter scat;
  MPI_Comm   comm_dsc;

  /* A few inheritance details */
  PetscMPIInt    size;
  PetscErrorCode (*MatDuplicate)(Mat,MatDuplicateOption,Mat*);
  PetscErrorCode (*MatView)(Mat,PetscViewer);
  PetscErrorCode (*MatAssemblyEnd)(Mat,MatAssemblyType);
  PetscErrorCode (*MatCholeskyFactorSymbolic)(Mat,IS,MatFactorInfo*,Mat*);
  PetscErrorCode (*MatDestroy)(Mat);
  PetscErrorCode (*MatPreallocate)(Mat,PetscInt,PetscInt,PetscInt*,PetscInt,PetscInt*);

  /* Clean up flag for destructor */
  PetscTruth CleanUpDSCPACK;
} Mat_DSC;

EXTERN PetscErrorCode MatDuplicate_DSCPACK(Mat,MatDuplicateOption,Mat*);
EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_DSCPACK(Mat,const MatType,MatReuse,Mat*);
/* DSC function */

void isort2(PetscInt size, PetscInt *list, PetscInt *idx_dsc) {
  /* Insertion sort in increasing order: on return, idx_dsc contains
     indices such that list can be accessed in sorted order. */
  PetscInt i, j, x, y;

  for (i=0; i<size; i++) idx_dsc[i] = i;

  for (i=1; i<size; i++){
    y = idx_dsc[i];
    x = list[idx_dsc[i]];
    for (j=i-1; ((j>=0) && (x<list[idx_dsc[j]])); j--)
      idx_dsc[j+1] = idx_dsc[j];
    idx_dsc[j+1] = y;
  }
} /* end isort2 */
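
/* Illustration (hypothetical values): for size = 3 and list = {30,10,20},
   isort2() leaves list unchanged and sets idx_dsc = {1,2,0}, so that
   list[idx_dsc[0]] <= list[idx_dsc[1]] <= list[idx_dsc[2]]. */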
PetscErrorCode BAIJtoMyANonz( PetscInt *AIndex, PetscInt *AStruct, PetscInt bs,
                              RealNumberType *ANonz, PetscInt NumLocalStructs,
                              PetscInt NumLocalNonz, PetscInt *GlobalStructNewColNum,
                              PetscInt *LocalStructOldNum,
                              PetscInt *LocalStructLocalNum,
                              RealNumberType **adr_MyANonz)
/*
   Extract the nonzero values of the lower triangular part
   of the permuted matrix that belong to this processor.

   The only output parameter is adr_MyANonz -- it is malloced and changed.
   The rest are input parameters and are left unchanged.

   When LocalStructLocalNum == PETSC_NULL,
       AIndex, AStruct, and ANonz contain the entire original matrix A
       in PETSc SeqBAIJ format;
   otherwise,
       AIndex, AStruct, and ANonz are indices for the submatrix
       of A whose columns (in increasing order) belong to this processor.

   The other variables supply information on the ownership of columns
   and on the new numbering in a fill-reducing permutation.

   This information is used to set up the lower half of the nonzeros of A
   for the columns owned by this processor.
*/
{
  PetscInt       i, j, k, iold, inew, jj, kk, bs2=bs*bs,
                 *idx, *NewColNum,
                 MyANonz_last, max_struct=0, struct_size;
  RealNumberType *MyANonz;

  /* loop to find the maximum number of subscripts over the columns
     assigned to this processor */
  for (i=0; i<NumLocalStructs; i++) {
    /* for each struct i (local) assigned to this processor */
    if (LocalStructLocalNum){
      iold = LocalStructLocalNum[i];
    } else {
      iold = LocalStructOldNum[i];
    }

    struct_size = AIndex[iold+1] - AIndex[iold];
    if (max_struct <= struct_size) max_struct = struct_size;
  }

  /* allocate tmp arrays large enough to hold the densest struct */
  PetscMalloc((2*max_struct+1)*sizeof(PetscInt),&NewColNum);
  idx = NewColNum + max_struct;

  PetscMalloc(NumLocalNonz*sizeof(RealNumberType),&MyANonz);
  *adr_MyANonz = MyANonz;

  /* loop to set up the nonzeros in MyANonz */
  MyANonz_last = 0; /* points to the first empty space in MyANonz */
  for (i=0; i<NumLocalStructs; i++) {

    /* for each struct i (local) assigned to this processor */
    if (LocalStructLocalNum){
      iold = LocalStructLocalNum[i];
    } else {
      iold = LocalStructOldNum[i];
    }

    struct_size = AIndex[iold+1] - AIndex[iold];
    for (k=0, j=AIndex[iold]; j<AIndex[iold+1]; j++){
      NewColNum[k] = GlobalStructNewColNum[AStruct[j]];
      k++;
    }
    isort2(struct_size, NewColNum, idx);

    kk   = AIndex[iold]*bs2; /* points to the 1st element of block column iold in ANonz */
    inew = GlobalStructNewColNum[LocalStructOldNum[i]];

    for (jj = 0; jj < bs; jj++) {
      for (j=0; j<struct_size; j++){
        for (k = 0; k<bs; k++){
          if (NewColNum[idx[j]] + k >= inew)
            MyANonz[MyANonz_last++] = ANonz[kk + idx[j]*bs2 + k*bs + jj];
        }
      }
      inew++;
    }
  } /* end outer loop over i */

  PetscFree(NewColNum);
  if (MyANonz_last != NumLocalNonz) SETERRQ2(PETSC_ERR_PLIB,"MyANonz_last %d != NumLocalNonz %d\n",MyANonz_last,NumLocalNonz);
  return(0);
}
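
/* Illustration (hypothetical values, bs = 1): if GlobalStructNewColNum = {2,0,1} and
   struct i has LocalStructOldNum[i] = 0, then inew = 2, and only those entries of the
   struct with NewColNum[idx[j]] + k >= 2 are copied into MyANonz -- the lower
   triangular part (diagonal and below) of column inew in the permuted ordering. */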
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_DSCPACK_Base(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat     B=*newmat;
  Mat_DSC *lu=(Mat_DSC*)A->spptr;
  void    (*f)(void);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }
  /* Reset the original function pointers */
  B->ops->duplicate              = lu->MatDuplicate;
  B->ops->view                   = lu->MatView;
  B->ops->assemblyend            = lu->MatAssemblyEnd;
  B->ops->choleskyfactorsymbolic = lu->MatCholeskyFactorSymbolic;
  B->ops->destroy                = lu->MatDestroy;

  PetscObjectQueryFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",&f);
  if (f) {
    PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C","",(FCNVOID)lu->MatPreallocate);
  }
  PetscFree(lu);

  PetscObjectComposeFunction((PetscObject)B,"MatConvert_seqbaij_dscpack_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_dscpack_seqbaij_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_dscpack_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_dscpack_mpibaij_C","",PETSC_NULL);

  PetscObjectChangeTypeName((PetscObject)B,type);
  *newmat = B;
  return(0);
}
PetscErrorCode MatDestroy_DSCPACK(Mat A)
{
  Mat_DSC *lu=(Mat_DSC*)A->spptr;

  if (lu->CleanUpDSCPACK) {
    if (lu->dsc_id != -1) {
      if (lu->stat) DSC_DoStats(lu->My_DSC_Solver);
      DSC_FreeAll(lu->My_DSC_Solver);
      DSC_Close0(lu->My_DSC_Solver);

      PetscFree(lu->local_cols_old_num);
    }
    DSC_End(lu->My_DSC_Solver);

    MPI_Comm_free(&(lu->comm_dsc));
    ISDestroy(lu->my_cols);
    PetscFree(lu->replication);
    VecDestroy(lu->vec_dsc);
    ISDestroy(lu->iden_dsc);
    VecScatterDestroy(lu->scat);
    if (lu->size > 1 && lu->iden) {ISDestroy(lu->iden);}
  }
  if (lu->size == 1) {
    MatConvert_DSCPACK_Base(A,MATSEQBAIJ,MAT_REUSE_MATRIX,&A);
  } else {
    MatConvert_DSCPACK_Base(A,MATMPIBAIJ,MAT_REUSE_MATRIX,&A);
  }
  (*A->ops->destroy)(A);
  return(0);
}
PetscErrorCode MatSolve_DSCPACK(Mat A,Vec b,Vec x) {
  Mat_DSC        *lu= (Mat_DSC*)A->spptr;
  PetscErrorCode ierr;
  RealNumberType *solution_vec,*rhs_vec;

  /* scatter b into the sequential vector vec_dsc */
  if (!lu->scat) {
    VecScatterCreate(b,lu->my_cols,lu->vec_dsc,lu->iden_dsc,&lu->scat);
  }
  VecScatterBegin(b,lu->vec_dsc,INSERT_VALUES,SCATTER_FORWARD,lu->scat);
  VecScatterEnd(b,lu->vec_dsc,INSERT_VALUES,SCATTER_FORWARD,lu->scat);

  if (lu->dsc_id != -1){
    VecGetArray(lu->vec_dsc,&rhs_vec);
    DSC_InputRhsLocalVec(lu->My_DSC_Solver, rhs_vec, lu->num_local_cols);
    VecRestoreArray(lu->vec_dsc,&rhs_vec);

    ierr = DSC_Solve(lu->My_DSC_Solver);
    if (ierr != DSC_NO_ERROR) {
      DSC_ErrorDisplay(lu->My_DSC_Solver);
      SETERRQ(PETSC_ERR_LIB,"Error in calling DSC_Solve");
    }

    /* get the permuted local solution */
    VecGetArray(lu->vec_dsc,&solution_vec);
    DSC_GetLocalSolution(lu->My_DSC_Solver,solution_vec, lu->num_local_cols);
    VecRestoreArray(lu->vec_dsc,&solution_vec);
  } /* end of if (lu->dsc_id != -1) */

  /* put the permuted local solution into x in the original order */
  VecScatterBegin(lu->vec_dsc,x,INSERT_VALUES,SCATTER_REVERSE,lu->scat);
  VecScatterEnd(lu->vec_dsc,x,INSERT_VALUES,SCATTER_REVERSE,lu->scat);
  return(0);
}
PetscErrorCode MatCholeskyFactorNumeric_DSCPACK(Mat A,MatFactorInfo *info,Mat *F) {
  Mat_SeqBAIJ    *a_seq;
  Mat_DSC        *lu=(Mat_DSC*)(*F)->spptr;
  Mat            *tseq,A_seq=PETSC_NULL;
  RealNumberType *my_a_nonz;
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       M=A->M,Mbs=M/lu->bs,max_mem_estimate,max_single_malloc_blk,
                 number_of_procs,i,j,next,iold,*idx,*iidx=0,*itmp;
  IS             my_cols_sorted;

  MPI_Comm_size(A->comm,&size);
  if (lu->flg == DIFFERENT_NONZERO_PATTERN){ /* first numeric factorization */
    /* convert A to A_seq */
    if (size > 1) {
      if (!lu->iden){
        ISCreateStride(PETSC_COMM_SELF,M,0,1,&lu->iden);
      }
      MatGetSubMatrices(A,1,&lu->iden,&lu->iden,MAT_INITIAL_MATRIX,&tseq);
      A_seq = tseq[0];
      a_seq = (Mat_SeqBAIJ*)A_seq->data;
    } else {
      a_seq = (Mat_SeqBAIJ*)A->data;
    }

    PetscMalloc(Mbs*sizeof(PetscInt),&lu->replication);
    for (i=0; i<Mbs; i++) lu->replication[i] = lu->bs;

    number_of_procs = DSC_Analyze(Mbs, a_seq->i, a_seq->j, lu->replication);

    i = size;
    if (number_of_procs < i) i = number_of_procs;
    number_of_procs = 1;
    while (i > 1){
      number_of_procs *= 2; i /= 2;
    }
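    /* number_of_procs is now the largest power of two that is <= min(size, the
       process count suggested by DSC_Analyze); e.g. (hypothetical values) size = 6
       and a suggestion of 5 yield number_of_procs = 4. */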
    /* DSC_Solver starts */
    DSC_Open0(lu->My_DSC_Solver, number_of_procs, &lu->dsc_id, lu->comm_dsc);

    if (lu->dsc_id != -1) {
      ierr = DSC_Order(lu->My_DSC_Solver,lu->order_code,Mbs,a_seq->i,a_seq->j,lu->replication,
                       &M,&lu->num_local_strucs,
                       &lu->num_local_cols, &lu->num_local_nonz, &lu->global_struc_new_col_num,
                       &lu->global_struc_new_num, &lu->global_struc_owner,
                       &lu->local_struc_old_num);
      if (ierr != DSC_NO_ERROR) {
        DSC_ErrorDisplay(lu->My_DSC_Solver);
        SETERRQ(PETSC_ERR_LIB,"Error when using DSC_Order()");
      }

      ierr = DSC_SFactor(lu->My_DSC_Solver,&max_mem_estimate,&max_single_malloc_blk,
                         lu->max_mem_allowed, lu->LBLASLevel, lu->DBLASLevel);
      if (ierr != DSC_NO_ERROR) {
        DSC_ErrorDisplay(lu->My_DSC_Solver);
        SETERRQ(PETSC_ERR_LIB,"Error when using DSC_SFactor()");
      }

      ierr = BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
                           lu->num_local_strucs, lu->num_local_nonz,
                           lu->global_struc_new_col_num,
                           lu->local_struc_old_num,
                           PETSC_NULL,
                           &my_a_nonz);
      if (ierr < 0) {
        DSC_ErrorDisplay(lu->My_DSC_Solver);
        SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeros at processor %d \n", lu->dsc_id);
      }

      /* get local_cols_old_num and the IS my_cols to be used later */
      PetscMalloc(lu->num_local_cols*sizeof(PetscInt),&lu->local_cols_old_num);
      for (next = 0, i=0; i<lu->num_local_strucs; i++){
        iold = lu->bs*lu->local_struc_old_num[i];
        for (j=0; j<lu->bs; j++)
          lu->local_cols_old_num[next++] = iold++;
      }
      ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,lu->local_cols_old_num,&lu->my_cols);

    } else { /* lu->dsc_id == -1 */
      lu->num_local_cols = 0;
      lu->local_cols_old_num = 0;
      ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,lu->local_cols_old_num,&lu->my_cols);
    }
    /* generate vec_dsc and iden_dsc to be used later */
    VecCreateSeq(PETSC_COMM_SELF,lu->num_local_cols,&lu->vec_dsc);
    ISCreateStride(PETSC_COMM_SELF,lu->num_local_cols,0,1,&lu->iden_dsc);
    lu->scat = PETSC_NULL;

    if (size > 1) {
      MatDestroyMatrices(1,&tseq);
    }
  } else { /* use the previously computed symbolic factor */
    /* convert A to my A_seq */
    if (size > 1) {
      if (lu->dsc_id == -1) {
        itmp = 0;
      } else {
        PetscMalloc(2*lu->num_local_strucs*sizeof(PetscInt),&idx);
        iidx = idx + lu->num_local_strucs;
        PetscMalloc(lu->num_local_cols*sizeof(PetscInt),&itmp);

        isort2(lu->num_local_strucs, lu->local_struc_old_num, idx);
        for (next=0, i=0; i<lu->num_local_strucs; i++) {
          iold = lu->bs*lu->local_struc_old_num[idx[i]];
          for (j=0; j<lu->bs; j++){
            itmp[next++] = iold++; /* sorted local_cols_old_num */
          }
        }
        for (i=0; i<lu->num_local_strucs; i++) {
          iidx[idx[i]] = i; /* inverse of idx */
        }
      } /* end of if/else on lu->dsc_id */
      ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,itmp,&my_cols_sorted);
      MatGetSubMatrices(A,1,&my_cols_sorted,&lu->iden,MAT_INITIAL_MATRIX,&tseq);
      ISDestroy(my_cols_sorted);
      A_seq = tseq[0];

      if (lu->dsc_id != -1) {
        DSC_ReFactorInitialize(lu->My_DSC_Solver);

        a_seq = (Mat_SeqBAIJ*)A_seq->data;
        ierr = BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
                             lu->num_local_strucs, lu->num_local_nonz,
                             lu->global_struc_new_col_num,
                             lu->local_struc_old_num,
                             iidx,
                             &my_a_nonz);
        if (ierr < 0) {
          DSC_ErrorDisplay(lu->My_DSC_Solver);
          SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeros at processor %d \n", lu->dsc_id);
        }
        PetscFree(idx);
        PetscFree(itmp);
      } /* end of if (lu->dsc_id != -1) */
    } else { /* size == 1 */
      a_seq = (Mat_SeqBAIJ*)A->data;

      ierr = BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
                           lu->num_local_strucs, lu->num_local_nonz,
                           lu->global_struc_new_col_num,
                           lu->local_struc_old_num,
                           PETSC_NULL,
                           &my_a_nonz);
      if (ierr < 0) {
        DSC_ErrorDisplay(lu->My_DSC_Solver);
        SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeros at processor %d \n", lu->dsc_id);
      }
    }
    if (size > 1) {MatDestroyMatrices(1,&tseq);}
  }

  if (lu->dsc_id != -1) {
    DSC_NFactor(lu->My_DSC_Solver, lu->scheme_code, my_a_nonz, lu->factor_type, lu->LBLASLevel, lu->DBLASLevel);
    PetscFree(my_a_nonz);
  }

  (*F)->assembled = PETSC_TRUE;
  lu->flg         = SAME_NONZERO_PATTERN;
  return(0);
}
/* Note: the PETSc permutation r is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_DSCPACK(Mat A,IS r,MatFactorInfo *info,Mat *F) {
  Mat        B;
  Mat_DSC    *lu;
  PetscInt   bs,indx;
  PetscTruth flg;
  const char *ftype[]={"LDLT","LLT"},*ltype[]={"LBLAS1","LBLAS2","LBLAS3"},*dtype[]={"DBLAS1","DBLAS2"};

  /* Create the factorization matrix F */
  MatGetBlockSize(A,&bs);
  MatCreate(A->comm,&B);
  MatSetSizes(B,A->m,A->n,A->M,A->N);
  MatSetType(B,A->type_name);
  MatSeqBAIJSetPreallocation(B,bs,0,PETSC_NULL);
  MatMPIBAIJSetPreallocation(B,bs,0,PETSC_NULL,0,PETSC_NULL);

  lu    = (Mat_DSC*)B->spptr;
  B->bs = bs;

  B->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_DSCPACK;
  B->ops->solve                 = MatSolve_DSCPACK;
  B->factor                     = FACTOR_CHOLESKY;

  /* Set the default input options */
  lu->order_code      = 2;
  lu->scheme_code     = 1;
  lu->factor_type     = 2;
  lu->stat            = 0; /* do not display stats */
  lu->LBLASLevel      = DSC_LBLAS3;
  lu->DBLASLevel      = DSC_DBLAS2;
  lu->max_mem_allowed = 256;
  MPI_Comm_dup(A->comm,&(lu->comm_dsc));

  /* Get the runtime input options */
  PetscOptionsBegin(A->comm,A->prefix,"DSCPACK Options","Mat");

  PetscOptionsInt("-mat_dscpack_order","order_code: \n\
         1 = ND, 2 = Hybrid with Minimum Degree, 3 = Hybrid with Minimum Deficiency", \
                  "None",
                  lu->order_code,&lu->order_code,PETSC_NULL);

  PetscOptionsInt("-mat_dscpack_scheme","scheme_code: \n\
         1 = standard factorization, 2 = factorization + selective inversion", \
                  "None",
                  lu->scheme_code,&lu->scheme_code,PETSC_NULL);

  PetscOptionsEList("-mat_dscpack_factor","factor_type","None",ftype,2,ftype[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->factor_type = DSC_LDLT;
      break;
    case 1:
      lu->factor_type = DSC_LLT;
      break;
    }
  }
  PetscOptionsInt("-mat_dscpack_MaxMemAllowed","in Mbytes","None",
                  lu->max_mem_allowed,&lu->max_mem_allowed,PETSC_NULL);

  PetscOptionsInt("-mat_dscpack_stats","display stats: 0 = no display, 1 = display",
                  "None", lu->stat,&lu->stat,PETSC_NULL);

  PetscOptionsEList("-mat_dscpack_LBLAS","BLAS level used in the local phase","None",ltype,3,ltype[2],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->LBLASLevel = DSC_LBLAS1;
      break;
    case 1:
      lu->LBLASLevel = DSC_LBLAS2;
      break;
    case 2:
      lu->LBLASLevel = DSC_LBLAS3;
      break;
    }
  }

  PetscOptionsEList("-mat_dscpack_DBLAS","BLAS level used in the distributed phase","None",dtype,2,dtype[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->DBLASLevel = DSC_DBLAS1;
      break;
    case 1:
      lu->DBLASLevel = DSC_DBLAS2;
      break;
    }
  }
  PetscOptionsEnd();

  lu->flg = DIFFERENT_NONZERO_PATTERN;

  lu->My_DSC_Solver  = DSC_Begin();
  lu->CleanUpDSCPACK = PETSC_TRUE;
  *F = B;
  return(0);
}
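
/* Runtime example (hypothetical executable name): the defaults set above can be
   overridden on the command line, e.g.

     ./app -mat_type dscpack -mat_dscpack_order 1 -mat_dscpack_scheme 2 -mat_dscpack_stats 1

   which selects ND ordering, factorization with selective inversion, and a stats
   display at MatDestroy() time. */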
PetscErrorCode MatAssemblyEnd_DSCPACK(Mat A,MatAssemblyType mode) {
  Mat_DSC *lu=(Mat_DSC*)A->spptr;

  (*lu->MatAssemblyEnd)(A,mode);
  lu->MatCholeskyFactorSymbolic  = A->ops->choleskyfactorsymbolic;
  A->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_DSCPACK;
  return(0);
}
PetscErrorCode MatFactorInfo_DSCPACK(Mat A,PetscViewer viewer)
{
  Mat_DSC *lu=(Mat_DSC*)A->spptr;
  char    *s=0;

  PetscViewerASCIIPrintf(viewer,"DSCPACK run parameters:\n");

  switch (lu->order_code) {
  case 1: s = "ND"; break;
  case 2: s = "Hybrid with Minimum Degree"; break;
  case 3: s = "Hybrid with Minimum Deficiency"; break;
  }
  PetscViewerASCIIPrintf(viewer,"  order_code: %s \n",s);

  switch (lu->scheme_code) {
  case 1: s = "standard factorization"; break;
  case 2: s = "factorization + selective inversion"; break;
  }
  PetscViewerASCIIPrintf(viewer,"  scheme_code: %s \n",s);

  switch (lu->stat) {
  case 0: s = "NO"; break;
  case 1: s = "YES"; break;
  }
  PetscViewerASCIIPrintf(viewer,"  display stats: %s \n",s);

  if (lu->factor_type == DSC_LLT) {
    s = "LLT";
  } else if (lu->factor_type == DSC_LDLT){
    s = "LDLT";
  } else {
    SETERRQ(PETSC_ERR_PLIB,"Unknown factor type");
  }
  PetscViewerASCIIPrintf(viewer,"  factor type: %s \n",s);

  if (lu->LBLASLevel == DSC_LBLAS1) {
    s = "BLAS1";
  } else if (lu->LBLASLevel == DSC_LBLAS2){
    s = "BLAS2";
  } else if (lu->LBLASLevel == DSC_LBLAS3){
    s = "BLAS3";
  } else {
    SETERRQ(PETSC_ERR_PLIB,"Unknown local phase BLAS level");
  }
  PetscViewerASCIIPrintf(viewer,"  local phase BLAS level: %s \n",s);

  if (lu->DBLASLevel == DSC_DBLAS1) {
    s = "BLAS1";
  } else if (lu->DBLASLevel == DSC_DBLAS2){
    s = "BLAS2";
  } else {
    SETERRQ(PETSC_ERR_PLIB,"Unknown distributed phase BLAS level");
  }
  PetscViewerASCIIPrintf(viewer,"  distributed phase BLAS level: %s \n",s);
  return(0);
}
PetscErrorCode MatView_DSCPACK(Mat A,PetscViewer viewer) {
  PetscErrorCode    ierr;
  PetscMPIInt       size;
  PetscTruth        iascii;
  PetscViewerFormat format;
  Mat_DSC           *lu=(Mat_DSC*)A->spptr;

  /* This conversion ugliness is because MatView for BAIJ types calls MatConvert to AIJ */
  size = lu->size;
  if (size == 1) {
    MatConvert(A,MATSEQBAIJ,MAT_REUSE_MATRIX,&A);
  } else {
    MatConvert(A,MATMPIBAIJ,MAT_REUSE_MATRIX,&A);
  }

  MatView(A,viewer);

  MatConvert(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      MatFactorInfo_DSCPACK(A,viewer);
    }
  }
  return(0);
}
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIBAIJSetPreallocation_MPIDSCPACK(Mat B,PetscInt bs,PetscInt d_nz,PetscInt *d_nnz,PetscInt o_nz,PetscInt *o_nnz)
{
  Mat     A;
  Mat_DSC *lu = (Mat_DSC*)B->spptr;

  /*
    After performing the MPIBAIJ preallocation, we need to convert the local diagonal block matrix
    into DSCPACK type so that the block Jacobi preconditioner (for example) can use DSCPACK. I would
    like this to be done in the MatCreate routine, but the creation of this inner matrix requires
    block size info so that PETSc can determine the local size properly. The block size info is set
    in the preallocation routine.
  */
  (*lu->MatPreallocate)(B,bs,d_nz,d_nnz,o_nz,o_nnz);
  A = ((Mat_MPIBAIJ*)B->data)->A;
  MatConvert_Base_DSCPACK(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
  return(0);
}
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_DSCPACK(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  /* This routine is only called to convert to MATDSCPACK          */
  /* from MATSEQBAIJ if A has a single-process communicator,       */
  /* or MATMPIBAIJ otherwise, so we will ignore 'MatType type'.    */
  MPI_Comm comm;
  Mat      B=*newmat;
  Mat_DSC  *lu;
  void     (*f)(void);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  PetscNew(Mat_DSC,&lu);

  lu->MatDuplicate              = A->ops->duplicate;
  lu->MatView                   = A->ops->view;
  lu->MatAssemblyEnd            = A->ops->assemblyend;
  lu->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
  lu->MatDestroy                = A->ops->destroy;
  lu->CleanUpDSCPACK            = PETSC_FALSE;
  lu->bs                        = A->bs;

  B->spptr                       = (void*)lu;
  B->ops->duplicate              = MatDuplicate_DSCPACK;
  B->ops->view                   = MatView_DSCPACK;
  B->ops->assemblyend            = MatAssemblyEnd_DSCPACK;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_DSCPACK;
  B->ops->destroy                = MatDestroy_DSCPACK;

  MPI_Comm_size(comm,&(lu->size));
  if (lu->size == 1) {
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqbaij_dscpack_C",
                                      "MatConvert_Base_DSCPACK",MatConvert_Base_DSCPACK);
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_dscpack_seqbaij_C",
                                      "MatConvert_DSCPACK_Base",MatConvert_DSCPACK_Base);
  } else {
    /* I really don't like needing to know the tag: MatMPIBAIJSetPreallocation_C */
    PetscObjectQueryFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",&f);
    if (f) {
      lu->MatPreallocate = (PetscErrorCode (*)(Mat,PetscInt,PetscInt,PetscInt*,PetscInt,PetscInt*))f;
      PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIBAIJSetPreallocation_C",
                                        "MatMPIBAIJSetPreallocation_MPIDSCPACK",
                                        MatMPIBAIJSetPreallocation_MPIDSCPACK);
    }
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_dscpack_C",
                                      "MatConvert_Base_DSCPACK",MatConvert_Base_DSCPACK);
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_dscpack_mpibaij_C",
                                      "MatConvert_DSCPACK_Base",MatConvert_DSCPACK_Base);
  }
  PetscObjectChangeTypeName((PetscObject)B,MATDSCPACK);
  *newmat = B;
  return(0);
}
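
/* Note: MatConvert() locates conversion routines by querying the matrix for a
   composed function named "MatConvert_<typefrom>_<typeto>_C", which is why the
   registrations above use those tags; they make the in-place conversions between
   the BAIJ types and MATDSCPACK available at runtime. */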
PetscErrorCode MatDuplicate_DSCPACK(Mat A, MatDuplicateOption op, Mat *M) {
  Mat_DSC *lu=(Mat_DSC*)A->spptr;

  (*lu->MatDuplicate)(A,op,M);
  PetscMemcpy((*M)->spptr,lu,sizeof(Mat_DSC));
  return(0);
}
/*MC
  MATDSCPACK - MATDSCPACK = "dscpack" - A matrix type providing direct solvers (Cholesky) for sequential
  or distributed matrices via the external package DSCPACK.

  If DSCPACK is installed (see the manual for
  instructions on how to declare the existence of external packages),
  a matrix type can be constructed which invokes DSCPACK solvers.
  After calling MatCreate(...,A), simply call MatSetType(A,MATDSCPACK).
  This matrix type is only supported for double precision real.

  This matrix inherits from MATSEQBAIJ if constructed with a single-process communicator,
  and from MATMPIBAIJ otherwise. As a result, MatSeqBAIJSetPreallocation is
  supported for sequential matrices, and MatMPIBAIJSetPreallocation is supported for distributed matrices. It is
  recommended that you call both of the above preallocation routines for simplicity. Also,
  MatConvert can be called to perform in-place conversion to and from MATSEQBAIJ or MATMPIBAIJ
  for sequential or distributed matrices respectively.

  Options Database Keys:
+ -mat_type dscpack - sets the matrix type to dscpack during a call to MatSetFromOptions()
. -mat_dscpack_order <1,2,3> - DSCPACK ordering: 1 = ND, 2 = Hybrid with Minimum Degree, 3 = Hybrid with Minimum Deficiency
. -mat_dscpack_scheme <1,2> - factorization scheme: 1 = standard factorization, 2 = factorization with selective inversion
. -mat_dscpack_factor <LLT,LDLT> - the type of factorization to be performed
. -mat_dscpack_MaxMemAllowed <n> - the maximum memory (in Mbytes) to be used during factorization
. -mat_dscpack_stats <0,1> - display stats of the factorization and solves during MatDestroy(): 0 = no display, 1 = display
. -mat_dscpack_LBLAS <LBLAS1,LBLAS2,LBLAS3> - BLAS level used in the local phase
- -mat_dscpack_DBLAS <DBLAS1,DBLAS2> - BLAS level used in the distributed phase

  Level: beginner

.seealso: PCCHOLESKY
M*/
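
/* A minimal usage sketch (hypothetical sizes; assumes PETSc was configured with
   DSCPACK; error checking omitted for brevity):

     Mat      A;
     PetscInt n  = 100;    // global dimension (assumed)
     PetscInt bs = 2;      // block size (assumed)

     MatCreate(PETSC_COMM_WORLD,&A);
     MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);
     MatSetType(A,MATDSCPACK);
     MatSeqBAIJSetPreallocation(A,bs,5,PETSC_NULL);              // used on one process
     MatMPIBAIJSetPreallocation(A,bs,5,PETSC_NULL,5,PETSC_NULL); // used on several

   then fill A with MatSetValues(), assemble it with MatAssemblyBegin()/MatAssemblyEnd(),
   and solve with a KSP whose PC is PCCHOLESKY so the DSCPACK factorization is used. */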
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_DSCPACK(Mat A)
{
  PetscMPIInt size;

  /* Change the type name before calling MatSetType to force proper construction of the SeqBAIJ or MPIBAIJ */
  /* and DSCPACK types */
  PetscObjectChangeTypeName((PetscObject)A,MATDSCPACK);
  MPI_Comm_size(A->comm,&size);
  if (size == 1) {
    MatSetType(A,MATSEQBAIJ);
  } else {
    MatSetType(A,MATMPIBAIJ);
  }
  MatConvert_Base_DSCPACK(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
  return(0);
}