Actual source code: partition.c
1: /*$Id: partition.c,v 1.60 2001/06/21 21:17:23 bsmith Exp $*/
2:
3: #include "src/mat/matimpl.h"
5: /* Logging support */
6: int MAT_PARTITIONING_COOKIE;
8: /*
9: Simplest partitioning, keeps the current partitioning.
10: */
11: static int MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
12: {
13: int ierr,m,rank,size;
16: MPI_Comm_size(part->comm,&size);
17: if (part->n != size) {
18: SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
19: }
20: MPI_Comm_rank(part->comm,&rank);
22: MatGetLocalSize(part->adj,&m,PETSC_NULL);
23: ISCreateStride(part->comm,m,rank,0,partitioning);
24: return(0);
25: }
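/*
   Worked illustration (added annotation, not part of the original source): with
   two processes each owning 3 rows of the adjacency matrix, the ISCreateStride()
   call above (stride 0, first entry = rank) produces

      process 0: partitioning = {0,0,0}
      process 1: partitioning = {1,1,1}

   so every local row is assigned to the process that already owns it, i.e. the
   current distribution is kept unchanged.
*/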
27: static int MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
28: {
29: int cell,ierr,n,N,p,rstart,rend,*color,size;
32: MPI_Comm_size(part->comm,&size);
33: if (part->n != size) {
34: SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
35: }
36: p = (int)sqrt((double)part->n);
37: if (p*p != part->n) {
38: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
39: }
40: MatGetSize(part->adj,&N,PETSC_NULL);
41: n = (int)sqrt((double)N);
42: if (n*n != N) { /* This condition is NECESSARY but NOT SUFFICIENT for the domain to be square */
43: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
44: }
45: if (n%p != 0) {
46: SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
47: }
48: MatGetOwnershipRange(part->adj,&rstart,&rend);
49: PetscMalloc((rend-rstart)*sizeof(int),&color);
50: /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
51: for (cell=rstart; cell<rend; cell++) {
52: color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
53: }
54: ISCreateGeneral(part->comm,rend-rstart,color,partitioning);
55: PetscFree(color);
57: return(0);
58: }
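/*
   Worked illustration (added annotation, not part of the original source): for a
   4x4 grid (n = 4, N = 16) split across p*p = 4 processes (p = 2, n/p = 2), the
   formula  color = (cell%n)/(n/p) + p*((cell/n)/(n/p))  colors the cells as

      grid row 0:  0 0 1 1      (cells  0..3)
      grid row 1:  0 0 1 1      (cells  4..7)
      grid row 2:  2 2 3 3      (cells  8..11)
      grid row 3:  2 2 3 3      (cells 12..15)

   so each process receives one contiguous (n/p) x (n/p) square block of the grid.
*/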
60: EXTERN_C_BEGIN
61: int MatPartitioningCreate_Current(MatPartitioning part)
62: {
64: part->ops->apply = MatPartitioningApply_Current;
65: part->ops->view = 0;
66: part->ops->destroy = 0;
67: return(0);
68: }
69: EXTERN_C_END
71: EXTERN_C_BEGIN
72: int MatPartitioningCreate_Square(MatPartitioning part)
73: {
75: part->ops->apply = MatPartitioningApply_Square;
76: part->ops->view = 0;
77: part->ops->destroy = 0;
78: return(0);
79: }
80: EXTERN_C_END
82: /* ===========================================================================================*/
84: #include "petscsys.h"
86: PetscFList MatPartitioningList = 0;
87: PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;
89: /*MC
90: MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
91: matrix package.
93: Synopsis:
94: int MatPartitioningRegisterDynamic(char *name_partitioning,char *path,char *name_create,int (*routine_create)(MatPartitioning))
96: Not Collective
98: Input Parameters:
99: + sname - name of the partitioning type (for example MAT_PARTITIONING_CURRENT or "parmetis")
100: . path - location of library where creation routine is
101: . name - name of function that creates the partitioning type, a string
102: - function - function pointer that creates the partitioning type
104: Level: developer
106: If dynamic libraries are used, then the fourth input argument (function)
107: is ignored.
109: Sample usage:
110: .vb
111: MatPartitioningRegisterDynamic("my_part","/home/username/my_lib/lib/libO/solaris/mylib.a",
112: "MyPartCreate",MyPartCreate);
113: .ve
115: Then, your partitioner can be chosen with the procedural interface via
116: $ MatPartitioningSetType(part,"my_part")
117: or at runtime via the option
118: $ -mat_partitioning_type my_part
120: $PETSC_ARCH and $BOPT occurring in the pathname will be replaced with appropriate values.
122: .keywords: matrix, partitioning, register
124: .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
125: M*/
127: int MatPartitioningRegister(char *sname,char *path,char *name,int (*function)(MatPartitioning))
128: {
129: int ierr;
130: char fullname[256];
133: PetscFListConcat(path,name,fullname);
134: PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)(void))function);
135: return(0);
136: }
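/*
   Usage sketch (added annotation, not part of the original source; MyPartApply,
   MyPartCreate, "my_part" and the library path are illustrative names only).
   A creation routine fills in the ops table, mirroring MatPartitioningCreate_Current()
   above; once registered, the new type is selectable like any built-in one:

      extern int MyPartApply(MatPartitioning,IS*);

      int MyPartCreate(MatPartitioning part)
      {
        part->ops->apply   = MyPartApply;
        part->ops->view    = 0;
        part->ops->destroy = 0;
        return(0);
      }

      MatPartitioningRegisterDynamic("my_part","/home/username/mylib/libmy.a",
                                     "MyPartCreate",MyPartCreate);
      MatPartitioningSetType(part,"my_part");
*/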
138: /*@C
139: MatPartitioningRegisterDestroy - Frees the list of partitioning routines.
141: Not Collective
143: Level: developer
145: .keywords: matrix, register, destroy
147: .seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
148: @*/
149: int MatPartitioningRegisterDestroy(void)
150: {
154: if (MatPartitioningList) {
155: PetscFListDestroy(&MatPartitioningList);
156: MatPartitioningList = 0;
157: }
158: return(0);
159: }
161: /*@C
162: MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
163: from the partitioning context.
165: Not collective
167: Input Parameter:
168: . partitioning - the partitioning context
170: Output Parameter:
171: . type - partitioner type
173: Level: intermediate
177: .keywords: Partitioning, get, method, name, type
178: @*/
179: int MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
180: {
182: *type = partitioning->type_name;
183: return(0);
184: }
186: /*@C
187: MatPartitioningApply - Gets a partitioning for a matrix.
189: Collective on Mat
191: Input Parameters:
192: . matp - the matrix partitioning object
194: Output Parameters:
195: . partitioning - the partitioning. For each local node this gives the rank of
196: the processor to which that node is assigned.
198: Options Database Keys:
199: To specify the partitioning through the options database, use one of
200: the following
201: $ -mat_partitioning_type parmetis, -mat_partitioning_type current
202: To see the partitioning result
203: $ -mat_partitioning_view
205: Level: beginner
207: The user can define additional partitionings; see MatPartitioningRegisterDynamic().
209: .keywords: matrix, get, partitioning
211: .seealso: MatPartitioningGetTypeFromOptions(), MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
212: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
213: ISPartitioningCount()
214: @*/
215: int MatPartitioningApply(MatPartitioning matp,IS *partitioning)
216: {
217: int ierr;
218: PetscTruth flag;
222: if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
223: if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
224: if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
225: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
226: (*matp->ops->apply)(matp,partitioning);
227: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
229: PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);
230: if (flag) {
231: MatPartitioningView(matp,PETSC_VIEWER_STDOUT_(matp->comm));
232: ISView(*partitioning,PETSC_VIEWER_STDOUT_(matp->comm));
233: }
234: return(0);
235: }
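/*
   Typical calling sequence (added annotation, not part of the original source;
   Adj is an adjacency matrix assumed to have been created beforehand, e.g. with
   MatCreateMPIAdj(), and error checking is omitted):

      MatPartitioning part;
      IS              is;

      MatPartitioningCreate(PETSC_COMM_WORLD,&part);
      MatPartitioningSetAdjacency(part,Adj);
      MatPartitioningSetFromOptions(part);
      MatPartitioningApply(part,&is);
      ISView(is,PETSC_VIEWER_STDOUT_WORLD);
      ISDestroy(is);
      MatPartitioningDestroy(part);
*/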
236:
237: /*@C
238: MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the object to be
239: partitioned.
241: Collective on MatPartitioning and Mat
243: Input Parameters:
244: + part - the partitioning context
245: - adj - the adjacency matrix
247: Level: beginner
249: .keywords: Partitioning, adjacency
251: .seealso: MatPartitioningCreate()
252: @*/
253: int MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
254: {
258: part->adj = adj;
259: return(0);
260: }
262: /*@C
263: MatPartitioningDestroy - Destroys the partitioning context.
265: Collective on MatPartitioning
267: Input Parameters:
268: . part - the partitioning context
270: Level: beginner
272: .keywords: Partitioning, destroy, context
274: .seealso: MatPartitioningCreate()
275: @*/
276: int MatPartitioningDestroy(MatPartitioning part)
277: {
282: if (--part->refct > 0) return(0);
284: if (part->ops->destroy) {
285: (*part->ops->destroy)(part);
286: }
287: if (part->vertex_weights){
288: PetscFree(part->vertex_weights);
289: }
290: PetscLogObjectDestroy(part);
291: PetscHeaderDestroy(part);
292: return(0);
293: }
295: /*@C
296: MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
298: Collective on MatPartitioning
300: Input Parameters:
301: + part - the partitioning context
302: - weights - the weights
304: Level: beginner
306: Notes:
307: The array weights is freed by PETSc so the user should not free the array. In C/C++
308: the array must be obtained with a call to PetscMalloc(), not malloc().
310: .keywords: Partitioning, vertex weights
312: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetAdjacency()
313: @*/
314: int MatPartitioningSetVertexWeights(MatPartitioning part,int *weights)
315: {
321: if (part->vertex_weights){
322: PetscFree(part->vertex_weights);
323: }
324: part->vertex_weights = weights;
325: return(0);
326: }
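/*
   Usage sketch (added annotation, not part of the original source; adj, nlocal
   and weights are illustrative names, error checking omitted).  The weights
   array must be obtained with PetscMalloc() because this routine takes ownership
   of it and later frees it with PetscFree(); here every vertex gets weight 1:

      int *weights,i,nlocal;

      MatGetLocalSize(adj,&nlocal,PETSC_NULL);
      PetscMalloc(nlocal*sizeof(int),&weights);
      for (i=0; i<nlocal; i++) weights[i] = 1;
      MatPartitioningSetVertexWeights(part,weights);
*/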
328: /*@C
329: MatPartitioningCreate - Creates a partitioning context.
331: Collective on MPI_Comm
333: Input Parameter:
334: . comm - MPI communicator
336: Output Parameter:
337: . newp - location to put the context
339: Level: beginner
341: .keywords: Partitioning, create, context
343: .seealso: MatPartitioningSetUp(), MatPartitioningApply(), MatPartitioningDestroy(),
344: MatPartitioningSetAdjacency()
346: @*/
347: int MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
348: {
349: MatPartitioning part;
350: int ierr;
353: *newp = 0;
355: PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
356: MatPartitioningView);
357: PetscLogObjectCreate(part);
358: part->type = -1;
359: part->vertex_weights = 0;
360: MPI_Comm_size(comm,&part->n);
362: *newp = part;
363: return(0);
364: }
366: /*@C
367: MatPartitioningView - Prints the partitioning data structure.
369: Collective on MatPartitioning
371: Input Parameters:
372: + part - the partitioning context
373: - viewer - optional visualization context
375: Level: intermediate
377: Note:
378: The available visualization contexts include
379: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
380: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
381: output where only the first processor opens
382: the file. All other processors send their
383: data to the first processor to print.
385: The user can open alternative visualization contexts with
386: . PetscViewerASCIIOpen() - output to a specified file
388: .keywords: Partitioning, view
390: .seealso: PetscViewerASCIIOpen()
391: @*/
392: int MatPartitioningView(MatPartitioning part,PetscViewer viewer)
393: {
394: int ierr;
395: PetscTruth isascii;
396: MatPartitioningType name;
400: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(part->comm);
404: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
405: if (isascii) {
406: MatPartitioningGetType(part,&name);
407: PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %s\n",name);
408: if (part->vertex_weights) {
409: PetscViewerASCIIPrintf(viewer," Using vertex weights\n");
410: }
411: } else {
412: SETERRQ1(1,"Viewer type %s not supported for this MatPartitioning",((PetscObject)viewer)->type_name);
413: }
415: if (part->ops->view) {
416: PetscViewerASCIIPushTab(viewer);
417: (*part->ops->view)(part,viewer);
418: PetscViewerASCIIPopTab(viewer);
419: }
421: return(0);
422: }
424: /*@C
425: MatPartitioningSetType - Sets the type of partitioner to use
427: Collective on MatPartitioning
429: Input Parameters:
430: + part - the partitioning context
431: - type - a known method
433: Options Database Command:
434: $ -mat_partitioning_type <type>
435: $ Use -help for a list of available methods
436: $ (for instance, parmetis)
438: Level: intermediate
440: .keywords: partitioning, set, method, type
442: .seealso: MatPartitioningCreate(), MatPartitioningApply()
444: @*/
445: int MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
446: {
447: int ierr,(*r)(MatPartitioning);
448: PetscTruth match;
454: PetscTypeCompare((PetscObject)part,type,&match);
455: if (match) return(0);
457: if (part->setupcalled) {
458: (*part->ops->destroy)(part);
459: part->data = 0;
460: part->setupcalled = 0;
461: }
463: /* Get the function pointers for the method requested */
464: if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
465: PetscFListFind(part->comm,MatPartitioningList,type,(void (**)(void)) &r);
467: if (!r) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Unknown partitioning type %s",type);}
469: part->ops->destroy = (int (*)(MatPartitioning)) 0;
470: part->ops->view = (int (*)(MatPartitioning,PetscViewer)) 0;
471: (*r)(part);
473: PetscStrfree(part->type_name);
474: PetscStrallocpy(type,&part->type_name);
475: return(0);
476: }
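/*
   Usage sketch (added annotation, not part of the original source): the type can
   be fixed in the code with this routine, e.g.

      MatPartitioningSetType(part,MAT_PARTITIONING_CURRENT);

   which selects the same partitioner as running with -mat_partitioning_type current;
   MAT_PARTITIONING_PARMETIS is available only when PETSc was built with ParMETIS.
*/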
478: /*@
479: MatPartitioningSetFromOptions - Sets various partitioning options from the
480: options database.
482: Collective on MatPartitioning
484: Input Parameter:
485: . part - the partitioning context.
487: Options Database Command:
488: $ -mat_partitioning_type <type>
489: $ Use -help for a list of available methods
490: $ (for instance, parmetis)
492: Level: beginner
494: .keywords: partitioning, set, method, type
495: @*/
496: int MatPartitioningSetFromOptions(MatPartitioning part)
497: {
498: int ierr;
499: PetscTruth flag;
500: char type[256],*def;
503: if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
504: PetscOptionsBegin(part->comm,part->prefix,"Partitioning options","MatOrderings");
505: if (!part->type_name) {
506: #if defined(PETSC_HAVE_PARMETIS)
507: def = MAT_PARTITIONING_PARMETIS;
508: #else
509: def = MAT_PARTITIONING_CURRENT;
510: #endif
511: } else {
512: def = part->type_name;
513: }
514: PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
515: if (flag) {
516: MatPartitioningSetType(part,type);
517: }
518: /*
519: Set the type if it was never set.
520: */
521: if (!part->type_name) {
522: MatPartitioningSetType(part,def);
523: }
525: if (part->ops->setfromoptions) {
526: (*part->ops->setfromoptions)(part);
527: }
528: PetscOptionsEnd();
529: return(0);
530: }
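/*
   Runtime sketch (added annotation, not part of the original source; myapp is an
   illustrative executable name): combined with the calling sequence shown after
   MatPartitioningApply() above, the partitioner is chosen and viewed entirely from
   the options database, e.g.

      mpirun -np 4 ./myapp -mat_partitioning_type parmetis -mat_partitioning_view

   where -mat_partitioning_view triggers the MatPartitioningView()/ISView() calls
   inside MatPartitioningApply().
*/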