Actual source code: pastix.c
#define PETSCMAT_DLL

/*
   Provides an interface to the PaStiX sparse solver
*/
#include "../src/mat/impls/aij/seq/aij.h"
#include "../src/mat/impls/aij/mpi/mpiaij.h"
#include "../src/mat/impls/sbaij/seq/sbaij.h"
#include "../src/mat/impls/sbaij/mpi/mpisbaij.h"

#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_STRING_H)
#include <string.h>
#endif

#include "pastix.h"

typedef struct Mat_Pastix_ {
  pastix_data_t  *pastix_data;   /* PaStiX data storage structure                         */
  MatStructure    matstruc;
  PetscInt        n;             /* Number of columns in the matrix                       */
  PetscInt       *colptr;        /* Index of first element of each column in row and val  */
  PetscInt       *row;           /* Row of each element of the matrix                     */
  PetscScalar    *val;           /* Value of each element of the matrix                   */
  PetscInt       *perm;          /* Permutation table                                     */
  PetscInt       *invp;          /* Inverse permutation table                             */
  PetscScalar    *rhs;           /* Right-hand side                                       */
  PetscInt        rhsnbr;        /* Number of right-hand sides (must be 1)                */
  PetscInt        iparm[64];     /* Integer parameters                                    */
  double          dparm[64];     /* Floating-point parameters                             */
  MPI_Comm        pastix_comm;   /* PaStiX MPI communicator                               */
  PetscMPIInt     commRank;      /* MPI rank                                              */
  PetscMPIInt     commSize;      /* MPI communicator size                                 */
  PetscTruth      CleanUpPastix; /* Boolean indicating whether to call the PaStiX clean step */
  VecScatter      scat_rhs;
  VecScatter      scat_sol;
  Vec             b_seq;
  PetscTruth      isAIJ;
  PetscErrorCode (*MatDestroy)(Mat);
} Mat_Pastix;
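
/* This context is stored in the factored matrix's spptr; the matrix's
   original MatDestroy() is saved in the field above so that
   MatDestroy_Pastix() can chain to it once the PaStiX-specific data
   has been released. */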

EXTERN PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);

/*
   Convert a PETSc seqaij matrix to CSC: colptr[n], row[nz], val[nz]

   input:
     A       - matrix in seqaij or mpisbaij (bs=1) format
     valOnly - FALSE: spaces are allocated and values are set for the CSC
               TRUE:  only fill values
   output:
     n       - Size of the matrix
     colptr  - Index of first element of each column in row and val
     row     - Row of each element of the matrix
     values  - Value of each element of the matrix
*/
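/* For illustration (1-based indices, matching the `base' used below): the
   2x2 matrix [a 0; b c] stored in CSC is
     colptr = {1,3,4}, row = {1,2,2}, val = {a,b,c},
   i.e. column j occupies entries colptr[j]..colptr[j+1]-1 of row/val. */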
PetscErrorCode MatConvertToCSC(Mat A,PetscTruth valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ     *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt       *rowptr  = aa->i;
  PetscInt       *col     = aa->j;
  PetscScalar    *rvalues = aa->a;
  PetscInt        m       = A->rmap->N;
  PetscInt        nnz;
  PetscInt        i,j,k;
  PetscInt        base = 1;
  PetscInt        idx;
  PetscErrorCode  ierr;
  PetscInt        colidx;
  PetscInt       *colcount;
  PetscTruth      isSBAIJ;
  PetscTruth      isSeqSBAIJ;
  PetscTruth      isMpiSBAIJ;
  PetscTruth      isSym;

  MatIsSymmetric(A,0.0,&isSym);
  PetscTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs the triangular part when the matrix is symmetric */
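  /* Keep one triangle only: half of the off-diagonal entries plus the diagonal */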
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) {
    nnz = (aa->nz - *n)/2 + *n;
  } else {
    nnz = aa->nz;
  }

  if (!valOnly){
    PetscMalloc(((*n)+1)*sizeof(PetscInt)   ,colptr);
    PetscMalloc(nnz*sizeof(PetscInt)        ,row);
    PetscMalloc(nnz*sizeof(PetscScalar)     ,values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscMemcpy(*colptr,rowptr,((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
      PetscMemcpy(*row,col,nnz*sizeof(PetscInt));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscMemcpy(*values,rvalues,nnz*sizeof(PetscScalar));
    } else {
      PetscMalloc((*n)*sizeof(PetscInt),&colcount);

      for (i = 0; i < m; i++) colcount[i] = 0;
      /* Fill-in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }
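      /* colcount[j] now acts as a per-column fill cursor: the first entry of
         column j is written at index (*colptr)[j] - base, and colcount[j]
         advances as entries of that column are filled */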

      /* Fill-in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill-in only values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx+1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* data structure of the sparse matrix has changed */
          if (k == (*colptr)[colidx+1] - base) SETERRQ1(PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }
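  /* pastix_checkMatrix() verifies the CSC and, with the API_YES flag passed
     below, may correct the structure (e.g. sort the entries within each
     column), possibly changing the number of stored nonzeros; row and
     values are therefore reallocated afterwards to the resulting size
     (*colptr)[*n]-1. */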
  {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;
    PetscMalloc3(nnz,PetscScalar,&tmpvalues,nnz,PetscInt,&tmprows,(*n+1),PetscInt,&tmpcolptr);
    if (sizeof(PetscScalar) != sizeof(pastix_float_t)) {
      SETERRQ2(PETSC_ERR_SUP,"sizeof(PetscScalar) %d != sizeof(pastix_float_t) %d",sizeof(PetscScalar),sizeof(pastix_float_t));
    }
    if (sizeof(PetscInt) != sizeof(pastix_int_t)) {
      SETERRQ2(PETSC_ERR_SUP,"sizeof(PetscInt) %d != sizeof(pastix_int_t) %d",sizeof(PetscInt),sizeof(pastix_int_t));
    }

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    pastix_checkMatrix(MPI_COMM_WORLD,API_VERBOSE_NO,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,&tmpvalues,NULL,1);

    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscInt),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscScalar),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    PetscFree3(tmpvalues,tmprows,tmpcolptr);
  }
  return(0);
}

/*
  Call the clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix.
*/
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix     *lu = (Mat_Pastix*)A->spptr;
  PetscErrorCode  ierr;
  PetscMPIInt     size = lu->commSize;

  if (lu->CleanUpPastix) {
    /* Terminate instance, deallocate memory */
    if (size > 1){
      VecScatterDestroy(lu->scat_rhs);
      VecDestroy(lu->b_seq);
      VecScatterDestroy(lu->scat_sol);
    }

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    pastix((pastix_data_t **)&(lu->pastix_data),
           lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           lu->dparm);

    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  (lu->MatDestroy)(A);
  return(0);
}

/*
  Gather the right-hand side.
  Call PaStiX for the solve step.
  Scatter the solution.
*/
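/* A note on the PaStiX driver interface: iparm[IPARM_START_TASK] and
   iparm[IPARM_END_TASK] select the range of phases a single pastix() call
   executes; the solve below runs API_TASK_SOLVE through API_TASK_REFINE,
   i.e. the triangular solves followed by iterative refinement. */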
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix     *lu = (Mat_Pastix*)A->spptr;
  PetscScalar    *array;
  Vec             x_seq;
  PetscErrorCode  ierr;

  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1){
    /* PaStiX only supports a centralized rhs. Scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else { /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1){
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  pastix((pastix_data_t **)&(lu->pastix_data),
         (MPI_Comm)         lu->pastix_comm,
         (pastix_int_t)     lu->n,
         (pastix_int_t*)    lu->colptr,
         (pastix_int_t*)    lu->row,
         (pastix_float_t*)  lu->val,
         (pastix_int_t*)    lu->perm,
         (pastix_int_t*)    lu->invp,
         (pastix_float_t*)  lu->rhs,
         (pastix_int_t)     lu->rhsnbr,
         (pastix_int_t*)    lu->iparm,
         (double*)          lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
    SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  if (lu->commSize == 1){
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc mpi x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}

/*
  Numeric factorization using the PaStiX solver.
*/
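/* On the first call (matstruc == DIFFERENT_NONZERO_PATTERN) this routine
   initializes the PaStiX instance, reads the options, builds the CSC and
   runs ordering through numerical factorization; on later calls with the
   same nonzero pattern only the CSC values are refreshed and the numerical
   factorization (API_TASK_NUMFACT) is redone. */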
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix     *lu = (Mat_Pastix*)(F)->spptr;
  Mat            *tseq;
  PetscErrorCode  ierr;
  PetscInt        icntl;
  PetscInt        M = A->rmap->N;
  PetscTruth      valOnly,flg,isSym;
  Mat             F_diag;
  IS              is_iden;
  Vec             b;
  IS              isrow;
  PetscTruth      isSeqAIJ,isSeqSBAIJ;

  PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(((PetscObject)A)->comm,&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set pastix options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;
    lu->rhsnbr = 1;

    /* Call to set default pastix options */
    pastix((pastix_data_t **)&(lu->pastix_data),
           (MPI_Comm)         lu->pastix_comm,
           (pastix_int_t)     lu->n,
           (pastix_int_t*)    lu->colptr,
           (pastix_int_t*)    lu->row,
           (pastix_float_t*)  lu->val,
           (pastix_int_t*)    lu->perm,
           (pastix_int_t*)    lu->invp,
           (pastix_float_t*)  lu->rhs,
           (pastix_int_t)     lu->rhsnbr,
           (pastix_int_t*)    lu->iparm,
           (double*)          lu->dparm);

    PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"PaStiX Options","Mat");

    icntl = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl > 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : Number of threads per MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert mpi A to seq mat A */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->perm));
  PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->invp));

  if (isSym) {
    /* On a symmetric matrix, LDLt factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On an unsymmetric matrix, LU factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports a centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      VecCreate(((PetscObject)A)->comm,&b);
      VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
      VecSetFromOptions(b);

      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(is_iden);
      VecDestroy(b);
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    pastix((pastix_data_t **)&(lu->pastix_data),
           (MPI_Comm)         lu->pastix_comm,
           (pastix_int_t)     lu->n,
           (pastix_int_t*)    lu->colptr,
           (pastix_int_t*)    lu->row,
           (pastix_float_t*)  lu->val,
           (pastix_int_t*)    lu->perm,
           (pastix_int_t*)    lu->invp,
           (pastix_float_t*)  lu->rhs,
           (pastix_int_t)     lu->rhsnbr,
           (pastix_int_t*)    lu->iparm,
           (double*)          lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
      SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
    }
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    pastix((pastix_data_t **)&(lu->pastix_data),
           (MPI_Comm)         lu->pastix_comm,
           (pastix_int_t)     lu->n,
           (pastix_int_t*)    lu->colptr,
           (pastix_int_t*)    lu->row,
           (pastix_float_t*)  lu->val,
           (pastix_int_t*)    lu->perm,
           (pastix_int_t*)    lu->invp,
           (pastix_float_t*)  lu->rhs,
           (pastix_int_t)     lu->rhsnbr,
           (pastix_int_t*)    lu->iparm,
           (double*)          lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) {
      SETERRQ1(PETSC_ERR_LIB,"Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
    }
  }

  if (lu->commSize > 1){
    if ((F)->factor == MAT_FACTOR_LU){
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}

/* Note the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)F->spptr;

  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}

/* Note the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;

  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_YES;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}

PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscTruth        iascii;
  PetscViewerFormat format;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO){
      Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type :                     %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2):        %d \n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations : %d \n",lu->iparm[IPARM_NBITER]);
      PetscViewerASCIIPrintf(viewer,"  Error :                           %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}

/*MC
  MAT_SOLVER_PASTIX - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.

  Use config/configure.py --download-pastix to have PETSc installed with PaStiX

  Options Database Keys:
+ -mat_pastix_verbose <0,1,2> - print level
- -mat_pastix_threadnbr <integer> - number of threads per MPI task

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
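
/* Typical usage, as a sketch (assuming a standard KSP-based code):
     mpiexec -n 2 ./ex -ksp_type preonly -pc_type lu \
         -pc_factor_mat_solver_package pastix
   or, from source, via PCFactorSetMatSolverPackage(pc,MAT_SOLVER_PASTIX). */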

PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MAT_SOLVER_PASTIX;
  return(0);
}

/*
  The seq and mpi versions of this function are the same
*/
PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) {
    SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  }
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");

  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
  MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) {
    SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  }
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factor = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->MatDestroy    = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}