Actual source code: dsnep.c (slepc-3.12.0, 2019-09-30)

/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2019, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/dsimpl.h>       /*I "slepcds.h" I*/
#include <slepcblaslapack.h>
typedef struct {
  PetscInt       nf;                 /* number of functions in f[] */
  FN             f[DS_NUM_EXTRA];    /* functions defining the nonlinear operator */
  PetscInt       neig;               /* number of available eigenpairs */
  void           *computematrixctx;
  PetscErrorCode (*computematrix)(DS,PetscScalar,PetscBool,DSMatType,void*);
} DS_NEP;
/*
   DSNEPComputeMatrix - Build the matrix associated with a nonlinear operator
   T(lambda) or its derivative T'(lambda), given the parameter lambda, where
   T(lambda) = sum_i E_i*f_i(lambda). The result is written in mat.
*/
static PetscErrorCode DSNEPComputeMatrix(DS ds,PetscScalar lambda,PetscBool deriv,DSMatType mat)
{
  DS_NEP       *ctx = (DS_NEP*)ds->data;
  PetscScalar  *T,*E,alpha;
  PetscInt     i,ld,n;
  PetscBLASInt k,inc=1;

  PetscLogEventBegin(DS_Other,ds,0,0,0);
  if (ctx->computematrix) {
    (*ctx->computematrix)(ds,lambda,deriv,mat,ctx->computematrixctx);
  } else {
    DSGetDimensions(ds,&n,NULL,NULL,NULL,NULL);
    DSGetLeadingDimension(ds,&ld);
    PetscBLASIntCast(ld*n,&k);
    DSGetArray(ds,mat,&T);
    PetscArrayzero(T,k);
    for (i=0;i<ctx->nf;i++) {
      if (deriv) {
        FNEvaluateDerivative(ctx->f[i],lambda,&alpha);
      } else {
        FNEvaluateFunction(ctx->f[i],lambda,&alpha);
      }
      E = ds->mat[DSMatExtra[i]];
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&k,&alpha,E,&inc,T,&inc));
    }
    DSRestoreArray(ds,mat,&T);
  }
  PetscLogEventEnd(DS_Other,ds,0,0,0);
  return(0);
}
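/*
   Illustrative note (an editorial addition, not in the original source): for
   nf=2 with f_0(lambda)=1 and f_1(lambda)=lambda, the loop above accumulates
   T(lambda) = E_0 + lambda*E_1, i.e. a linear pencil, while with
   deriv=PETSC_TRUE it produces T'(lambda) = E_1, since FNEvaluateDerivative()
   returns f_i'(lambda) in alpha.
*/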
PetscErrorCode DSAllocate_NEP(DS ds,PetscInt ld)
{
  DS_NEP   *ctx = (DS_NEP*)ds->data;
  PetscInt i;

  if (!ctx->nf) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_WRONGSTATE,"DSNEP requires passing some functions via DSNEPSetFN()");
  DSAllocateMat_Private(ds,DS_MAT_X);
  for (i=0;i<ctx->nf;i++) {
    DSAllocateMat_Private(ds,DSMatExtra[i]);
  }
  PetscFree(ds->perm);
  PetscMalloc1(ld,&ds->perm);
  PetscLogObjectMemory((PetscObject)ds,ld*sizeof(PetscInt));
  return(0);
}
PetscErrorCode DSView_NEP(DS ds,PetscViewer viewer)
{
  DS_NEP            *ctx = (DS_NEP*)ds->data;
  PetscViewerFormat format;
  PetscInt          i;

  PetscViewerGetFormat(viewer,&format);
  PetscViewerASCIIPrintf(viewer,"number of functions: %D\n",ctx->nf);
  if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) return(0);
  for (i=0;i<ctx->nf;i++) {
    FNView(ctx->f[i],viewer);
    DSViewMat(ds,viewer,DSMatExtra[i]);
  }
  if (ds->state>DS_STATE_INTERMEDIATE) { DSViewMat(ds,viewer,DS_MAT_X); }
  return(0);
}
PetscErrorCode DSVectors_NEP(DS ds,DSMatType mat,PetscInt *j,PetscReal *rnorm)
{
  if (rnorm) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
  switch (mat) {
    case DS_MAT_X:
      break;
    case DS_MAT_Y:
      SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Invalid mat parameter");
  }
  return(0);
}
PetscErrorCode DSSort_NEP(DS ds,PetscScalar *wr,PetscScalar *wi,PetscScalar *rr,PetscScalar *ri,PetscInt *dummy)
{
  DS_NEP      *ctx = (DS_NEP*)ds->data;
  PetscInt    n,l,i,j,k,p,*perm,told,ld=ds->ld;
  PetscScalar *A,*X,rtmp;

  if (!ds->sc) return(0);
  n = ds->n;
  l = ds->l;
  A = ds->mat[DS_MAT_A];
  perm = ds->perm;
  for (i=0;i<ctx->neig;i++) perm[i] = i;
  told = ds->t;
  ds->t = ctx->neig;  /* force the sorting routines to consider ctx->neig eigenvalues */
  if (rr) {
    DSSortEigenvalues_Private(ds,rr,ri,perm,PETSC_FALSE);
  } else {
    DSSortEigenvalues_Private(ds,wr,NULL,perm,PETSC_FALSE);
  }
  ds->t = told;  /* restore value of t */
  for (i=l;i<n;i++) A[i+i*ld] = wr[perm[i]];
  for (i=l;i<n;i++) wr[i] = A[i+i*ld];
  /* cannot use DSPermuteColumns_Private() since not all columns are filled */
  X = ds->mat[DS_MAT_X];
  for (i=0;i<ctx->neig;i++) {
    p = perm[i];
    if (p != i) {
      j = i + 1;
      while (perm[j] != i) j++;
      perm[j] = p; perm[i] = i;
      /* swap columns i and p */
      for (k=0;k<n;k++) {
        rtmp = X[k+p*ld]; X[k+p*ld] = X[k+i*ld]; X[k+i*ld] = rtmp;
      }
    }
  }
  return(0);
}
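/*
   Worked example of the column permutation above (editorial sketch): for
   neig=3 and perm={2,0,1}, iteration i=0 finds p=2, locates j=1 where
   perm[j]==0, sets perm={0,2,1} and swaps columns 0 and 2 of X; iteration
   i=1 then swaps columns 1 and 2, after which the columns of X follow the
   sorted eigenvalue order, each swap touching only the first n rows.
*/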
PetscErrorCode DSSolve_NEP_SLP(DS ds,PetscScalar *wr,PetscScalar *wi)
{
#if defined(SLEPC_MISSING_LAPACK_GGEV)
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"GGEV - Lapack routine is unavailable");
#else
  DS_NEP       *ctx = (DS_NEP*)ds->data;
  PetscScalar  *A,*B,*W,*X,*work,*alpha,*beta;
  PetscScalar  norm,sigma,lambda,mu,re,re2,sone=1.0,zero=0.0;
  PetscBLASInt info,n,ld,lrwork=0,lwork,one=1;
  PetscInt     it,pos,j,maxit=100,result;
  PetscReal    tol;
#if defined(PETSC_USE_COMPLEX)
  PetscReal    *rwork;
#else
  PetscReal    *alphai,im,im2;
#endif

  if (!ds->mat[DS_MAT_A]) {
    DSAllocateMat_Private(ds,DS_MAT_A);
  }
  if (!ds->mat[DS_MAT_B]) {
    DSAllocateMat_Private(ds,DS_MAT_B);
  }
  if (!ds->mat[DS_MAT_W]) {
    DSAllocateMat_Private(ds,DS_MAT_W);
  }
  PetscBLASIntCast(ds->n,&n);
  PetscBLASIntCast(ds->ld,&ld);
#if defined(PETSC_USE_COMPLEX)
  PetscBLASIntCast(2*ds->n+2*ds->n,&lwork);
  PetscBLASIntCast(8*ds->n,&lrwork);
#else
  PetscBLASIntCast(3*ds->n+8*ds->n,&lwork);
#endif
  DSAllocateWork_Private(ds,lwork,lrwork,0);
  alpha = ds->work;
  beta  = ds->work + ds->n;
#if defined(PETSC_USE_COMPLEX)
  work   = ds->work + 2*ds->n;
  lwork -= 2*ds->n;
#else
  alphai = ds->work + 2*ds->n;
  work   = ds->work + 3*ds->n;
  lwork -= 3*ds->n;
#endif
  A = ds->mat[DS_MAT_A];
  B = ds->mat[DS_MAT_B];
  W = ds->mat[DS_MAT_W];
  X = ds->mat[DS_MAT_X];

  sigma = 0.0;
  if (ds->sc->comparison==SlepcCompareTargetMagnitude || ds->sc->comparison==SlepcCompareTargetReal) sigma = *(PetscScalar*)ds->sc->comparisonctx;
  lambda = sigma;
  tol = 1000*n*PETSC_MACHINE_EPSILON;

  for (it=0;it<maxit;it++) {

    /* evaluate T and T' */
    DSNEPComputeMatrix(ds,lambda,PETSC_FALSE,DS_MAT_A);
    if (it) {
      PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&n,&n,&sone,A,&ld,X,&one,&zero,X+ld,&one));
      norm = BLASnrm2_(&n,X+ld,&one);
      if (PetscRealPart(norm)/PetscAbsScalar(lambda)<=tol) break;
    }
    DSNEPComputeMatrix(ds,lambda,PETSC_TRUE,DS_MAT_B);

    /* compute eigenvalue correction mu and eigenvector u */
#if defined(PETSC_USE_COMPLEX)
    rwork = ds->rwork;
    PetscStackCallBLAS("LAPACKggev",LAPACKggev_("N","V",&n,A,&ld,B,&ld,alpha,beta,NULL,&ld,W,&ld,work,&lwork,rwork,&info));
#else
    PetscStackCallBLAS("LAPACKggev",LAPACKggev_("N","V",&n,A,&ld,B,&ld,alpha,alphai,beta,NULL,&ld,W,&ld,work,&lwork,&info));
#endif
    SlepcCheckLapackInfo("ggev",info);

    /* find smallest eigenvalue */
    j = 0;
    if (beta[j]==0.0) re = (PetscRealPart(alpha[j])>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
    else re = alpha[j]/beta[j];
#if !defined(PETSC_USE_COMPLEX)
    if (beta[j]==0.0) im = (alphai[j]>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
    else im = alphai[j]/beta[j];
#endif
    pos = 0;
    for (j=1;j<n;j++) {
      if (beta[j]==0.0) re2 = (PetscRealPart(alpha[j])>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
      else re2 = alpha[j]/beta[j];
#if !defined(PETSC_USE_COMPLEX)
      if (beta[j]==0.0) im2 = (alphai[j]>0.0)? PETSC_MAX_REAL: PETSC_MIN_REAL;
      else im2 = alphai[j]/beta[j];
      SlepcCompareSmallestMagnitude(re,im,re2,im2,&result,NULL);
#else
      SlepcCompareSmallestMagnitude(re,0.0,re2,0.0,&result,NULL);
#endif
      if (result > 0) {
        re = re2;
#if !defined(PETSC_USE_COMPLEX)
        im = im2;
#endif
        pos = j;
      }
    }

#if !defined(PETSC_USE_COMPLEX)
    if (im!=0.0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"DSNEP found a complex eigenvalue; try rerunning with complex scalars");
#endif
    mu = alpha[pos]/beta[pos];
    PetscArraycpy(X,W+pos*ld,n);
    norm = BLASnrm2_(&n,X,&one);
    norm = 1.0/norm;
    PetscStackCallBLAS("BLASscal",BLASscal_(&n,&norm,X,&one));

    /* correct eigenvalue approximation */
    lambda = lambda - mu;
  }

  if (it==maxit) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_CONV_FAILED,"DSNEP did not converge");
  ctx->neig = 1;
  wr[0] = lambda;
  if (wi) wi[0] = 0.0;
  return(0);
#endif
}
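/*
   Editorial summary (not in the original source): the loop above implements
   the method of successive linear problems (SLP, Ruhe 1973). At each
   iteration the linear eigenproblem T(lambda_k)*u = mu*T'(lambda_k)*u is
   solved with LAPACK ggev, the correction mu of smallest magnitude is
   selected, and the approximation is updated as lambda_{k+1} = lambda_k - mu.
   For n=1 this reduces to Newton's method lambda - T(lambda)/T'(lambda).
*/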
PetscErrorCode DSSynchronize_NEP(DS ds,PetscScalar eigr[],PetscScalar eigi[])
{
  PetscInt    k=0;
  PetscMPIInt n,rank,size,off=0;

  if (ds->state>=DS_STATE_CONDENSED) k += ds->n;
  if (eigr) k += 1;
  if (eigi) k += 1;
  DSAllocateWork_Private(ds,k,0,0);
  PetscMPIIntCast(k*sizeof(PetscScalar),&size);
  PetscMPIIntCast(ds->n,&n);
  MPI_Comm_rank(PetscObjectComm((PetscObject)ds),&rank);
  if (!rank) {
    if (ds->state>=DS_STATE_CONDENSED) {
      MPI_Pack(ds->mat[DS_MAT_X],n,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    }
    if (eigr) {
      MPI_Pack(eigr,1,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    }
    if (eigi) {
      MPI_Pack(eigi,1,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    }
  }
  MPI_Bcast(ds->work,size,MPI_BYTE,0,PetscObjectComm((PetscObject)ds));
  if (rank) {
    if (ds->state>=DS_STATE_CONDENSED) {
      MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_X],n,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    }
    if (eigr) {
      MPI_Unpack(ds->work,size,&off,eigr,1,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    }
    if (eigi) {
      MPI_Unpack(ds->work,size,&off,eigi,1,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    }
  }
  return(0);
}
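/*
   Editorial note (not in the original source): since the SLP solver above
   returns a single eigenpair (ctx->neig==1), only the first column of X
   (n scalars) plus at most two eigenvalue scalars are packed on rank 0,
   broadcast as raw bytes, and unpacked on the remaining processes.
*/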
static PetscErrorCode DSNEPSetFN_NEP(DS ds,PetscInt n,FN fn[])
{
  DS_NEP   *ctx = (DS_NEP*)ds->data;
  PetscInt i;

  if (n<=0) SETERRQ1(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more functions, you have %D",n);
  if (n>DS_NUM_EXTRA) SETERRQ2(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too many functions, you specified %D but the limit is %D",n,DS_NUM_EXTRA);
  if (ds->ld) { PetscInfo(ds,"DSNEPSetFN() called after DSAllocate()\n"); }
  for (i=0;i<n;i++) {
    PetscObjectReference((PetscObject)fn[i]);
  }
  for (i=0;i<ctx->nf;i++) {
    FNDestroy(&ctx->f[i]);
  }
  for (i=0;i<n;i++) ctx->f[i] = fn[i];
  ctx->nf = n;
  return(0);
}
/*@
   DSNEPSetFN - Sets a number of functions that define the nonlinear
   eigenproblem.

   Collective on ds

   Input Parameters:
+  ds - the direct solver context
.  n  - number of functions
-  fn - array of functions

   Notes:
   The nonlinear eigenproblem is defined in terms of the split nonlinear
   operator T(lambda) = sum_i A_i*f_i(lambda).

   This function must be called before DSAllocate(). DSAllocate() will then
   allocate an extra matrix A_i for each function, which can be filled in
   the usual way.

   Level: advanced

.seealso: DSNEPGetFN(), DSAllocate()
@*/
PetscErrorCode DSNEPSetFN(DS ds,PetscInt n,FN fn[])
{
  PetscInt i;

  for (i=0;i<n;i++) {
    PetscValidHeaderSpecific(fn[i],FN_CLASSID,3);
    PetscCheckSameComm(ds,1,fn[i],3);
  }
  PetscTryMethod(ds,"DSNEPSetFN_C",(DS,PetscInt,FN[]),(ds,n,fn));
  return(0);
}
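/*
   Example usage (editorial sketch, not part of the original source): defining
   T(lambda) = A_0 + lambda*A_1 via two rational FN objects, f_0(x)=1 and
   f_1(x)=x; the DS object ds is assumed to have been created with
   DSSetType(ds,DSNEP), and ld is the desired leading dimension.

     FN          f[2];
     PetscScalar c0[1] = { 1.0 };         coefficients of f_0(x) = 1
     PetscScalar c1[2] = { 1.0, 0.0 };    coefficients of f_1(x) = x, highest degree first

     FNCreate(PETSC_COMM_SELF,&f[0]);
     FNSetType(f[0],FNRATIONAL);
     FNRationalSetNumerator(f[0],1,c0);
     FNCreate(PETSC_COMM_SELF,&f[1]);
     FNSetType(f[1],FNRATIONAL);
     FNRationalSetNumerator(f[1],2,c1);
     DSNEPSetFN(ds,2,f);
     DSAllocate(ds,ld);    then fill the extra matrices with DSGetArray(ds,DSMatExtra[i],...)
*/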
static PetscErrorCode DSNEPGetFN_NEP(DS ds,PetscInt k,FN *fn)
{
  DS_NEP *ctx = (DS_NEP*)ds->data;

  if (k<0 || k>=ctx->nf) SETERRQ1(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"k must be between 0 and %D",ctx->nf-1);
  *fn = ctx->f[k];
  return(0);
}
/*@
   DSNEPGetFN - Gets the functions associated with the nonlinear DS.

   Not collective, though parallel FNs are returned if the DS is parallel

   Input Parameters:
+  ds - the direct solver context
-  k  - the index of the requested function (starting at 0)

   Output Parameter:
.  fn - the function

   Level: advanced

.seealso: DSNEPSetFN()
@*/
PetscErrorCode DSNEPGetFN(DS ds,PetscInt k,FN *fn)
{
  PetscUseMethod(ds,"DSNEPGetFN_C",(DS,PetscInt,FN*),(ds,k,fn));
  return(0);
}
static PetscErrorCode DSNEPGetNumFN_NEP(DS ds,PetscInt *n)
{
  DS_NEP *ctx = (DS_NEP*)ds->data;

  *n = ctx->nf;
  return(0);
}

/*@
   DSNEPGetNumFN - Returns the number of functions stored internally by
   the DS.

   Not collective

   Input Parameter:
.  ds - the direct solver context

   Output Parameter:
.  n - the number of functions passed in DSNEPSetFN()

   Level: advanced

.seealso: DSNEPSetFN()
@*/
PetscErrorCode DSNEPGetNumFN(DS ds,PetscInt *n)
{
  PetscUseMethod(ds,"DSNEPGetNumFN_C",(DS,PetscInt*),(ds,n));
  return(0);
}
static PetscErrorCode DSNEPSetComputeMatrixFunction_NEP(DS ds,PetscErrorCode (*fun)(DS,PetscScalar,PetscBool,DSMatType,void*),void *ctx)
{
  DS_NEP *dsctx = (DS_NEP*)ds->data;

  dsctx->computematrix    = fun;
  dsctx->computematrixctx = ctx;
  return(0);
}

/*@
   DSNEPSetComputeMatrixFunction - Sets a user-provided subroutine to compute
   the matrix T(lambda) or its derivative T'(lambda).

   Logically Collective on ds

   Input Parameters:
+  ds  - the direct solver context
.  fun - a pointer to the user function
-  ctx - a context pointer (the last parameter to the user function)

   Calling Sequence of fun:
$  fun(DS ds,PetscScalar lambda,PetscBool deriv,DSMatType mat,void *ctx)

+  ds     - the direct solver object
.  lambda - point where T(lambda) or T'(lambda) must be evaluated
.  deriv  - if true compute T'(lambda), otherwise compute T(lambda)
.  mat    - the DS matrix where the result must be stored
-  ctx    - optional context, as set by DSNEPSetComputeMatrixFunction()

   Note:
   The result is computed as T(lambda) = sum_i E_i*f_i(lambda), and similarly
   for the derivative.

   Level: developer
@*/
PetscErrorCode DSNEPSetComputeMatrixFunction(DS ds,PetscErrorCode (*fun)(DS,PetscScalar,PetscBool,DSMatType,void*),void *ctx)
{
  PetscTryMethod(ds,"DSNEPSetComputeMatrixFunction_C",(DS,PetscErrorCode (*)(DS,PetscScalar,PetscBool,DSMatType,void*),void*),(ds,fun,ctx));
  return(0);
}
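/*
   Example callback (editorial sketch, not part of the original source): a
   user routine for T(lambda) = A - lambda*I, where A is passed through the
   context pointer as a dense column-major array whose leading dimension
   matches that of the DS.

     static PetscErrorCode MyComputeMatrix(DS ds,PetscScalar lambda,PetscBool deriv,DSMatType mat,void *ctxptr)
     {
       PetscScalar *T,*A = (PetscScalar*)ctxptr;
       PetscInt    i,j,n,ld;

       DSGetDimensions(ds,&n,NULL,NULL,NULL,NULL);
       DSGetLeadingDimension(ds,&ld);
       DSGetArray(ds,mat,&T);
       for (j=0;j<n;j++)
         for (i=0;i<n;i++)
           T[i+j*ld] = deriv? (i==j? -1.0: 0.0)
                            : A[i+j*ld] - (i==j? lambda: 0.0);
       DSRestoreArray(ds,mat,&T);
       return(0);
     }

   registered with DSNEPSetComputeMatrixFunction(ds,MyComputeMatrix,(void*)A).
*/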
PetscErrorCode DSDestroy_NEP(DS ds)
{
  DS_NEP   *ctx = (DS_NEP*)ds->data;
  PetscInt i;

  for (i=0;i<ctx->nf;i++) {
    FNDestroy(&ctx->f[i]);
  }
  PetscFree(ds->data);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetFN_C",NULL);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetFN_C",NULL);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetNumFN_C",NULL);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetComputeMatrixFunction_C",NULL);
  return(0);
}
SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS ds)
{
  DS_NEP *ctx;

  PetscNewLog(ds,&ctx);
  ds->data = (void*)ctx;

  ds->ops->allocate    = DSAllocate_NEP;
  ds->ops->view        = DSView_NEP;
  ds->ops->vectors     = DSVectors_NEP;
  ds->ops->solve[0]    = DSSolve_NEP_SLP;
  ds->ops->sort        = DSSort_NEP;
  ds->ops->synchronize = DSSynchronize_NEP;
  ds->ops->destroy     = DSDestroy_NEP;
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetFN_C",DSNEPSetFN_NEP);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetFN_C",DSNEPGetFN_NEP);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPGetNumFN_C",DSNEPGetNumFN_NEP);
  PetscObjectComposeFunction((PetscObject)ds,"DSNEPSetComputeMatrixFunction_C",DSNEPSetComputeMatrixFunction_NEP);
  return(0);
}