      SUBROUTINE DSBEVX_2STAGE( JOBZ, RANGE, UPLO, N, KD, AB, LDAB, Q,
     $                          LDQ, VL, VU, IL, IU, ABSTOL, M, W, Z,
     $                          LDZ, WORK, LWORK, IWORK, IFAIL, INFO )
*     .. Scalar Arguments ..
      CHARACTER          JOBZ, RANGE, UPLO
      INTEGER            IL, INFO, IU, KD, LDAB, LDQ, LDZ, M, N, LWORK
      DOUBLE PRECISION   ABSTOL, VL, VU
*     .. Array Arguments ..
      INTEGER            IFAIL( * ), IWORK( * )
      DOUBLE PRECISION   AB( LDAB, * ), Q( LDQ, * ), W( * ), WORK( * ),
     $                   Z( LDZ, * )
*     .. Parameters ..
      DOUBLE PRECISION   ZERO, ONE
      PARAMETER          ( ZERO = 0.0D0, ONE = 1.0D0 )
*     .. Local Scalars ..
      LOGICAL            ALLEIG, INDEIG, LOWER, TEST, VALEIG, WANTZ,
     $                   LQUERY
      CHARACTER          ORDER
      INTEGER            I, IINFO, IMAX, INDD, INDE, INDEE, INDIBL,
     $                   INDISP, INDIWO, INDWRK, ISCALE, ITMP1, J, JJ,
     $                   LLWORK, LWMIN, LHTRD, LWTRD, IB, INDHOUS,
     $                   NSPLIT
      DOUBLE PRECISION   ABSTLL, ANRM, BIGNUM, EPS, RMAX, RMIN, SAFMIN,
     $                   SIGMA, SMLNUM, TMP1, VLL, VUU
*     .. External Functions ..
      LOGICAL            LSAME
      INTEGER            ILAENV2STAGE
      DOUBLE PRECISION   DLAMCH, DLANSB
      EXTERNAL           LSAME, DLAMCH, DLANSB, ILAENV2STAGE
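*
*     Test the input parameters.
*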
      wantz = lsame( jobz, 'V' )
      alleig = lsame( range, 'A' )
      valeig = lsame( range, 'V' )
      indeig = lsame( range, 'I' )
      lower = lsame( uplo, 'L' )
      lquery = ( lwork.EQ.-1 )
*
      info = 0
      IF( .NOT.( lsame( jobz, 'N' ) ) ) THEN
         info = -1
      ELSE IF( .NOT.( alleig .OR. valeig .OR. indeig ) ) THEN
         info = -2
      ELSE IF( .NOT.( lower .OR. lsame( uplo, 'U' ) ) ) THEN
         info = -3
      ELSE IF( n.LT.0 ) THEN
         info = -4
      ELSE IF( kd.LT.0 ) THEN
         info = -5
      ELSE IF( ldab.LT.kd+1 ) THEN
         info = -7
      ELSE IF( wantz .AND. ldq.LT.max( 1, n ) ) THEN
         info = -9
      ELSE
         IF( valeig ) THEN
            IF( n.GT.0 .AND. vu.LE.vl )
     $         info = -11
         ELSE IF( indeig ) THEN
            IF( il.LT.1 .OR. il.GT.max( 1, n ) ) THEN
               info = -12
            ELSE IF( iu.LT.min( n, il ) .OR. iu.GT.n ) THEN
               info = -13
            END IF
         END IF
      END IF
      IF( info.EQ.0 ) THEN
         IF( ldz.LT.1 .OR. ( wantz .AND. ldz.LT.n ) )
     $      info = -18
      END IF
      IF( info.EQ.0 ) THEN
         ib    = ilaenv2stage( 2, 'DSYTRD_SB2ST', jobz,
     $                         n, kd, -1, -1 )
         lhtrd = ilaenv2stage( 3, 'DSYTRD_SB2ST', jobz,
     $                         n, kd, ib, -1 )
         lwtrd = ilaenv2stage( 4, 'DSYTRD_SB2ST', jobz,
     $                         n, kd, ib, -1 )
         lwmin = 2*n + lhtrd + lwtrd
         work( 1 ) = lwmin
*
         IF( lwork.LT.lwmin .AND. .NOT.lquery )
     $      info = -20
      END IF
*
      IF( info.NE.0 ) THEN
         CALL xerbla( 'DSBEVX_2STAGE ', -info )
         RETURN
      ELSE IF( lquery ) THEN
         RETURN
      END IF
         IF( .NOT.( vl.LT.tmp1 .AND. vu.GE.tmp1 ) )
     $      m = 0
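*
*     Get machine constants.
*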
      safmin = dlamch( 'Safe minimum' )
      eps = dlamch( 'Precision' )
      smlnum = safmin / eps
      bignum = one / smlnum
      rmin = sqrt( smlnum )
      rmax = min( sqrt( bignum ), one / sqrt( sqrt( safmin ) ) )
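*
*     Scale matrix to allowable range, if necessary.
*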
      iscale = 0
      abstll = abstol
      anrm = dlansb( 'M', uplo, n, kd, ab, ldab, work )
      IF( anrm.GT.zero .AND. anrm.LT.rmin ) THEN
         iscale = 1
         sigma = rmin / anrm
      ELSE IF( anrm.GT.rmax ) THEN
         iscale = 1
         sigma = rmax / anrm
      END IF
      IF( iscale.EQ.1 ) THEN
         IF( lower ) THEN
            CALL dlascl( 'B', kd, kd, one, sigma, n, n, ab, ldab, info )
         ELSE
            CALL dlascl( 'Q', kd, kd, one, sigma, n, n, ab, ldab, info )
         END IF
         IF( abstol.GT.0 )
     $      abstll = abstol*sigma
      END IF
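*
*     Call DSYTRD_SB2ST to reduce the symmetric band matrix to
*     tridiagonal form.
*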
      indd = 1
      inde = indd + n
      indhous = inde + n
      indwrk = indhous + lhtrd
      llwork = lwork - indwrk + 1
*
      CALL dsytrd_sb2st( "N", jobz, uplo, n, kd, ab, ldab, work( indd ),
     $                   work( inde ), work( indhous ), lhtrd,
     $                   work( indwrk ), llwork, iinfo )
*     If all eigenvalues are desired and ABSTOL is less than or equal
*     to zero, the tridiagonal eigenproblem can be solved directly by
*     DSTERF (eigenvalues only) or DSTEQR (with eigenvectors).
*
      test = .FALSE.
      IF( indeig ) THEN
         IF( il.EQ.1 .AND. iu.EQ.n ) THEN
            test = .TRUE.
         END IF
      END IF
      IF( ( alleig .OR. test ) .AND. ( abstol.LE.zero ) ) THEN
         CALL dcopy( n, work( indd ), 1, w, 1 )
         indee = indwrk + 2*n
         IF( .NOT.wantz ) THEN
            CALL dcopy( n-1, work( inde ), 1, work( indee ), 1 )
            CALL dsterf( n, w, work( indee ), info )
         ELSE
            CALL dlacpy( 'A', n, n, q, ldq, z, ldz )
            CALL dcopy( n-1, work( inde ), 1, work( indee ), 1 )
            CALL dsteqr( jobz, n, w, work( indee ), z, ldz,
     $                   work( indwrk ), info )
         END IF
      END IF
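*
*     Otherwise, call DSTEBZ and, if eigenvectors are desired, DSTEIN.
*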
      indibl = 1
      indisp = indibl + n
      indiwo = indisp + n
      CALL dstebz( range, order, n, vll, vuu, il, iu, abstll,
     $             work( indd ), work( inde ), m, nsplit, w,
     $             iwork( indibl ), iwork( indisp ), work( indwrk ),
     $             iwork( indiwo ), info )
*
      IF( wantz ) THEN
         CALL dstein( n, work( indd ), work( inde ), m, w,
     $                iwork( indibl ), iwork( indisp ), z, ldz,
     $                work( indwrk ), iwork( indiwo ), ifail, info )
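*
*        Apply the orthogonal matrix used in the reduction to
*        tridiagonal form to the eigenvectors returned by DSTEIN.
*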
         DO j = 1, m
            CALL dcopy( n, z( 1, j ), 1, work( 1 ), 1 )
            CALL dgemv( 'N', n, n, one, q, ldq, work, 1, zero,
     $                  z( 1, j ), 1 )
         END DO
      END IF
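*
*     If the matrix was scaled, rescale the eigenvalues appropriately.
*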
      IF( iscale.EQ.1 ) THEN
         imax = m
         IF( info.GT.0 )
     $      imax = info - 1
         CALL dscal( imax, one / sigma, w, 1 )
      END IF
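*
*     If eigenvalues are not in order, then sort them, along with
*     the eigenvectors.
*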
            IF( w( jj ).LT.tmp1 ) THEN
               i = jj
               tmp1 = w( jj )
            END IF
*
         IF( i.NE.0 ) THEN
            itmp1 = iwork( indibl+i-1 )
            w( i ) = w( j )
            iwork( indibl+i-1 ) = iwork( indibl+j-1 )
            w( j ) = tmp1
            iwork( indibl+j-1 ) = itmp1
            CALL dswap( n, z( 1, i ), 1, z( 1, j ), 1 )
            ifail( i ) = ifail( j )
         END IF
subroutine dsbevx_2stage(jobz, range, uplo, n, kd, ab, ldab, q, ldq, vl, vu, il, iu, abstol, m, w, z, ldz, work, lwork, iwork, ifail, info)
DSBEVX_2STAGE computes selected eigenvalues and, optionally, eigenvectors of a real symmetric band matrix A, using the two-stage technique for the reduction to tridiagonal form. Eigenvalues and eigenvectors can be selected by specifying either a range of values or a range of indices for the desired eigenvalues. As the argument check above enforces, only JOBZ = 'N' (eigenvalues only) is currently accepted.
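The following is a minimal calling sketch, not part of the LAPACK sources: the program name, the 4-by-4 test matrix (the tridiagonal 1-D Laplacian stored in upper band format with KD = 1), and all sizes and tolerances are arbitrary choices for illustration. It performs a workspace query with LWORK = -1, then requests the three smallest eigenvalues with JOBZ = 'N' (the only job currently accepted) and RANGE = 'I'.

      PROGRAM DSBEVX_2STAGE_EXAMPLE
      INTEGER            N, KD, LDAB, LDQ, LDZ, LWMAX
      PARAMETER          ( N = 4, KD = 1, LDAB = KD+1, LDQ = N,
     $                     LDZ = N, LWMAX = 1000 )
      INTEGER            I, IL, IU, M, INFO, LWORK
      INTEGER            IWORK( 5*N ), IFAIL( N )
      DOUBLE PRECISION   VL, VU, ABSTOL
      DOUBLE PRECISION   AB( LDAB, N ), Q( LDQ, N ), Z( LDZ, N ),
     $                   W( N ), WORK( LWMAX )
      EXTERNAL           DSBEVX_2STAGE
      INTRINSIC          INT, MIN
*
*     Upper band storage: AB( KD+1+i-j, j ) = A( i, j ).
*     Diagonal 2, off-diagonal -1 (1-D Laplacian); AB( 1, 1 ) is
*     not referenced but set for tidiness.
      AB( 1, 1 ) = 0.0D0
      DO 10 I = 1, N
         AB( 2, I ) = 2.0D0
   10 CONTINUE
      DO 20 I = 2, N
         AB( 1, I ) = -1.0D0
   20 CONTINUE
*
      IL = 1
      IU = 3
      VL = 0.0D0
      VU = 0.0D0
      ABSTOL = 0.0D0
*
*     Workspace query, then the actual call.
      LWORK = -1
      CALL DSBEVX_2STAGE( 'N', 'I', 'U', N, KD, AB, LDAB, Q, LDQ,
     $                    VL, VU, IL, IU, ABSTOL, M, W, Z, LDZ,
     $                    WORK, LWORK, IWORK, IFAIL, INFO )
      LWORK = MIN( LWMAX, INT( WORK( 1 ) ) )
      CALL DSBEVX_2STAGE( 'N', 'I', 'U', N, KD, AB, LDAB, Q, LDQ,
     $                    VL, VU, IL, IU, ABSTOL, M, W, Z, LDZ,
     $                    WORK, LWORK, IWORK, IFAIL, INFO )
*
      IF( INFO.EQ.0 ) THEN
         WRITE( *, * ) 'Eigenvalues:', ( W( I ), I = 1, M )
      ELSE
         WRITE( *, * ) 'DSBEVX_2STAGE failed, INFO =', INFO
      END IF
      END

The two-call pattern mirrors the LQUERY branch above: the first call only stores the minimal workspace size in WORK( 1 ) and returns.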