322 $ Q, LDQ, VL, VU, IL, IU, ABSTOL, M, W,
323 $ Z, LDZ, WORK, LWORK, RWORK, IWORK,
333 CHARACTER JOBZ, RANGE, UPLO
334 INTEGER IL, INFO, IU, KD, LDAB, LDQ, LDZ, M, N, LWORK
335 DOUBLE PRECISION ABSTOL, VL, VU
338 INTEGER IFAIL( * ), IWORK( * )
339 DOUBLE PRECISION RWORK( * ), W( * )
340 COMPLEX*16 AB( LDAB, * ), Q( LDQ, * ), WORK( * ),
347 DOUBLE PRECISION ZERO, ONE
348 PARAMETER ( ZERO = 0.0d0, one = 1.0d0 )
349 COMPLEX*16 CZERO, CONE
350 parameter( czero = ( 0.0d0, 0.0d0 ),
351 $ cone = ( 1.0d0, 0.0d0 ) )
354 LOGICAL ALLEIG, INDEIG, LOWER, TEST, VALEIG, WANTZ,
357 INTEGER I, IINFO, IMAX, INDD, INDE, INDEE, INDIBL,
358 $ INDISP, INDIWK, INDRWK, INDWRK, ISCALE, ITMP1,
359 $ llwork, lwmin, lhtrd, lwtrd, ib, indhous,
361 DOUBLE PRECISION ABSTLL, ANRM, BIGNUM, EPS, RMAX, RMIN, SAFMIN,
362 $ SIGMA, SMLNUM, TMP1, VLL, VUU
368 DOUBLE PRECISION DLAMCH, ZLANHB
369 EXTERNAL lsame, dlamch, zlanhb, ilaenv2stage
378 INTRINSIC dble, max, min, sqrt
384 wantz = lsame( jobz,
'V' )
385 alleig = lsame( range,
'A' )
386 valeig = lsame( range,
'V' )
387 indeig = lsame( range,
'I' )
388 lower = lsame( uplo,
'L' )
389 lquery = ( lwork.EQ.-1 )
392 IF( .NOT.( lsame( jobz,
'N' ) ) )
THEN
394 ELSE IF( .NOT.( alleig .OR. valeig .OR. indeig ) )
THEN
396 ELSE IF( .NOT.( lower .OR. lsame( uplo,
'U' ) ) )
THEN
398 ELSE IF( n.LT.0 )
THEN
400 ELSE IF( kd.LT.0 )
THEN
402 ELSE IF( ldab.LT.kd+1 )
THEN
404 ELSE IF( wantz .AND. ldq.LT.max( 1, n ) )
THEN
408 IF( n.GT.0 .AND. vu.LE.vl )
410 ELSE IF( indeig )
THEN
411 IF( il.LT.1 .OR. il.GT.max( 1, n ) )
THEN
413 ELSE IF( iu.LT.min( n, il ) .OR. iu.GT.n )
THEN
419 IF( ldz.LT.1 .OR. ( wantz .AND. ldz.LT.n ) )
428 ib = ilaenv2stage( 2,
'ZHETRD_HB2ST', jobz,
430 lhtrd = ilaenv2stage( 3,
'ZHETRD_HB2ST', jobz,
432 lwtrd = ilaenv2stage( 4,
'ZHETRD_HB2ST', jobz,
434 lwmin = lhtrd + lwtrd
438 IF( lwork.LT.lwmin .AND. .NOT.lquery )
443 CALL xerbla(
'ZHBEVX_2STAGE', -info )
445 ELSE IF( lquery )
THEN
460 ctmp1 = ab( kd+1, 1 )
464 IF( .NOT.( vl.LT.tmp1 .AND. vu.GE.tmp1 ) )
468 w( 1 ) = dble( ctmp1 )
477 safmin = dlamch(
'Safe minimum' )
478 eps = dlamch(
'Precision' )
479 smlnum = safmin / eps
480 bignum = one / smlnum
481 rmin = sqrt( smlnum )
482 rmax = min( sqrt( bignum ), one / sqrt( sqrt( safmin ) ) )
495 anrm = zlanhb(
'M', uplo, n, kd, ab, ldab, rwork )
496 IF( anrm.GT.zero .AND. anrm.LT.rmin )
THEN
499 ELSE IF( anrm.GT.rmax )
THEN
503 IF( iscale.EQ.1 )
THEN
505 CALL zlascl(
'B', kd, kd, one, sigma, n, n, ab, ldab,
508 CALL zlascl(
'Q', kd, kd, one, sigma, n, n, ab, ldab,
512 $ abstll = abstol*sigma
526 indwrk = indhous + lhtrd
527 llwork = lwork - indwrk + 1
530 $ rwork( indd ), rwork( inde ), work( indhous ),
531 $ lhtrd, work( indwrk ), llwork, iinfo )
539 IF (il.EQ.1 .AND. iu.EQ.n)
THEN
543 IF ((alleig .OR. test) .AND. (abstol.LE.zero))
THEN
544 CALL dcopy( n, rwork( indd ), 1, w, 1 )
546 IF( .NOT.wantz )
THEN
547 CALL dcopy( n-1, rwork( inde ), 1, rwork( indee ), 1 )
548 CALL dsterf( n, w, rwork( indee ), info )
550 CALL zlacpy(
'A', n, n, q, ldq, z, ldz )
551 CALL dcopy( n-1, rwork( inde ), 1, rwork( indee ), 1 )
552 CALL zsteqr( jobz, n, w, rwork( indee ), z, ldz,
553 $ rwork( indrwk ), info )
577 CALL dstebz( range, order, n, vll, vuu, il, iu, abstll,
578 $ rwork( indd ), rwork( inde ), m, nsplit, w,
579 $ iwork( indibl ), iwork( indisp ), rwork( indrwk ),
580 $ iwork( indiwk ), info )
583 CALL zstein( n, rwork( indd ), rwork( inde ), m, w,
584 $ iwork( indibl ), iwork( indisp ), z, ldz,
585 $ rwork( indrwk ), iwork( indiwk ), ifail, info )
591 CALL zcopy( n, z( 1, j ), 1, work( 1 ), 1 )
592 CALL zgemv(
'N', n, n, cone, q, ldq, work, 1, czero,
600 IF( iscale.EQ.1 )
THEN
606 CALL dscal( imax, one / sigma, w, 1 )
617 IF( w( jj ).LT.tmp1 )
THEN
624 itmp1 = iwork( indibl+i-1 )
626 iwork( indibl+i-1 ) = iwork( indibl+j-1 )
628 iwork( indibl+j-1 ) = itmp1
629 CALL zswap( n, z( 1, i ), 1, z( 1, j ), 1 )
632 ifail( i ) = ifail( j )
subroutine zhbevx_2stage(jobz, range, uplo, n, kd, ab, ldab, q, ldq, vl, vu, il, iu, abstol, m, w, z, ldz, work, lwork, rwork, iwork, ifail, info)
ZHBEVX_2STAGE computes selected eigenvalues and, optionally, eigenvectors of a complex Hermitian band matrix A, using the 2-stage technique for the reduction to tridiagonal form. Eigenvalues and eigenvectors can be selected by specifying either a range of values or a range of indices for the desired eigenvalues.