pseudosqrt.cpp
                }
            }
            return result;
        }

        // Matrix infinity norm. See Golub and van Loan (2.3.10) or
        // <http://en.wikipedia.org/wiki/Matrix_norm>
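        // In symbols: ||M||_inf = max_i sum_j |M[i][j]|
        // (the maximum absolute row sum).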
        Real normInf(const Matrix& M) {
            Size rows = M.rows();
            Size cols = M.columns();
            Real norm = 0.0;
            for (Size i=0; i<rows; ++i) {
                Real rowSum = 0.0;
                for (Size j=0; j<cols; ++j)
                    rowSum += std::fabs(M[i][j]);
                norm = std::max(norm, rowSum);
            }
            return norm;
        }

        // Take a matrix and make all the diagonal entries 1.
        const Disposable<Matrix>
        projectToUnitDiagonalMatrix(const Matrix& M) {
            Size size = M.rows();
            QL_REQUIRE(size == M.columns(),
                       "matrix not square");

            Matrix result(M);
            for (Size i=0; i<size; ++i)
                result[i][i] = 1.0;

            return result;
        }

        // Take a matrix and make all the eigenvalues non-negative
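        // (equivalently, the projection onto the positive-semidefinite
        // cone in the Frobenius norm: negative eigenvalues of the
        // spectral decomposition are clipped to zero)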
        const Disposable<Matrix>
        projectToPositiveSemidefiniteMatrix(const Matrix& M) {
            Size size = M.rows();
            QL_REQUIRE(size == M.columns(),
                       "matrix not square");

            Matrix diagonal(size, size, 0.0);
            SymmetricSchurDecomposition jd(M);
            for (Size i=0; i<size; ++i)
                diagonal[i][i] = std::max<Real>(jd.eigenvalues()[i], 0.0);

            Matrix result =
                jd.eigenvectors()*diagonal*transpose(jd.eigenvectors());
            return result;
        }

        // implementation of the Higham algorithm to find the nearest
        // correlation matrix.
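        // Alternating-projections scheme with Dykstra's correction
        // (the deltaS term): the iterate is repeatedly projected onto
        // the positive-semidefinite matrices and onto the unit-diagonal
        // matrices until the iterates stabilise.
        // See N. J. Higham, "Computing the nearest correlation matrix -
        // a problem from finance", IMA J. Numer. Anal. 22 (2002).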
        const Disposable<Matrix>
        highamImplementation(const Matrix& A,
                             const Size maxIterations,
                             const Real& tolerance) {

            Size size = A.rows();
            Matrix R, Y(A), X(A), deltaS(size, size, 0.0);

            Matrix lastX(X);
            Matrix lastY(Y);

            for (Size i=0; i<maxIterations; ++i) {
                R = Y - deltaS;
                X = projectToPositiveSemidefiniteMatrix(R);
                deltaS = X - R;
                Y = projectToUnitDiagonalMatrix(X);

                // convergence test
                if (std::max(normInf(X-lastX)/normInf(X),
                        std::max(normInf(Y-lastY)/normInf(Y),
                                normInf(Y-X)/normInf(Y)))
                        <= tolerance)
                {
                    break;
                }
                lastX = X;
                lastY = Y;
            }

            // ensure we return a symmetric matrix
            for (Size i=0; i<size; ++i)
                for (Size j=0; j<i; ++j)
                    Y[i][j] = Y[j][i];

            return Y;
        }

    }


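    // Returns a matrix S such that S*transpose(S) reproduces the input
    // (up to the chosen salvaging of non-positive-semidefinite inputs).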
    const Disposable<Matrix> pseudoSqrt(const Matrix& matrix,
                                        SalvagingAlgorithm::Type sa) {
        Size size = matrix.rows();

        #if defined(QL_EXTRA_SAFETY_CHECKS)
        checkSymmetry(matrix);
        #else
        QL_REQUIRE(size == matrix.columns(),
                   "non square matrix: " << size << " rows, " <<
                   matrix.columns() << " columns");
        #endif

        // spectral (a.k.a. Principal Component) analysis
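        // matrix = E*D*transpose(E), where the columns of
        // E = jd.eigenvectors() are orthonormal eigenvectors and D is
        // the diagonal matrix of eigenvalues, sorted in decreasing order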
        SymmetricSchurDecomposition jd(matrix);
        Matrix diagonal(size, size, 0.0);

        // salvaging algorithm
        Matrix result(size, size);
        bool negative;
        switch (sa) {
          case SalvagingAlgorithm::None:
            // eigenvalues are sorted in decreasing order
            QL_REQUIRE(jd.eigenvalues()[size-1]>=-1e-16,
                       "negative eigenvalue(s) ("
                       << std::scientific << jd.eigenvalues()[size-1]
                       << ")");
            result = CholeskyDecomposition(matrix, true);
            break;
          case SalvagingAlgorithm::Spectral:
            // negative eigenvalues set to zero
            for (Size i=0; i<size; i++)
                diagonal[i][i] =
                    std::sqrt(std::max<Real>(jd.eigenvalues()[i], 0.0));

            result = jd.eigenvectors() * diagonal;
            normalizePseudoRoot(matrix, result);
            break;
          case SalvagingAlgorithm::Hypersphere:
            // negative eigenvalues set to zero
            negative=false;
            for (Size i=0; i<size; ++i){
                diagonal[i][i] =
                    std::sqrt(std::max<Real>(jd.eigenvalues()[i], 0.0));
                if (jd.eigenvalues()[i]<0.0) negative=true;
            }
            result = jd.eigenvectors() * diagonal;
            normalizePseudoRoot(matrix, result);

            if (negative)
                result = hypersphereOptimize(matrix, result, false);
            break;
          case SalvagingAlgorithm::LowerDiagonal:
            // negative eigenvalues set to zero
            negative=false;
            for (Size i=0; i<size; ++i){
                diagonal[i][i] =
                    std::sqrt(std::max<Real>(jd.eigenvalues()[i], 0.0));
                if (jd.eigenvalues()[i]<0.0) negative=true;
            }
            result = jd.eigenvectors() * diagonal;

            normalizePseudoRoot(matrix, result);

            if (negative)
                result = hypersphereOptimize(matrix, result, true);
            break;
          case SalvagingAlgorithm::Higham: {
              int maxIterations = 40;
              Real tol = 1e-6;
              result = highamImplementation(matrix, maxIterations, tol);
              result = CholeskyDecomposition(result, true);
            }
            break;
          default:
            QL_FAIL("unknown salvaging algorithm");
        }

        return result;
    }


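    // Returns a size-by-rank pseudo-root built from the largest
    // eigenvalues: enough factors are kept to retain the requested
    // fraction of total variance, capped at maxRank; the result is then
    // passed through normalizePseudoRoot so that the diagonal of
    // S*transpose(S) matches the input matrix.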
    const Disposable<Matrix> rankReducedSqrt(const Matrix& matrix,
                                             Size maxRank,
                                             Real componentRetainedPercentage,
                                             SalvagingAlgorithm::Type sa) {
        Size size = matrix.rows();

        #if defined(QL_EXTRA_SAFETY_CHECKS)
        checkSymmetry(matrix);
        #else
        QL_REQUIRE(size == matrix.columns(),
                   "non square matrix: " << size << " rows, " <<
                   matrix.columns() << " columns");
        #endif

        QL_REQUIRE(componentRetainedPercentage>0.0,
                   "no eigenvalues retained");

        QL_REQUIRE(componentRetainedPercentage<=1.0,
                   "percentage to be retained > 100%");

        QL_REQUIRE(maxRank>=1,
                   "max rank required < 1");

        // spectral (a.k.a. Principal Component) analysis
        SymmetricSchurDecomposition jd(matrix);
        Array eigenValues = jd.eigenvalues();

        // salvaging algorithm
        switch (sa) {
          case SalvagingAlgorithm::None:
            // eigenvalues are sorted in decreasing order
            QL_REQUIRE(eigenValues[size-1]>=-1e-16,
                       "negative eigenvalue(s) ("
                       << std::scientific << eigenValues[size-1]
                       << ")");
            break;
          case SalvagingAlgorithm::Spectral:
            // negative eigenvalues set to zero
            for (Size i=0; i<size; ++i)
                eigenValues[i] = std::max<Real>(eigenValues[i], 0.0);
            break;
          case SalvagingAlgorithm::Higham:
              {
                  int maxIterations = 40;
                  Real tolerance = 1e-6;
                  Matrix adjustedMatrix = highamImplementation(matrix, maxIterations, tolerance);
                  jd = SymmetricSchurDecomposition(adjustedMatrix);
                  eigenValues = jd.eigenvalues();
              }
              break;
          default:
            QL_FAIL("unknown or invalid salvaging algorithm");
        }

        // factor reduction
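        // keep the smallest number of leading eigenvalues whose sum
        // reaches componentRetainedPercentage of the total
        // (capped at maxRank below)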
        Real enough = componentRetainedPercentage *
                      std::accumulate(eigenValues.begin(),
                                      eigenValues.end(), 0.0);
        if (componentRetainedPercentage == 1.0) {
            // numerical glitches might cause some factors to be discarded
            enough *= 1.1;
        }
        // retain at least one factor
        Real components = eigenValues[0];
        Size retainedFactors = 1;
        for (Size i=1; components<enough && i<size; ++i) {
            components += eigenValues[i];
            retainedFactors++;
        }
        // the output is guaranteed to have rank <= maxRank
        retainedFactors=std::min(retainedFactors, maxRank);

        Matrix diagonal(size, retainedFactors, 0.0);
        for (Size i=0; i<retainedFactors; ++i)
            diagonal[i][i] = std::sqrt(eigenValues[i]);
        Matrix result = jd.eigenvectors() * diagonal;

        normalizePseudoRoot(matrix, result);
        return result;
    }

}
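
A minimal usage sketch (assuming the QuantLib namespace and the matrix-utility
headers declaring Matrix, pseudoSqrt and rankReducedSqrt are available; the
exact header path varies across QuantLib versions):

    using namespace QuantLib;

    Matrix corr(3, 3);
    corr[0][0] = 1.0; corr[0][1] = 0.9; corr[0][2] = 0.7;
    corr[1][0] = 0.9; corr[1][1] = 1.0; corr[1][2] = 0.4;
    corr[2][0] = 0.7; corr[2][1] = 0.4; corr[2][2] = 1.0;

    // full-rank pseudo square root; the salvaging algorithm repairs the
    // matrix first if it is not positive semidefinite
    Matrix s = pseudoSqrt(corr, SalvagingAlgorithm::Spectral);
    // s * transpose(s) approximates corr

    // rank-2 approximation retaining up to 100% of the (salvaged) variance
    Matrix s2 = rankReducedSqrt(corr, 2, 1.0, SalvagingAlgorithm::Spectral);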
