/**
 * @brief Self [co]variance matrix between the test data, Kss(Z, Z)
 * @param [in] logHyp          The log hyperparameters
 *                             - logHyp(0) = \f$\log(l)\f$
 *                             - logHyp(1) = \f$\log(\sigma_f)\f$
 * @param [in] testData        The test data
 * @param [in] fVarianceVector Flag for the return value
 *                             - fVarianceVector = true : return \f$\mathbf{k}_{**} \in \mathbb{R}^{M \times 1}, \mathbf{k}_{**}^i = k(\mathbf{Z}_i, \mathbf{Z}_i)\f$ (default)
 *                             - fVarianceVector = false: return \f$\mathbf{K}_{**} = \mathbf{K}(\mathbf{Z}, \mathbf{Z}) \in \mathbb{R}^{M \times M}\f$,\n
 *                               which can be used for Bayesian Committee Machines.
 * @return A matrix pointer\n
 *         - Mx1 (fVarianceVector == true)
 *         - MxM (fVarianceVector == false)\n
 *         M: The number of test data
 */
static MatrixPtr Kss(const Hyp &logHyp, const TestData<Scalar> &testData, const bool fVarianceVector = true)
{
	// The number of test data
	const int M = testData.M();

	// sigma_f^2 = exp(2 * log(sigma_f)); std::exp resolves the correct
	// float/double overload for Scalar.
	const Scalar sigma_f2 = std::exp(static_cast<Scalar>(2.0) * logHyp(1));

	// Output
	MatrixPtr pKss;

	// K: self-variance vector (Mx1)
	if(fVarianceVector)
	{
		// Every self-covariance of this kernel is the same: k(z, z) = sigma_f^2.
		pKss.reset(new Matrix(M, 1));
		pKss->fill(sigma_f2);
	}
	// K: self-covariance matrix (MxM)
	else
	{
		// Pairwise squared distances between all test inputs.
		MatrixPtr pAbsDistXsXs = PairwiseOp<Scalar>::sqDist(testData.pXs()); // MxM
		// In-place element-wise sqrt: squared distances -> absolute distances.
		// Coefficient-wise ops alias safely in Eigen, so no noalias()/temporary
		// is needed (noalias() is intended for matrix products).
		pAbsDistXsXs->array() = pAbsDistXsXs->array().sqrt();
		// Evaluate the covariance function K(r) on the distance matrix.
		pKss = K(logHyp, pAbsDistXsXs);
	}

	return pKss;
}
/**
 * @brief Gets the cross absolute distances between the training and test inputs
 * @param [in] testData The test data holding the M test inputs
 * @return A matrix pointer
 * \f[
 * \mathbf{R} \in \mathbb{R}^{N \times M}, \quad
 * \mathbf{R}_{ij} = |\mathbf{x}_i - \mathbf{z}_j|
 * \f]
 * @todo Include this matrix as a member variable like m_pDistXX
 */
MatrixPtr pAbsDistXXs(const TestData<Scalar> &testData) const
{
	// Squared cross distances between the N training and M test inputs.
	MatrixPtr pAbsDist = pSqDistXXs(testData); // NxM
	// In-place element-wise sqrt: squared distances -> absolute distances.
	// Coefficient-wise ops alias safely in Eigen, so no noalias()/temporary
	// is needed (noalias() is intended for matrix products).
	pAbsDist->array() = pAbsDist->array().sqrt();
	return pAbsDist;
}