	// copy constructor for batch processing:
	// copies the n test inputs starting at row startRow of other
	TestData(const TestData &other, const int startRow, const int n)
	{
		assert(other.M() > 0);
		assert(startRow >= 0 && startRow + n <= other.M());
		assert(n > 0);
		if(other.M() > 0 && n > 0 && startRow >= 0 && startRow + n <= other.M())
		{
			//m_pXs.reset(new Matrix(n, other.D()));
			//m_pXs->noalias() = other.m_pXs->middleRows(startRow, n);
			m_pXs.reset(new Matrix(other.m_pXs->middleRows(startRow, n)));
		}
	}
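	/* Usage sketch (not part of the original code): splitting a full test set
	 * into fixed-size batches with the copy constructor above. The Scalar type
	 * (float) and the names fullData/batchSize are assumptions for illustration.
	 *
	 *   const int batchSize = 1000;
	 *   for(int startRow = 0; startRow < fullData.M(); startRow += batchSize)
	 *   {
	 *       const int n = std::min(batchSize, fullData.M() - startRow);
	 *       TestData<float> batch(fullData, startRow, n);	// rows [startRow, startRow + n)
	 *       // ... predict on the n-point batch ...
	 *   }
	 */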
	/**
	 * @brief Self [co]variance matrix between the test data, Kss(Z, Z)
	 * @param [in] logHyp          The log hyperparameters
	 *                             - logHyp(0) = \f$\log(l)\f$
	 *                             - logHyp(1) = \f$\log(\sigma_f)\f$
	 * @param [in] testData        The test data
	 * @param [in] fVarianceVector Flag for the return value
	 *                             - fVarianceVector = true : return \f$\mathbf{k}_{**} \in \mathbb{R}^{M \times 1}, \mathbf{k}_{**}^i = k(\mathbf{Z}_i, \mathbf{Z}_i)\f$ (default)
	 *                             - fVarianceVector = false: return \f$\mathbf{K}_{**} = \mathbf{K}(\mathbf{Z}, \mathbf{Z}) \in \mathbb{R}^{M \times M}\f$,\n
	 *                               which can be used for Bayesian Committee Machines.
	 * @return A matrix pointer\n
	 *         - Mx1 (fVarianceVector == true)
	 *         - MxM (fVarianceVector == false)\n
	 *         M: The number of test data
	 */
	static MatrixPtr Kss(const Hyp &logHyp, const TestData<Scalar> &testData, const bool fVarianceVector = true)
	{
		// The number of test data
		const int M = testData.M();

		// Some constant values
		const Scalar sigma_f2 = exp(static_cast<Scalar>(2.0) * logHyp(1));	// sigma_f^2

		// Output
		MatrixPtr pKss;

		// K: self-variance vector (Mx1)
		if(fVarianceVector)
		{
			// k(z, z) = sigma_f^2
			pKss.reset(new Matrix(M, 1));
			pKss->fill(sigma_f2);
		}
		// K: self-covariance matrix (MxM)
		else
		{
			// K(r)
			MatrixPtr pAbsDistXsXs = PairwiseOp<Scalar>::sqDist(testData.pXs());	// MxM
			pAbsDistXsXs->noalias() = pAbsDistXsXs->cwiseSqrt();
			pKss = K(logHyp, pAbsDistXsXs);
		}

		return pKss;
	}
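	/* Usage sketch (not part of the original code): querying Kss() for both
	 * return shapes. The enclosing covariance class name (CovSEiso) and the
	 * Scalar type (float) are assumptions for illustration.
	 *
	 *   CovSEiso<float>::Hyp logHyp;
	 *   logHyp(0) = log(1.0f);	// log(l):       unit length scale
	 *   logHyp(1) = log(0.5f);	// log(sigma_f): signal standard deviation 0.5
	 *   MatrixPtr pVar = CovSEiso<float>::Kss(logHyp, testData);		// Mx1, each entry sigma_f^2
	 *   MatrixPtr pCov = CovSEiso<float>::Kss(logHyp, testData, false);	// MxM, e.g. for BCM-style fusion
	 */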
	/**
	 * @brief Gets the cross differences between the training and test inputs.
	 * @param [in] testData The test data containing the M test inputs
	 * @param [in] coord    The corresponding coordinate (dimension index), c
	 * @return A matrix pointer
	 *         \f[
	 *         \mathbf{D} \in \mathbb{R}^{N \times M}, \quad
	 *         \mathbf{D}_{ij} = \mathbf{x}_i^c - \mathbf{z}_j^c
	 *         \f]
	 * @todo Include this matrix as a member variable like m_pDeltaXXList
	 */
	MatrixPtr pDeltaXXs(const TestData<Scalar> &testData, const int coord) const
	{
		assert(m_pX && testData.M() > 0);
		assert(D() == testData.D());
		return PairwiseOp<Scalar>::delta(m_pX, testData.pXs(), coord);	// NxM
	}
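	/* Usage sketch (not part of the original code): collecting the NxM cross
	 * differences for every input coordinate, e.g. when evaluating covariance
	 * derivatives. The trainingData variable name is an assumption.
	 *
	 *   std::vector<MatrixPtr> deltaXXsList;
	 *   for(int coord = 0; coord < trainingData.D(); coord++)
	 *       deltaXXsList.push_back(trainingData.pDeltaXXs(testData, coord));	// each NxM
	 */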
	/**
	 * @brief Gets the cross squared distances between the training and test inputs
	 * @param [in] testData The test data containing the M test inputs
	 * @return A matrix pointer
	 *         \f[
	 *         \mathbf{R^2} \in \mathbb{R}^{N \times M}, \quad
	 *         \mathbf{R^2}_{ij} = (\mathbf{x}_i - \mathbf{z}_j)^\text{T}(\mathbf{x}_i - \mathbf{z}_j)
	 *         \f]
	 * @todo Include this matrix as a member variable like m_pSqDistXX
	 */
	MatrixPtr pSqDistXXs(const TestData<Scalar> &testData) const
	{
		assert(m_pX && testData.M() > 0);
		assert(D() == testData.D());
		return PairwiseOp<Scalar>::sqDist(m_pX, testData.pXs());	// NxM
	}
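	/* Usage sketch (not part of the original code): the NxM cross squared
	 * distances are the usual input for building the cross-covariance K(X, Z),
	 * mirroring how Kss() above converts squared distances to absolute
	 * distances. The trainingData variable name is an assumption.
	 *
	 *   MatrixPtr pSqDistXXs = trainingData.pSqDistXXs(testData);	// NxM, R^2_ij
	 *   pSqDistXXs->noalias() = pSqDistXXs->cwiseSqrt();		// NxM, R_ij
	 */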