template<typename PointT> inline void
pcl::PCA<PointT>::update (const PointT& input_point, FLAG flag)
{
  if (!compute_done_)
    initCompute ();
  if (!compute_done_)
    PCL_THROW_EXCEPTION (InitFailedException, "[pcl::PCA::update] PCA initCompute failed");

  Eigen::Vector3f input (input_point.x, input_point.y, input_point.z);
  const std::size_t n = eigenvectors_.cols ();  // number of eigenvectors

  // Running mean after folding in the new point.
  Eigen::VectorXf meanp = (float (n) * mean_.head<3> () + input) / float (n + 1);

  // Project the new point onto the current basis; h is the normalized part of the
  // residual that the current subspace cannot represent.
  Eigen::VectorXf a = eigenvectors_.transpose () * (input - mean_.head<3> ());
  Eigen::VectorXf y = (eigenvectors_ * a) + mean_.head<3> ();
  Eigen::VectorXf h = y - input;
  if (h.norm () > 0)
    h.normalize ();
  else
    h.setZero ();
  float gamma = h.dot (input - mean_.head<3> ());

  // Assemble the (n+1)x(n+1) matrix D of the incremental eigenproblem: the
  // outer-product block is weighted by n/(n+1)^2, the old eigenvalues by n/(n+1).
  Eigen::MatrixXf D = Eigen::MatrixXf::Zero (a.size () + 1, a.size () + 1);
  D.block (0, 0, n, n) = a * a.transpose ();
  D *= float (n) / float ((n + 1) * (n + 1));
  for (std::size_t i = 0; i < n; ++i)
  {
    D (i, i) += float (n) / float (n + 1) * eigenvalues_ (i);
    D (D.rows () - 1, i) = float (n) / float ((n + 1) * (n + 1)) * gamma * a (i);
    D (i, D.cols () - 1) = D (D.rows () - 1, i);
  }
  D (D.rows () - 1, D.cols () - 1) = float (n) / float ((n + 1) * (n + 1)) * gamma * gamma;

  // D is symmetric: decompose it, sort the eigenvalues in decreasing order and
  // collect the matching eigenvectors as the rotation R of the augmented basis.
  Eigen::SelfAdjointEigenSolver<Eigen::MatrixXf> D_evd (D);
  const Eigen::VectorXf& alphap = D_evd.eigenvalues ();   // ascending order
  const Eigen::MatrixXf& Vp = D_evd.eigenvectors ();
  Eigen::MatrixXf R (D.rows (), D.cols ());
  eigenvalues_.resize (eigenvalues_.size () + 1);
  for (Eigen::Index i = 0; i < eigenvalues_.size (); ++i)
  {
    eigenvalues_ (i) = alphap (eigenvalues_.size () - i - 1);
    R.col (i) = Vp.col (eigenvalues_.size () - i - 1);
  }

  // Augment the basis with the residual direction h and rotate it by R.
  Eigen::MatrixXf Up = Eigen::MatrixXf::Zero (eigenvectors_.rows (), eigenvectors_.cols () + 1);
  Up.topLeftCorner (eigenvectors_.rows (), eigenvectors_.cols ()) = eigenvectors_;
  Up.rightCols<1> () = h;
  eigenvectors_ = Up * R;

  if (!basis_only_)
  {
    // Re-express the stored projections of the old points in the rotated, augmented basis.
    Eigen::VectorXf etha = Up.transpose () * (mean_.head<3> () - meanp);
    coefficients_.conservativeResize (coefficients_.rows () + 1, coefficients_.cols () + 1);
    for (Eigen::Index i = 0; i < coefficients_.cols () - 1; ++i)
    {
      coefficients_ (coefficients_.rows () - 1, i) = 0;
      coefficients_.col (i) = (R.transpose () * coefficients_.col (i)) + etha;
    }
    // The new point itself: its projection a, padded with a zero for the new component.
    a.conservativeResize (a.size () + 1);
    a (a.size () - 1) = 0;
    coefficients_.col (coefficients_.cols () - 1) = (R.transpose () * a) + etha;
  }

  mean_.head<3> () = meanp;

  switch (flag)
  {
    case increase:
      // Keep the enlarged basis while it has no more components than dimensions;
      // otherwise fall through and trim it back to its previous size.
      if (eigenvectors_.rows () >= eigenvectors_.cols ())
        break;
    case preserve:
      if (!basis_only_)
        coefficients_.conservativeResize (coefficients_.rows () - 1, Eigen::NoChange);
      eigenvectors_.conservativeResize (Eigen::NoChange, eigenvectors_.cols () - 1);
      eigenvalues_.conservativeResize (eigenvalues_.size () - 1);
      break;
    default:
      PCL_ERROR ("[pcl::PCA] unknown flag\n");
  }
}
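// Illustrative usage sketch (not part of the original source): how update() can fold new
// points into an already-computed PCA model instead of recomputing it from scratch. It
// assumes the standard pcl::PCA<PointT> accessors (setInputCloud, getEigenVectors,
// getEigenValues); the function and variable names below are hypothetical and chosen
// only for the example.
#include <vector>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/pca.h>

inline void
incremental_pca_example (const pcl::PointCloud<pcl::PointXYZ>::ConstPtr& cloud,
                         const std::vector<pcl::PointXYZ>& new_points)
{
  pcl::PCA<pcl::PointXYZ> pca;
  pca.setInputCloud (cloud);     // the decomposition itself is computed lazily on first access

  // Fold each new point into the existing eigenbasis; `preserve` keeps the number of
  // retained components fixed, `increase` lets the basis grow up to the point dimension.
  for (const auto& pt : new_points)
    pca.update (pt, pcl::PCA<pcl::PointXYZ>::preserve);

  Eigen::Matrix3f basis = pca.getEigenVectors ();   // columns are the principal directions
  Eigen::Vector3f variances = pca.getEigenValues ();
}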
virtual void update_one_sgd_step(const Instance& ins, double step_size) {
  // Score of the observed (positive) user-item pair.
  double pred = predict(ins);
  size_t user_id, item_id, item_neg_id;
  user_id = ins.get_feature_group_index(0, 0);
  item_id = ins.get_feature_group_index(1, 0) + data_->feature_group_start_idx(1);

  for (size_t idx = 0; idx < num_neg_; idx++) {
    ///////////////////////////////////////////
    // Sample a negative (unobserved) item for this user and score that pair.
    item_neg_id = sample_negative_item(user_id);  // TODO
    double pred_neg = predict_user_item_rating(user_id, item_neg_id - num_users_);

    // Pairwise ranking loss on the score difference between the positive and the
    // sampled negative item.
    double pair_pred = pred - pred_neg;
    double gradient = loss_->gradient(pair_pred, 1.);

    // Latent-factor gradients: L2 regularization plus the pairwise loss term.
    // The negative item enters the score difference with a minus sign.
    DMatrix user_grad = factors_.row(user_id) * lambda_;
    DMatrix item_grad = factors_.row(item_id) * lambda_;
    DMatrix item_neg_grad = factors_.row(item_neg_id) * lambda_;
    user_grad += gradient * (factors_.row(item_id) - factors_.row(item_neg_id));
    item_grad += gradient * factors_.row(user_id);
    item_neg_grad += -1. * gradient * factors_.row(user_id);

    ////////////////////////////////////////
    // Update the item biases, optionally rescaling each gradient by AdaGrad
    // (divide by the root of the accumulated squared gradients).
    double grad;
    grad = coefficients_(item_id) * lambda_ + gradient;
    if (using_adagrad_) {
      coeff_grad_square_(item_id) += grad * grad;
      grad /= std::sqrt(coeff_grad_square_(item_id));
    }
    coefficients_(item_id) -= step_size * grad;

    grad = coefficients_(item_neg_id) * lambda_ - gradient;  // minus sign: negative item
    if (using_adagrad_) {
      coeff_grad_square_(item_neg_id) += grad * grad;
      grad /= std::sqrt(coeff_grad_square_(item_neg_id));
    }
    coefficients_(item_neg_id) -= step_size * grad;

    // Update the latent factors with the same optional AdaGrad scaling.
    if (using_adagrad_) {
      factor_grad_square_.row(user_id) += user_grad.cwiseProduct(user_grad);
      user_grad = user_grad.cwiseQuotient(factor_grad_square_.row(user_id).cwiseSqrt());
    }
    factors_.row(user_id) -= step_size * user_grad;

    if (using_adagrad_) {
      factor_grad_square_.row(item_id) += item_grad.cwiseProduct(item_grad);
      item_grad = item_grad.cwiseQuotient(factor_grad_square_.row(item_id).cwiseSqrt());
    }
    factors_.row(item_id) -= step_size * item_grad;

    if (using_adagrad_) {
      factor_grad_square_.row(item_neg_id) += item_neg_grad.cwiseProduct(item_neg_grad);
      item_neg_grad = item_neg_grad.cwiseQuotient(factor_grad_square_.row(item_neg_id).cwiseSqrt());
    }
    factors_.row(item_neg_id) -= step_size * item_neg_grad;
  }
}
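// Minimal sketch (not part of the class above) of the AdaGrad scaling applied to each
// update in update_one_sgd_step: accumulate the squared gradient per parameter and
// divide the current gradient by the square root of that running sum. Types and names
// here (Eigen row vectors, `lr`, `eps`) are assumptions made for the sketch; the member
// function above uses its own DMatrix rows and omits the epsilon guard.
#include <Eigen/Dense>

inline void adagrad_row_update(Eigen::RowVectorXd& param,        // e.g. one factor row
                               Eigen::RowVectorXd& grad_square,  // running sum of squared gradients
                               const Eigen::RowVectorXd& grad,   // current gradient for that row
                               double lr,                        // step size
                               double eps = 1e-8)                // guards against division by zero
{
  grad_square += grad.cwiseProduct(grad);  // G <- G + g .* g (element-wise)
  param -= lr * grad.cwiseQuotient((grad_square.array() + eps).sqrt().matrix());
}

// The per-parameter denominator shrinks the step for frequently updated rows, which is
// why the member function above keeps separate accumulators for the biases
// (coeff_grad_square_) and for the factor rows (factor_grad_square_).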