// PA-II multiclass training step: apply one hinge-loss update for a
// labeled example, registering/touching the label as a side effect.
void passive_aggressive_2::train(const common::sfv_t& sfv, const string& label) {
  check_touchable(label);
  labels_.get_model()->increment(label);

  string incorrect_label;
  const float margin = calc_margin(sfv, label, incorrect_label);
  const float hinge_loss = 1.f + margin;

  // Margin constraint satisfied: just make sure the label is known.
  if (hinge_loss < 0.f) {
    storage_->register_label(label);
    return;
  }

  const float norm2 = squared_norm(sfv);
  if (norm2 == 0.f) {  // an all-zero feature vector carries no signal
    storage_->register_label(label);
    return;
  }

  // PA-II step size: loss / (2*||x||^2 + 1/(2*C)), with
  // C == config_.regularization_weight.
  const float step =
      hinge_loss / (2 * norm2 + 1 / (2 * config_.regularization_weight));
  update_weight(sfv, step, label, incorrect_label);
  touch(label);
}
void NXVectorTest::squared_normTest(void) { double pData[4] = { 5.0, 6.0, 7.0, 8.0 }; NXVectorRef4d p(pData); double const pSquaredNorm = squared_norm(p); double const answer = (pData[0] * pData[0] + pData[1] * pData[1] + pData[2] * pData[2] + pData[3] * pData[3]); CPPUNIT_ASSERT(pSquaredNorm == answer); }
// Classic passive-aggressive update: when the margin constraint is
// violated, correct the weights proportionally to the hinge loss.
void PA::train(const sfv_t& sfv, const string& label) {
  string incorrect_label;
  const float margin = calc_margin(sfv, label, incorrect_label);
  const float hinge_loss = 1.f + margin;
  if (hinge_loss < 0.f) {
    return;  // constraint already satisfied; nothing to learn
  }
  const float norm2 = squared_norm(sfv);
  if (norm2 == 0.f) {
    return;  // degenerate (all-zero) feature vector
  }
  // PA step size: tau = loss / ||x||^2
  update_weight(sfv, hinge_loss / norm2, label, incorrect_label);
}
// PA-II training step (variant without label registration): update the
// weights only when the hinge loss is positive and the input is non-zero.
void passive_aggressive_2::train(const common::sfv_t& sfv, const string& label) {
  string incorrect_label;
  const float margin = calc_margin(sfv, label, incorrect_label);
  const float hinge_loss = 1.f + margin;
  if (hinge_loss < 0.f) {
    return;  // margin constraint already satisfied
  }
  const float norm2 = squared_norm(sfv);
  if (norm2 == 0.f) {
    return;  // zero vector: no direction to update along
  }
  // PA-II step size: loss / (2*||x||^2 + 1/(2*C))
  const float step = hinge_loss / (2 * norm2 + 1 / (2 * config_.C));
  update_weight(sfv, step, label, incorrect_label);
}
void quaternion_t::rectify() { quaternion_t temp_q = *this; if( fabs( squared_norm() - 1 ) > PRX_ZERO_CHECK ) normalize(); if(is_approximate_equal(temp_q)) *this = temp_q; // double theta = 2.0 * acos( q[3] ); // double d = sin(theta*.5); // if( d == 0 ) // { // q[0] = q[1] = q[2] = 0; // q[3] = 1; // } // else // { // double ax = q[0]/d; // double ay = q[1]/d; // double az = q[2]/d; // double norm = sqrt( ax*ax + ay*ay + az*az ); // if( fabs( norm - 1 ) < PRX_ZERO_CHECK ) // { // ax /= norm; // ay /= norm; // az /= norm; // q[0] = sin( theta / 2.0 ) * ax; // q[1] = sin( theta / 2.0 ) * ay; // q[2] = sin( theta / 2.0 ) * az; // } // else // { // q[0] = 0.0; // q[1] = 0.0; // q[2] = 0.0; // q[3] = 1.0; // } // } }
// PA-I multiclass training step: hinge-loss update with the step size
// clipped at the aggressiveness parameter C.
void passive_aggressive_1::train(const common::sfv_t& sfv, const string& label) {
  string incorrect_label;
  const float margin = calc_margin(sfv, label, incorrect_label);
  const float hinge_loss = 1.f + margin;

  // No violation: make sure the label exists in storage and stop.
  if (hinge_loss < 0.f) {
    storage_->register_label(label);
    return;
  }

  const float norm2 = squared_norm(sfv);
  if (norm2 == 0.f) {  // all-zero feature vector: nothing to update with
    storage_->register_label(label);
    return;
  }

  // PA-I step size: min(C, loss / (2*||x||^2))
  const float step = min(config_.C, hinge_loss / (2 * norm2));
  update_weight(sfv, step, label, incorrect_label);
  touch(label);
}
// PA regression update: learn only when the absolute prediction error
// exceeds an adaptive insensitivity band, sensitivity * running std-dev
// of the observed target values.
void passive_aggressive::train(const common::sfv_t& fv, float value) {
  // Maintain running first/second moments of the targets.
  sum_ += value;
  sq_sum_ += value * value;
  count_ += 1;
  const float mean = sum_ / count_;
  const float std_dev = std::sqrt(sq_sum_ / count_ - mean * mean);

  const float predicted = estimate(fv);
  const float error = value - predicted;
  const float sign_error = error > 0.f ? 1.0f : -1.0f;

  // Epsilon-insensitive loss with a data-driven epsilon.
  const float loss = sign_error * error - config_.sensitivity * std_dev;
  if (loss > 0.f) {
    const float coeff = sign_error * loss / squared_norm(fv);
    // A zero feature vector makes coeff infinite; skip the update then.
    if (!std::isinf(coeff)) {
      update(fv, coeff);
    }
  }
}
// Euclidean length of the vector: sqrt of its squared norm.
double vector_t::norm() const {
  const double norm2 = squared_norm();
  return sqrt(norm2);
}
// Magnitude of the quaternion: sqrt of its squared norm.
double quaternion_t::norm() const {
  const double norm2 = squared_norm();
  return sqrt(norm2);
}
// Invert in place: q := conj(q) / |q|^2.
// NOTE(review): the squared norm is taken *after* conjugation; for
// quaternion/complex-style types |conj(q)|^2 == |q|^2, so this matches
// the usual inverse formula — confirm that holds for this class.
// No zero-norm guard: assumes the value is non-zero (division by a
// zero squared norm would otherwise occur).
void inverse() { conjugate(); *this /= squared_norm(); }
/// Length computed as the square root of the squared norm, using the
/// type-parameterized cmath<T> wrapper for the sqrt implementation.
T norm() const {
  const T norm2 = squared_norm();
  return cmath<T>::sqrt(norm2);
}