Example #1
void CLatentModel::argmax_h(const SGVector<float64_t>& w)
{
	int32_t num = get_num_vectors();
	CBinaryLabels* y = CBinaryLabels::obtain_from_generic(m_labels->get_labels());
	ASSERT(num > 0);
	ASSERT(num == m_labels->get_num_labels());
	

	// argmax_h only for positive examples
	for (int32_t i = 0; i < num; ++i)
	{
		if (y->get_label(i) == 1)
		{
			// infer h and set it for the argmax_h <w,psi(x,h)>
			CData* latent_data = infer_latent_variable(w, i);
			m_labels->set_latent_label(i, latent_data);
		}
	}
}
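For context, argmax_h() is one half of the alternating optimization used by latent-variable SVMs: with the weight vector w held fixed, it re-infers the latent variable h for each positive example via infer_latent_variable(). A rough sketch of the surrounding loop is shown below; solve_w(), max_iter and the use of get_dim() are illustrative assumptions, not calls taken from the snippet above.

// Hypothetical outer loop around the argmax_h() shown above.
// solve_w() is a placeholder for the convex SVM solve over w, not a Shogun call.
SGVector<float64_t> w(model->get_dim());  // assumes the model reports the dimension of psi(x,h)
w.zero();
for (int32_t iter = 0; iter < max_iter; ++iter)
{
	model->argmax_h(w);    // w fixed: re-infer latent variables for positive examples
	w = solve_w(model);    // h fixed: solve the resulting convex SVM for w
}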
Example #2
bool CDomainAdaptationSVM::train_machine(CFeatures* data)
{

	if (data)
	{
		if (m_labels->get_num_labels() != data->get_num_vectors())
			SG_ERROR("Number of training vectors does not match number of labels\n");
		kernel->init(data, data);
	}

	if (m_labels->get_label_type() != LT_BINARY)
		SG_ERROR("DomainAdaptationSVM requires binary labels\n");

	int32_t num_training_points = get_labels()->get_num_labels();
	CBinaryLabels* labels = (CBinaryLabels*) get_labels();

	float64_t* lin_term = SG_MALLOC(float64_t, num_training_points);

	// grab current training features
	CFeatures* train_data = get_kernel()->get_lhs();

	// presvm is required here; its bias was set to zero in the constructor and already contains B
	ASSERT(presvm);
	CBinaryLabels* parent_svm_out = presvm->apply_binary(train_data);

	// pre-compute linear term
	for (int32_t i=0; i<num_training_points; i++)
	{
		lin_term[i] = train_factor * B * labels->get_label(i) * parent_svm_out->get_label(i) - 1.0;
	}

	//set linear term for QP
	this->set_linear_term(SGVector<float64_t>(lin_term, num_training_points));

	//train SVM
	bool success = CSVMLight::train_machine();
	SG_UNREF(labels);
	SG_UNREF(parent_svm_out);

	return success;

}
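train_machine() above assumes a pre-trained source-domain SVM (presvm) and a weighting factor B: the QP's linear term is shifted by the parent model's predictions on the current training set. A hedged usage sketch follows, assuming the five-argument constructor (C, kernel, labels, presvm, B) from Shogun headers of this era; all variable names are illustrative only.

// Sketch: adapt a source-domain classifier to target-domain data.
// Assumes CDomainAdaptationSVM(C, kernel, labels, presvm, B); names are placeholders.
CLibSVM* presvm = new CLibSVM(C, source_kernel, source_labels);
presvm->set_bias_enabled(false);   // the snippet above expects the parent bias to be zero
presvm->train();

CDomainAdaptationSVM* dasvm =
	new CDomainAdaptationSVM(C, target_kernel, target_labels, presvm, B);
dasvm->train();   // runs the train_machine() shown above on the target data
CBinaryLabels* target_out = dasvm->apply_binary(target_test_features);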
Example #3
void fImgSvm::test_libsvm2()
{
    init_shogun(&print_message);
    const int32_t feature_cache=0;
    const int32_t kernel_cache=0;
    const float64_t rbf_width=10;
    const float64_t svm_C=10;
    const float64_t svm_eps=0.001;

    int32_t num=mtrainimgsum;
    int32_t dims=SIFTN;
    float64_t dist=0.5;

    SGVector<float64_t> lab(num); // labels for the training images
    SGMatrix<float64_t> feat(dims, num);

    //gen_rand_data(lab, feat, dist);
    // copy the training descriptors into the feature matrix (one column per image)
    for (int32_t i = 0; i < num; i++) {
        for (int32_t j = 0; j < dims; j++) {
            feat(j, i) = imgvec[i][j];
        }
    }

    // map the two image classes onto binary labels: class 1 -> -1, everything else -> +1
    for (int32_t i = 0; i < num; i++) {
        //lab[i] = imglabelvec[i]*1.0;
        if (imgtrainlabelvec[i] == 1)
            lab[i] = -1.0;
        else
            lab[i] = 1.0;
    }

    // create train labels
    CLabels* labels=new CBinaryLabels(lab);

    // create train features
    CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>(feature_cache);
    SG_REF(features);
    features->set_feature_matrix(feat);

    // create gaussian kernel
    CGaussianKernel* kernel=new CGaussianKernel(kernel_cache, rbf_width);
    SG_REF(kernel);
    kernel->init(features, features);

    // create svm via libsvm and train
    CLibSVM* svm=new CLibSVM(svm_C, kernel, labels);
    SG_REF(svm);
    svm->set_epsilon(svm_eps);
    svm->train();

    SG_SPRINT("num_sv:%d b:%f\n", svm->get_num_support_vectors(),
              svm->get_bias());

    // classify + display output
    CBinaryLabels* out_labels=CBinaryLabels::obtain_from_generic(svm->apply());

    for (int32_t i=0; i<num; i++) {
        SG_SPRINT("out[%d]=%f (%f)\n", i, out_labels->get_label(i),
                  out_labels->get_confidence(i));
    }

    CBinaryLabels* result = CBinaryLabels::obtain_from_generic (svm->apply(features) );
    for (int32_t i=0; i<3; i++)
        SG_SPRINT("output[%d]=%f\n", i, result->get_label(i));

    // predict on the held-out test images
    printf("----------------test -----------------\n");

    getTestImg(imgtestvec);
    int32_t testnum = mtestingsum;
    SGMatrix<float64_t> testfeat(dims, testnum);

    for (int32_t i = 0; i < testnum; i++) {
        for (int32_t j = 0; j < dims; j++) {
            testfeat(j, i) = imgtestvec[i][j];
        }
    }

    CDenseFeatures<float64_t>* testfeatures=new CDenseFeatures<float64_t>(feature_cache);
    SG_REF(testfeatures);
    testfeatures->set_feature_matrix(testfeat);
    CBinaryLabels* testresult = CBinaryLabels::obtain_from_generic(svm->apply(testfeatures));

    // count correct predictions per class (class 1 was mapped to -1, class 2 to +1)
    int32_t rightnum1 = 0;   // correctly classified class-1 images
    int32_t rightsum1 = 0;   // total class-1 images in the test set
    int32_t rightnum2 = 0;   // correctly classified class-2 images
    for (int32_t i = 0; i < testnum; i++) {
        SG_SPRINT("output[%d]=%f\n", i, testresult->get_label(i));
        if (imgtestlabelvec[i] == 1) {
            if (testresult->get_label(i) < 0.0)
                rightnum1++;
            rightsum1++;
        } else if (imgtestlabelvec[i] == 2 && testresult->get_label(i) > 0.0) {
            rightnum2++;
        }
    }

    printf(" %lf\n ",(rightnum1+rightnum2)*1.0 / testnum);
    printf("class 1 : %lf\n",rightnum1 *1.0 / rightsum1);
    printf("class 2 : %lf\n",rightnum2 *1.0 / (testnum -  rightsum1));



    SG_UNREF(out_labels);
    SG_UNREF(result);
    SG_UNREF(testresult);
    SG_UNREF(testfeatures);
    SG_UNREF(kernel);
    SG_UNREF(features);
    SG_UNREF(svm);

    exit_shogun();
}
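Stripped of the image-specific bookkeeping, the snippet boils down to: pack descriptors into an SGMatrix, wrap them in CDenseFeatures, attach a CGaussianKernel, train a CLibSVM, and call apply() on a second feature set. A condensed sketch using only calls that already appear above; train_mat (dims x num), train_lab (num) and test_mat are placeholders for your own data.

// Condensed train/predict pattern distilled from the example above.
CDenseFeatures<float64_t>* train_feats = new CDenseFeatures<float64_t>(0);
SG_REF(train_feats);
train_feats->set_feature_matrix(train_mat);      // train_mat: placeholder SGMatrix<float64_t>

CBinaryLabels* train_labels = new CBinaryLabels(train_lab);  // train_lab: +/-1 labels

CGaussianKernel* kernel = new CGaussianKernel(0, 10.0);      // cache size 0, RBF width 10
SG_REF(kernel);
kernel->init(train_feats, train_feats);

CLibSVM* svm = new CLibSVM(10.0, kernel, train_labels);      // C = 10
SG_REF(svm);
svm->train();

CDenseFeatures<float64_t>* test_feats = new CDenseFeatures<float64_t>(0);
SG_REF(test_feats);
test_feats->set_feature_matrix(test_mat);        // test_mat: placeholder SGMatrix<float64_t>
CBinaryLabels* pred = CBinaryLabels::obtain_from_generic(svm->apply(test_feats));
// ... read pred->get_label(i), then SG_UNREF everything as in the example above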