Example #1
CLatentLabels* CLatentSVM::apply()
{
	if (!m_model)
		SG_ERROR("LatentModel is not set!\n");

	if (!features)
		return NULL;

	index_t num_examples = m_model->get_num_vectors();
	CLatentLabels* hs = new CLatentLabels(num_examples);
	CBinaryLabels* ys = new CBinaryLabels(num_examples);
	hs->set_labels(ys);
	m_model->set_labels(hs);

	for (index_t i = 0; i < num_examples; ++i)
	{
		/* find h for the example */
		CData* h = m_model->infer_latent_variable(w, i);
		hs->add_latent_label(h);
	}

	/* compute the y labels */
	CDotFeatures* x = m_model->get_psi_feature_vectors();
	x->dense_dot_range(ys->get_labels().vector, 0, num_examples, NULL, w.vector, w.vlen, 0.0);

	return hs;
}
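A minimal usage sketch of the method above, assuming a trained CLatentSVM* svm whose latent model and features have already been attached; the variable names are illustrative only, and the predictions are read back through the same CLatentLabels/CBinaryLabels accessors used in the later examples.

/* hypothetical caller of CLatentSVM::apply() */
CLatentLabels* predicted = svm->apply();
CBinaryLabels* y_pred = CBinaryLabels::obtain_from_generic(predicted->get_labels());
for (index_t i = 0; i < y_pred->get_num_labels(); ++i)
	SG_SPRINT("example %d: y = %f\n", i, y_pred->get_label(i));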
Example #2
void CLatentModel::argmax_h(const SGVector<float64_t>& w)
{
	int32_t num = get_num_vectors();
	CBinaryLabels* y = CBinaryLabels::obtain_from_generic(m_labels->get_labels());
	ASSERT(num > 0);
	ASSERT(num == m_labels->get_num_labels());

	// argmax_h only for positive examples
	for (int32_t i = 0; i < num; ++i)
	{
		if (y->get_label(i) == 1)
		{
			// infer h and set it for the argmax_h <w,psi(x,h)>
			CData* latent_data = infer_latent_variable(w, i);
			m_labels->set_latent_label(i, latent_data);
		}
	}
}
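The method above is the completion step of the usual latent-SVM alternation: with w fixed, it re-infers the latent variable of every positive example, after which w is re-estimated on the completed data. A rough sketch of that outer loop follows; everything except argmax_h() is an assumption (get_dim() standing in for the joint feature-space dimension and retrain_w() for some inner solver that is not shown).

/* hypothetical CCCP-style outer loop around argmax_h() */
SGVector<float64_t> w(model->get_dim());
w.zero();
for (int32_t iter = 0; iter < max_outer_iterations; ++iter)
{
	model->argmax_h(w);      /* fix w, complete the latent variables of positive examples */
	w = retrain_w(model, w); /* fix the latent variables, re-estimate w */
}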
Example #3
bool CDomainAdaptationSVM::train_machine(CFeatures* data)
{

	if (data)
	{
		if (m_labels->get_num_labels() != data->get_num_vectors())
			SG_ERROR("Number of training vectors does not match number of labels\n");
		kernel->init(data, data);
	}

	if (m_labels->get_label_type() != LT_BINARY)
		SG_ERROR("DomainAdaptationSVM requires binary labels\n");

	CBinaryLabels* labels = (CBinaryLabels*) get_labels();
	int32_t num_training_points = labels->get_num_labels();

	float64_t* lin_term = SG_MALLOC(float64_t, num_training_points);

	// grab current training features
	CFeatures* train_data = get_kernel()->get_lhs();

	// the parent SVM must be set; its bias was set to zero in the constructor and already contains B
	ASSERT(presvm);
	CBinaryLabels* parent_svm_out = presvm->apply_binary(train_data);

	// pre-compute linear term
	for (int32_t i=0; i<num_training_points; i++)
	{
		lin_term[i] = train_factor * B * labels->get_label(i) * parent_svm_out->get_label(i) - 1.0;
	}

	//set linear term for QP
	this->set_linear_term(SGVector<float64_t>(lin_term, num_training_points));

	// train SVM
	bool success = CSVMLight::train_machine();
	SG_UNREF(labels);
	SG_UNREF(parent_svm_out);

	return success;

}
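The linear term computed above is what couples the two domains: each training example i contributes train_factor * B * y_i * f_presvm(x_i) - 1.0 to the QP objective. A minimal construction sketch follows, assuming a source-domain CSVM* presvm trained beforehand with its bias disabled, a target-domain kernel and binary labels, and the (C, kernel, labels, presvm, B) constructor; the concrete values are illustrative only.

/* hypothetical set-up for the train_machine() shown above */
float64_t C = 1.0;
float64_t B = 1.0;
CDomainAdaptationSVM* dasvm = new CDomainAdaptationSVM(C, kernel, labels, presvm, B);
SG_REF(dasvm);
dasvm->train();                                    // runs the train_machine() from the example
CBinaryLabels* target_out = dasvm->apply_binary(); // predictions on the target-domain data
SG_UNREF(target_out);
SG_UNREF(dasvm);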
Example #4
void fImgSvm::test_libsvm2()
{
    init_shogun(&print_message);
    const int32_t feature_cache=0;
    const int32_t kernel_cache=0;
    const float64_t rbf_width=10;
    const float64_t svm_C=10;
    const float64_t svm_eps=0.001;

    int32_t num=mtrainimgsum;
    int32_t dims=SIFTN;
    float64_t dist=0.5;

    SGVector<float64_t> lab(num); // labels for the training images
    SGMatrix<float64_t> feat(dims, num);

    //gen_rand_data(lab, feat, dist);
    // copy the training image descriptors into the feature matrix (one column per image)
    for (int i = 0; i < num; i++) {
        for (int j = 0; j < dims; j++) {
            feat(j, i) = imgvec[i][j];
        }
    }

    // map class 1 to -1 and every other class to +1 for the binary SVM
    for (int i = 0; i < num; i++) {
        //lab[i] = imglabelvec[i]*1.0;
        if (imgtrainlabelvec[i] == 1)
            lab[i] = -1.0;
        else
            lab[i] = 1.0;
    }

    // create train labels
    CLabels* labels=new CBinaryLabels(lab);

    // create train features
    CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>(feature_cache);
    SG_REF(features);
    features->set_feature_matrix(feat);

    // create gaussian kernel
    CGaussianKernel* kernel=new CGaussianKernel(kernel_cache, rbf_width);
    SG_REF(kernel);
    kernel->init(features, features);

    // create svm via libsvm and train
    CLibSVM* svm=new CLibSVM(svm_C, kernel, labels);
    SG_REF(svm);
    svm->set_epsilon(svm_eps);
    svm->train();

    SG_SPRINT("num_sv:%d b:%f\n", svm->get_num_support_vectors(),
              svm->get_bias());

    // classify + display output
    CBinaryLabels* out_labels=CBinaryLabels::obtain_from_generic(svm->apply());

    for (int32_t i=0; i<num; i++) {
        SG_SPRINT("out[%d]=%f (%f)\n", i, out_labels->get_label(i),
                  out_labels->get_confidence(i));
    }

    CBinaryLabels* result = CBinaryLabels::obtain_from_generic(svm->apply(features));
    for (int32_t i=0; i<3; i++)
        SG_SPRINT("output[%d]=%f\n", i, result->get_label(i));

    // predict on the held-out test images
    printf("----------------test -----------------\n");

    getTestImg(imgtestvec);
    int32_t testnum = mtestingsum;
    SGMatrix<float64_t> testfeat(dims, testnum);

    // copy the test image descriptors into the test feature matrix
    for (int i = 0; i < testnum; i++) {
        for (int j = 0; j < dims; j++) {
            testfeat(j, i) = imgtestvec[i][j];
        }
    }

    CDenseFeatures<float64_t>* testfeatures=new CDenseFeatures<float64_t>(feature_cache);
    SG_REF(testfeatures);
    testfeatures->set_feature_matrix(testfeat);
    CBinaryLabels* testresult = CBinaryLabels::obtain_from_generic(svm->apply(testfeatures));
    int32_t rightnum1 = 0; // correctly classified test images of class 1
    int32_t rightsum1 = 0; // total test images of class 1
    int32_t rightnum2 = 0; // correctly classified test images of class 2
    for (int32_t i = 0; i < testnum; i++) {
        SG_SPRINT("output[%d]=%f\n", i, testresult->get_label(i));
        if (imgtestlabelvec[i] == 1) {
            // class 1 was mapped to -1 at training time, so a negative prediction is correct
            if (testresult->get_label(i) < 0.0)
                rightnum1++;
            rightsum1++;
        } else if (imgtestlabelvec[i] == 2 && testresult->get_label(i) > 0.0) {
            // class 2 was mapped to +1, so a positive prediction is correct
            rightnum2++;
        }
    }

    printf(" %lf\n ",(rightnum1+rightnum2)*1.0 / testnum);
    printf("class 1 : %lf\n",rightnum1 *1.0 / rightsum1);
    printf("class 2 : %lf\n",rightnum2 *1.0 / (testnum -  rightsum1));



    SG_UNREF(out_labels);
    SG_UNREF(kernel);
    SG_UNREF(features);
    SG_UNREF(svm);

    exit_shogun();
}
Example #5
static void read_dataset(char* fname, CLatentFeatures*& feats, CLatentLabels*& labels)
{
	FILE* fd = fopen(fname, "r");
	char line[MAX_LINE_LENGTH];
	char *pchar, *last_pchar;
	int num_examples,label,height,width;

	char* path = dirname(fname);

	if (fd == NULL)
		SG_SERROR("Cannot open input file %s!\n", fname);

	fgets(line, MAX_LINE_LENGTH, fd);
	num_examples = atoi(line);

	labels = new CLatentLabels(num_examples);
	SG_REF(labels);

	CBinaryLabels* ys = new CBinaryLabels(num_examples);

	feats = new CLatentFeatures(num_examples);
	SG_REF(feats);

	CMath::init_random();
	for (int i = 0; (!feof(fd)) && (i < num_examples); ++i)
	{
		fgets(line, MAX_LINE_LENGTH, fd);

		pchar = line;
		while ((*pchar)!=' ') pchar++;
		*pchar = '\0';
		pchar++;

		/* label: {-1, 1} */
		last_pchar = pchar;
		while ((*pchar)!=' ') pchar++;
		*pchar = '\0';
		label = (atoi(last_pchar) % 2 == 0) ? 1 : -1;
		pchar++;

		if (ys->set_label(i, label) == false)
			SG_SERROR("Couldn't set label for element %d\n", i);

		last_pchar = pchar;
		while ((*pchar)!=' ') pchar++;
		*pchar = '\0';
		width = atoi(last_pchar);
		pchar++;

		last_pchar = pchar;
		while ((*pchar)!='\n') pchar++;
		*pchar = '\0';
		height = atoi(last_pchar);

		/* create latent label */
		int x = CMath::random(0, width-1);
		int y = CMath::random(0, height-1);
		CBoundingBox* bb = new CBoundingBox(x,y);
		labels->add_latent_label(bb);

		SG_SPROGRESS(i, 0, num_examples);
		CHOGFeatures* hog = new CHOGFeatures(width, height);
		hog->hog = SG_CALLOC(float64_t**, hog->width);
		for (int j = 0; j < width; ++j)
		{
			hog->hog[j] = SG_CALLOC(float64_t*, hog->height);
			for (int k = 0; k < height; ++k)
			{
				char filename[MAX_LINE_LENGTH];
				hog->hog[j][k] = SG_CALLOC(float64_t, HOG_SIZE);

				sprintf(filename,"%s/%s.%03d.%03d.txt",path,line,j,k);
				FILE* f = fopen(filename, "r");
				if (f == NULL)
					SG_SERROR("Could not open file: %s\n", filename);
				for (int l = 0; l < HOG_SIZE; ++l)
					fscanf(f,"%lf",&hog->hog[j][k][l]);
				fclose(f);
			}
		}
		feats->add_sample(hog);
	}
	fclose(fd);

	labels->set_labels(ys);

	SG_SDONE();
}
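A minimal, hypothetical driver for the reader above; the training step that would normally follow (building a CLatentModel and a CLatentSVM on top of feats and labels) is omitted, and argv[1] is assumed to point at an index file in the format parsed by read_dataset().

int main(int argc, char** argv)
{
	init_shogun_with_defaults();

	CLatentFeatures* feats = NULL;
	CLatentLabels* labels = NULL;
	read_dataset(argv[1], feats, labels);

	SG_SPRINT("Loaded %d examples\n", labels->get_num_labels());

	SG_UNREF(feats);
	SG_UNREF(labels);
	exit_shogun();
	return 0;
}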