void human_poseCallback(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr& human_pose)
{

	//Create candidates
	my_candidates.clear(); 
	
	tracking::Candidate temp_candidate;
	temp_candidate.pose = *human_pose;
	temp_candidate.flag = true;
	my_candidates.push_back(temp_candidate);


	t_identify_new_legs(my_candidates, &human_pt);
	update_all_time_candidates(my_candidates, my_all_time_candidates);
	
	compute_likelihood(my_candidates, my_all_time_candidates, forgetting_factor);

	// Walk both vectors from the back in lockstep: publish every current
	// candidate and copy its freshly computed likelihood back into the
	// corresponding entry at the tail of the all-time candidate list.
	std::vector<tracking::Candidate>::iterator publish_iterator = my_candidates.end();
	std::vector<tracking::Candidate>::iterator update_likelihood_iterator = my_all_time_candidates.end();
	
	while(publish_iterator != my_candidates.begin())
	{
		publish_iterator--;
		candidate_array_msg.candidates.push_back(*publish_iterator);
		
		update_likelihood_iterator--;
		update_likelihood_iterator->likelihood = publish_iterator->likelihood;
	}

}
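For context, a callback like this only fires once it has been registered with a subscriber. A minimal sketch of that wiring follows; the node name, topic name, and queue size are placeholders and not taken from this project:

#include <ros/ros.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>

int main(int argc, char** argv)
{
	// Hypothetical node setup; human_poseCallback is the function shown above.
	ros::init(argc, argv, "compute_likelihood_node");
	ros::NodeHandle nh;
	ros::Subscriber sub = nh.subscribe("human_pose", 10, human_poseCallback);
	ros::spin();  // hand control to ROS so the callback runs on each incoming message
	return 0;
}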
Example #2
double lda_inference(document* doc, lda_model* model, double* var_gamma, double** phi)
{
    double converged = 1;
    double phisum = 0, likelihood = 0;
    double likelihood_old = 0, oldphi[model->num_topics];
    int k, n, var_iter;
    double digamma_gam[model->num_topics];

    // compute posterior dirichlet

    for (k = 0; k < model->num_topics; k++)
    {
        var_gamma[k] = model->alpha + (doc->total/((double) model->num_topics));
        digamma_gam[k] = digamma(var_gamma[k]);
        for (n = 0; n < doc->length; n++)
            phi[n][k] = 1.0/model->num_topics;
    }
    var_iter = 0;

    while ((converged > VAR_CONVERGED) &&
           ((var_iter < VAR_MAX_ITER) || (VAR_MAX_ITER == -1)))
    {
        var_iter++;
        for (n = 0; n < doc->length; n++)
        {
            phisum = 0;
            for (k = 0; k < model->num_topics; k++)
            {
                oldphi[k] = phi[n][k];
                phi[n][k] =
                    digamma_gam[k] +
                    model->log_prob_w[k][doc->words[n]];

                if (k > 0)
                    phisum = log_sum(phisum, phi[n][k]);
                else
                    phisum = phi[n][k]; // note, phi is in log space
            }

            for (k = 0; k < model->num_topics; k++)
            {
                phi[n][k] = exp(phi[n][k] - phisum);
                var_gamma[k] =
                    var_gamma[k] + doc->counts[n]*(phi[n][k] - oldphi[k]);
                // !!! a lot of extra digammas here because of how we're computing it,
                // !!! but it's more automatically updated too.
                digamma_gam[k] = digamma(var_gamma[k]);
            }
        }

        likelihood = compute_likelihood(doc, model, phi, var_gamma);
        assert(!isnan(likelihood));
        converged = (likelihood_old - likelihood) / likelihood_old;
        likelihood_old = likelihood;

        // printf("[LDA INF] %8.5f %1.3e\n", likelihood, converged);
    }
    return(likelihood);
}
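Both LDA snippets accumulate phi in log space through a log_sum helper. A minimal numerically stable version (essentially the one shipped with the original lda-c utilities) looks like the sketch below; treat it as an illustration rather than the exact code of either project:

#include <math.h>

/* Returns log(exp(log_a) + exp(log_b)) without overflowing:
 * factor out the larger term before exponentiating. */
double log_sum(double log_a, double log_b)
{
    if (log_a < log_b)
        return log_b + log(1 + exp(log_a - log_b));
    else
        return log_a + log(1 + exp(log_b - log_a));
}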
Example #3
File: var_bayes.cpp  Project: DylanV/lda
double var_bayes::inference(const document &doc, std::vector<double>& var_gamma,
                            std::vector<std::vector<double>>& phi) {

    std::vector<double> digamma_gam(numTopics);

    for(int k=0; k<numTopics; k++){
        var_gamma[k] = alpha.alpha[k] + doc.count/numTopics;
    }

    int iteration = 0;
    double converged = 1;
    double phisum;
    std::vector<double> prev_gamma = std::vector<double>(numTopics);

    while((converged > INF_CONV_THRESH) and (iteration < INF_MAX_ITER)){
        iteration++;

        for(int k=0; k<numTopics; k++){
            digamma_gam[k] = digamma(var_gamma[k]);
            prev_gamma[k] = var_gamma[k];
            var_gamma[k] = alpha.alpha[k];
        }

        int n=0;
        for(auto const& word_count : doc.wordCounts){
            phisum = 0;
            for(int k=0; k<numTopics; k++){
                phi[n][k] = digamma_gam[k] + logProbW[k][word_count.first];

                if(k>0){
                    phisum = log_sum(phisum, phi[n][k]);
                } else {
                    phisum = phi[n][k];
                }
            }
            // Estimate gamma and phi
            for(int k=0; k<numTopics; k++){
                phi[n][k] = exp(phi[n][k] - phisum);
                var_gamma[k] += word_count.second*(phi[n][k]);
            }
            n++;
        }

        converged = 0;
        for(int k=0; k<numTopics; ++k){
            converged += fabs(prev_gamma[k] - var_gamma[k]);
        }
        converged /= numTopics;
    }

    return compute_likelihood(doc, var_gamma, phi);
}
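Like lda_inference above, this version relies on a digamma function. LDA implementations commonly use a shift-plus-asymptotic-series approximation; the sketch below follows that standard recipe and is an illustration, not necessarily the code used in DylanV/lda:

#include <math.h>

/* Approximate digamma(x): shift the argument up by 6 so the asymptotic
 * series is accurate, then undo the shift with the recurrence
 * digamma(y) = digamma(y+1) - 1/y. */
double digamma(double x)
{
    x = x + 6;
    double p = 1.0 / (x * x);
    /* series: log(x) - 1/(2x) - 1/(12x^2) + 1/(120x^4) - 1/(252x^6) + ... */
    p = (((0.004166666666667 * p - 0.003968253986254) * p
          + 0.008333333333333) * p - 0.083333333333333) * p;
    p = p + log(x) - 0.5 / x
        - 1 / (x - 1) - 1 / (x - 2) - 1 / (x - 3)
        - 1 / (x - 4) - 1 / (x - 5) - 1 / (x - 6);
    return p;
}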
Example #4
void human_posesCallback(const tracking::PoseWithCovarianceStampedArray::ConstPtr& human_poses)
{
        	
	std::string node_name = ros::this_node::getName();

	// Reject empty messages before indexing poses[0].
	if (human_poses->poses.empty())
	{
		ROS_ERROR("[%s] empty message", node_name.c_str());
		return;
	}
	if (human_poses->poses[0].header.frame_id != likelihood_frame)
	{
		ROS_ERROR("[%s] received human_pose in %s frame but likelihood is computed in %s - message dropped", node_name.c_str(), human_poses->poses[0].header.frame_id.c_str(), likelihood_frame.c_str());
		return;
	}
	if ((human_poses->only_for_tracking.data) && (only_detection_data))
	{
		//ROS_ERROR("[compute_likelihood_detection] data only for tracking");
		return;
	}
	tracking::PoseWithCovarianceStampedArray human_poses_new = *human_poses;
	tracking::PoseWithCovarianceStampedArray human_poses_ok = *human_poses;
	
	
	// Drop poses that exactly repeat a pose (same x/y) from the previous
	// message, to avoid processing duplicated detections twice.
	int my_size = human_poses_ok.poses.size();
	for (int i = 0; i < my_size; i++)
	{
		for (int j = 0; j < (int)human_poses_old.poses.size(); j++)
		{
			bool check_x = (human_poses_ok.poses[i].pose.pose.position.x == human_poses_old.poses[j].pose.pose.position.x);
			bool check_y = (human_poses_ok.poses[i].pose.pose.position.y == human_poses_old.poses[j].pose.pose.position.y);
			if (check_x && check_y)
			{
				human_poses_ok.poses.erase(human_poses_ok.poses.begin() + i);
				i = i - 1;
				my_size = human_poses_ok.poses.size();
				break;
			}
		}
	}
	human_poses_old = human_poses_new;
    

	if (human_poses_ok.poses.size() == 0)
	{
		//ROS_ERROR("[%s] empty message", node_name.c_str());
		return;
	}
	


	//Create candidates
	
	my_candidates.clear(); 
	
	for (int i = 0; i < human_poses_ok.poses.size(); i++)
	{
		tracking::Candidate temp_candidate;
		temp_candidate.pose = human_poses_ok.poses[i];
		temp_candidate.flag = true;
		temp_candidate.counter_debug = counter_debug;
		counter_debug++;
		
		my_candidates.push_back(temp_candidate);
	}


	t_identify_new_legs(my_candidates, &human_pt);

	update_all_time_candidates(my_candidates, my_all_time_candidates);

	compute_likelihood(my_candidates, my_all_time_candidates, forgetting_factor);

	std::vector<tracking::Candidate>::iterator publish_iterator = my_candidates.end();

	std::vector<tracking::Candidate>::iterator update_likelihood_iterator = my_all_time_candidates.end();
	
	while(publish_iterator != my_candidates.begin())
	{
		publish_iterator--;
		candidate_array_msg.candidates.push_back(*publish_iterator);
		
		update_likelihood_iterator--;
		update_likelihood_iterator->likelihood = publish_iterator->likelihood;
	}
        	

}
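The duplicate-filtering loop above erases from the vector it is iterating over, which is easy to get wrong. An equivalent, more idiomatic sketch using the erase-remove idiom (the helper name is hypothetical, and it assumes the same message headers as the node) could be:

#include <algorithm>

// Hypothetical helper: drop every pose whose x/y exactly matches a pose
// from the previous message, mirroring the check in the loop above.
static void drop_repeated_poses(tracking::PoseWithCovarianceStampedArray& current,
                                const tracking::PoseWithCovarianceStampedArray& previous)
{
	current.poses.erase(
		std::remove_if(current.poses.begin(), current.poses.end(),
			[&previous](const geometry_msgs::PoseWithCovarianceStamped& p)
			{
				for (const auto& old_p : previous.poses)
				{
					if (p.pose.pose.position.x == old_p.pose.pose.position.x &&
					    p.pose.pose.position.y == old_p.pose.pose.position.y)
						return true;  // exact repeat of an earlier detection
				}
				return false;
			}),
		current.poses.end());
}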
Example #5
File: infer.c  Project: RenqinCai/sctm
void infer (char* odir, sctm_data* data, sctm_params* params,
		sctm_latent* latent, sctm_counts* counts) {
	int iter, d;
	double* docLogLikelihood = (double*) malloc(sizeof(double)*(data->D));
	int token = 0;
	int likelihoodCount; 

	likelihoodCount = 0;

	printf("\ninfer---likelihoodcount%d", likelihoodCount);

	for(d=0; d<data->D; d++){
		*(docLogLikelihood+d) = 0;
	}

	for (iter = 1; iter < params->ITER+1; iter++) {

		if (params->word_sparsity > 0 && params->trte == 0)
			infer_phi(data, params, latent, counts);

		if (params->model == 2) {
			infer_b(data, params, latent, counts);
		}

		infer_z(data, params, latent, counts, iter, odir);
		//assignment(odir, data, params, latent, counts, iter);

		if (params->CMNTS) {
			if (params->sents_sparsity) infer_xi(data, params, latent, counts);
			//assignment(odir, data, params, latent, counts, iter);

			infer_y(data, params, latent, counts, iter);
			//assignment(odir, data, params, latent, counts, iter);

			infer_t(data, params, latent, counts);
		}

		// printf("end infer....");
		if (iter % params->save_step == 0 || iter == 1 || (iter >= params->burn_in && (iter-params->burn_in)%params->save_state == 0)) {
			// printf("\nsave....");
			// fflush(stdout);

			// printf("\n%4diterations", iter);
			// printf("\n%dtotal iterations", params->ITER);
			printf("\n%4d of %d iterations done", iter, params->ITER);
			
			// if(params->trte==1){
			// 	// printf("compute_likelihood");
			// 	// fflush(stdout);

			likelihoodCount += 1;
			compute_likelihood(data, params, latent, counts, docLogLikelihood, &token);
			printf("\nlikelihoodCount %d", likelihoodCount);
			fflush(stdout);

			// }

			assignment(odir, data, params, latent, counts, iter);
			fflush(stdout);
//			if (params->trte == 1)
//				compute_perplexity(odir, data, params, latent, counts);
		}

	}

	assignment(odir, data, params, latent, counts, iter);
	if (params->trte == 1){


		// likelihoodCount += 1;
		// compute_likelihood(data, params, latent, counts, docLogLikelihood, &token);
		// printf("\nlikelihoodcount%d", likelihoodCount);
		// fflush(stdout);

			// }
		printf("\nlikelihoodCount%d", likelihoodCount);
		compute_perplexity(odir, data, docLogLikelihood, &likelihoodCount);
	}


	// if (params->trte == 1)
	// 	compute_perplexity(odir, data, params, latent, counts, docLogLikelihood);

	free(docLogLikelihood);  // release the per-document log-likelihood buffer allocated above
}
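For reference, held-out perplexity is conventionally the exponential of the negative total log-likelihood divided by the number of tokens. The sketch below shows that final step; the helper name and the averaging over saved samples are assumptions for illustration, not verified against this project's compute_perplexity:

#include <math.h>

/* Hypothetical: collapse per-document log-likelihoods into one perplexity
 * value, given D documents, token total tokens, and samples saved
 * likelihood evaluations to average over. */
double perplexity_from_loglik(const double* docLogLikelihood, int D,
                              int samples, int token)
{
	double total = 0.0;
	for (int d = 0; d < D; d++)
		total += docLogLikelihood[d] / samples;  /* average over saved samples */
	return exp(-total / token);
}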