Пример #1
0
  bool MeasureGroup_Impl::push(const Measure& measure) {
    MeasureVector candidates = measures(false);
    candidates.push_back(measure);
    if (!measuresAreCompatible(candidates)) {
      return false;
    }

    m_measures.push_back(measure);
    m_measures.back().onChange();
    connectChild(m_measures.back(),true);
    onChange(AnalysisObject_Impl::Benign);
    return true;
  }
Пример #2
0
  QVariant MeasureGroup_Impl::toVariant() const {
    // Start from the base-class serialization and extend it with
    // MeasureGroup-specific data.
    QVariantMap measureGroupData = InputVariable_Impl::toVariant().toMap();

    measureGroupData["workflow_step_type"] = "MeasureGroup";

    // Serialize every measure, tagging each with its position in the group
    // so ordering can be reconstructed on deserialization.
    QVariantList measuresList;
    MeasureVector groupMeasures = measures(false);
    for (int i = 0, n = int(groupMeasures.size()); i < n; ++i) {
      QVariantMap measureData = groupMeasures[i].toVariant().toMap();
      measureData["measure_group_index"] = i;
      measuresList.push_back(measureData);
    }
    measureGroupData["measures"] = measuresList;

    return QVariant(measureGroupData);
  }
Пример #3
0
  bool MeasureGroup_Impl::fileTypesAreCompatible(
      const Measure& childMeasure,
      const FileReferenceType& proposedInputFileType,
      const FileReferenceType& proposedOutputFileType) const
  {
    // can only change file types if no null measures
    if (proposedInputFileType != proposedOutputFileType) {
      NullMeasureVector nullMeasures = subsetCastVector<NullMeasure>(m_measures);
      if (!nullMeasures.empty()) {
        return false;
      }
    }

    // check proposals against file types of other measures
    MeasureVector pv = measures(false);
    auto it = std::find(pv.begin(),pv.end(),childMeasure);
    OS_ASSERT(it != pv.end());
    pv.erase(it);

    std::pair<bool,OptionalFileReferenceType> inputFileTypeResult = detail::inputFileType(pv);
    OS_ASSERT(inputFileTypeResult.first);
    if (inputFileTypeResult.second && (proposedInputFileType != *(inputFileTypeResult.second))) {
      return false;
    }

    std::pair<bool,OptionalFileReferenceType> outputFileTypeResult = detail::outputFileType(pv);
    if (outputFileTypeResult.second && (proposedOutputFileType != *(outputFileTypeResult.second))) {
      return false;
    }

    // either proposals match current file types, or at least one current file type is null.
    // if matches current, should be a-okay.
    // otherwise, proposing a new file type on input or output side for this variable, so
    // ask problem if ok.
    if (OptionalAnalysisObject parent = this->parent()) {
      if (!inputFileTypeResult.second || !outputFileTypeResult.second) {
        if (!(parent->cast<WorkflowStep>().fileTypesAreCompatible(proposedInputFileType,
                                                                  proposedOutputFileType)))
        {
          return false;
        }
      }
    }
    return true;
  }
Пример #4
0
  std::vector<int> MeasureGroup_Impl::validValues(bool selectedOnly) const {
    // Valid values are 0-based indices into the measure vector; optionally
    // restricted to the measures that are currently selected.
    IntVector result;

    if (selectedOnly) {
      int idx(0);
      for (const Measure& m : measures(false)) {
        if (m.isSelected()) {
          result.push_back(idx);
        }
        ++idx;
      }
    } else {
      // Every index is valid when selection is not being enforced.
      const int n = numMeasures(false);
      for (int i = 0; i < n; ++i) {
        result.push_back(i);
      }
    }

    return result;
  }
Пример #5
0
//---------------------------------------------------------
//   readStaff
//    Read one staff element from the file. Staff 0 creates
//    the Measure objects (and frame boxes) as they are
//    encountered; subsequent staves re-read the measures
//    that already exist, filling in their own tracks.
//---------------------------------------------------------

void Score::readStaff(XmlReader& e)
      {
      // staff index is 1-based in the file, 0-based internally
      int staff = e.intAttribute("id", 1) - 1;
      int measureIdx = 0;
      e.setCurrentMeasureIndex(0);
      e.initTick(0);
      e.setTrack(staff * VOICES);

      if (staff == 0) {
            // First staff: create measures/frames while reading.
            while (e.readNextStartElement()) {
                  const QStringRef& tag(e.name());

                  if (tag == "Measure") {
                        Measure* measure = 0;
                        measure = new Measure(this);
                        measure->setTick(e.tick());
                        e.setCurrentMeasureIndex(measureIdx++);
                        //
                        // inherit timesig from previous measure
                        //
                        Measure* m = e.lastMeasure(); // measure->prevMeasure();
                        Fraction f(m ? m->timesig() : Fraction(4,4));
                        measure->setLen(f);
                        measure->setTimesig(f);

                        measure->read(e, staff);
                        measure->checkMeasure(staff);
                        if (!measure->isMMRest()) {
                              // ordinary measure: append and advance the tick
                              measures()->add(measure);
                              e.setLastMeasure(measure);
                              e.initTick(measure->tick() + measure->ticks());
                              }
                        else {
                              // this is a multi measure rest
                              // always preceded by the first measure it replaces,
                              // so attach it to that measure instead of the list
                              Measure* m1 = e.lastMeasure();

                              if (m1) {
                                    m1->setMMRest(measure);
                                    measure->setTick(m1->tick());
                                    }
                              }
                        }
                  else if (tag == "HBox" || tag == "VBox" || tag == "TBox" || tag == "FBox") {
                        // frames (boxes) live in the same measure list
                        MeasureBase* mb = toMeasureBase(Element::name2Element(tag, this));
                        mb->read(e);
                        mb->setTick(e.tick());
                        measures()->add(mb);
                        }
                  else if (tag == "tick")
                        e.initTick(fileDivision(e.readInt()));
                  else
                        e.unknown();
                  }
            }
      else {
            // Subsequent staves: walk the measure list created by staff 0
            // in parallel with the Measure elements being read.
            Measure* measure = firstMeasure();
            while (e.readNextStartElement()) {
                  const QStringRef& tag(e.name());

                  if (tag == "Measure") {
                        if (measure == 0) {
                              // more Measure elements than staff 0 created;
                              // recover by appending a fresh one
                              qDebug("Score::readStaff(): missing measure!");
                              measure = new Measure(this);
                              measure->setTick(e.tick());
                              measures()->add(measure);
                              }
                        e.initTick(measure->tick());
                        e.setCurrentMeasureIndex(measureIdx++);
                        measure->read(e, staff);
                        measure->checkMeasure(staff);
                        if (measure->isMMRest())
                              // NOTE(review): assumes e.lastMeasure() is non-null
                              // here — confirm an mmrest can never be the first
                              // measure encountered for this staff
                              measure = e.lastMeasure()->nextMeasure();
                        else {
                              e.setLastMeasure(measure);
                              if (measure->mmRest())
                                    measure = measure->mmRest();
                              else
                                    measure = measure->nextMeasure();
                              }
                        }
                  else if (tag == "tick")
                        e.initTick(fileDivision(e.readInt()));
                  else
                        e.unknown();
                  }
            }
      }
Пример #6
0
/*
 * Image-topic callback: runs the whole per-frame face pipeline.
 * Detection (CamShift, then Haar, then alternative Haar) -> Kalman
 * filtering of the face position -> head-distance smoothing ->
 * publishes nose color, face image, viewer image, and face pos/size.
 * Mutates member state: detection flags, counters, kalman_filter_,
 * head_distances_, detected_face_* and loop_counter_.
 */
void FaceDetector::imageCallback(const sensor_msgs::Image::ConstPtr& image_ptr)
{


    // Re-read the recognizer flag each frame so it can be toggled at runtime.
    private_nh_.param("/qbo_face_tracking/send_to_recognizer", send_to_face_recognizer_, false);
	/*
	 * Create a CvImage pointer to get cv::Mat representation of image
	 */
	cv_bridge::CvImagePtr cv_ptr;
    try
    {
    	cv_ptr = cv_bridge::toCvCopy(image_ptr, sensor_msgs::image_encodings::RGB8);
    }
    catch (cv_bridge::Exception& e)
    {
	    ROS_ERROR("cv_bridge exception: %s", e.what());
	    return;
    }

    cv::Mat image_received = (cv_ptr->image).clone();
    image_size_ = cv_ptr->image.size();

    /*
     * Creating structure to publish nose color
     */
    qbo_arduqbo::Nose nose;
    nose.header.stamp = ros::Time::now();
    nose.color=1;


    string detection_type = "";

    if(face_detected_bool_) //If face was detected in previous iteration - use CAM shift
    {
    	if(detectFaceCamShift(image_received) != 0)
    	{
    		face_detected_bool_ = false;
    		//Reset cam_shift_detected
    		cam_shift_detected_count_ = 0;
    		cam_shift_undetected_count_++;
    	}
    	else
    	{
    		detection_type = "CAMSHIFT";

    		//Update cam_shift counter
    		cam_shift_detected_count_++;
    		cam_shift_undetected_count_ = 0;
    	}
    }


    if(!face_detected_bool_) //If face was not detected - use Haar Cascade
    {
        vector<cv::Rect> faces_roi;
    	detectFacesHaar(image_received, faces_roi);

    	if(faces_roi.size() > 0) //If Haar cascade classifier found a face
    	{
    		face_detected_bool_ = true;
    		track_object_ = false;

    		//Changed
    		detected_face_roi_ = faces_roi[0];
    		detected_face_ = cv_ptr->image(detected_face_roi_);

    		//Adjust face ratio because Haar Cascade always returns a square
    		face_ratio_ = 1.3*detected_face_roi_.height/detected_face_roi_.width;

    		detection_type = "HAAR";
    	}


    }



    //Fallback: try the alternative Haar cascade when the primary one failed
    if(!face_detected_bool_ && exist_alternative_) //If face was not detected - use Haar Cascade
    {
        vector<cv::Rect> faces_roi;
        detectFacesAltHaar(image_received, faces_roi);

        if(faces_roi.size() > 0) //If Haar cascade classifier found a face
        {
                face_detected_bool_ = true;
                track_object_ = false;

                //Changed
                detected_face_roi_ = faces_roi[0];
                detected_face_ = cv_ptr->image(detected_face_roi_);

                //Adjust face ratio because Haar Cascade always returns a square
                face_ratio_ = 1.3*detected_face_roi_.height/detected_face_roi_.width;

                detection_type = "HAAR";
        }


    }



	/*
	 * Kalman filter for Face pos estimation
	 */
    kalman_filter_.predict();
	if(face_detected_bool_) //If face detected, use measured position to update kalman filter
	{
		if(undetected_count_>=undetected_threshold_)
		{
			//Face reappeared after a long gap: re-seed the filter state
			//directly at the measurement with zero velocity
			cv::Mat new_state(4,1, CV_32FC1);
			new_state.at<float>(0,0) = float(detected_face_roi_.x+detected_face_roi_.width/2.);
			new_state.at<float>(1,0) = float(detected_face_roi_.y+detected_face_roi_.height/2.);
			new_state.at<float>(2,0) = 0.;
			new_state.at<float>(3,0) = 0.;

			new_state.copyTo(kalman_filter_.statePre);
			new_state.copyTo(kalman_filter_.statePost);

		}

		else
		{
			//Normal update: measurement = face center plus velocity estimated
			//from the previous filtered position
			float u_vel = float(detected_face_roi_.x+detected_face_roi_.width/2.)-kalman_filter_.statePost.at<float>(0,0);
			float v_vel = float(detected_face_roi_.y+detected_face_roi_.height/2.)-kalman_filter_.statePost.at<float>(1,0);

			cv::Mat measures(4,1, CV_32FC1);
			measures.at<float>(0,0) = float(detected_face_roi_.x+detected_face_roi_.width/2.);
			measures.at<float>(1,0) = float(detected_face_roi_.y+detected_face_roi_.height/2.);
			measures.at<float>(2,0) = u_vel;
			measures.at<float>(3,0) = v_vel;

			kalman_filter_.correct(measures);
		}
	}
    else //If face hasn't been found, use only Kalman prediction
    {
    	kalman_filter_.statePre.copyTo(kalman_filter_.statePost);
    	kalman_filter_.errorCovPre.copyTo(kalman_filter_.errorCovPost);
    }
    

	/*
	 * Compute head distance
	 */
    float head_distance;

	if(!face_detected_bool_)
    {
	  //No measurement: reuse the most recent distance, or fall back to a
	  //default of 5 when no history exists yet (units per calcDistanceToHead)
	  if(head_distances_.size()!=0)
			head_distance=head_distances_[0];//100;
		else
		  head_distance = 5;


	}
	else
	{
		head_distance=calcDistanceToHead(detected_face_, kalman_filter_);
    	//	ROS_ERROR("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxhead_distance:%f", head_distance);
	}
	
    //Maintain a 10-element sliding window of recent distances
    if(head_distances_.size()==0)
    {
      for(int i=0;i<10;i++)
	    head_distances_.push_back(head_distance);
    }
    else
    {
      head_distances_.pop_back();
      head_distances_.insert(head_distances_.begin(),head_distance);
    }


    head_distance=0.0; //Reuse variable to compute mean head distance

    //Use mean distance of last measured head distances
    for(unsigned int i=0;i<head_distances_.size();i++)
    	{
                //ROS_ERROR("xxxxxxxxxx0xxxxxhead_distance:%f, s_:%f\n", head_distance, head_distances_[i]);
		head_distance+=head_distances_[i];
}

    head_distance=head_distance/head_distances_.size();
    //ROS_ERROR("xxxxxxxxxx2xxxxxhead_distance:%f", head_distance);

    //Update undetected count
	if(!face_detected_bool_)
	{	undetected_count_++;

	}
	else
	{
		undetected_count_ = 0;
	}

	//Create Face Pos and Size message
        qbo_face_msgs::FacePosAndDist message;
	message.image_width=cv_ptr->image.cols;
	message.image_height=cv_ptr->image.rows;
	message.type_of_tracking = detection_type;



	if(undetected_count_<undetected_threshold_) //If head has been recently detected, use Kalman filter prediction
	{
		//Position is reported relative to the image center
		message.face_detected = true;
		message.u = kalman_filter_.statePost.at<float>(0,0) - cv_ptr->image.cols/2.;
		message.v = kalman_filter_.statePost.at<float>(1,0) - cv_ptr->image.rows/2.;
		message.distance_to_head = float(head_distance);

	}
	else //If head has not been recently detected, face detection has failed
	{
		message.u = default_pos_.x;
		message.v = default_pos_.y;

		message.face_detected = false;

	}

    if(face_detected_bool_)
    {
    	//Publish head to topic
    	cv_ptr->image = detected_face_;
		face_pub_.publish(cv_ptr->toImageMsg());


		if(send_to_face_recognizer_)
			sendToRecognizer();


		//Change nose color
        nose.color=4; //If face detected - Blue
		if(head_distance<distance_threshold_)
        {
		 	nose.color=2; //If face is near - Green
        }
    }

    //Publish nose color
    nose_color_pub_.publish(nose);

    /*
     * Draws for the Viewer
     * Velocity vector, face rectangle and face center
     */
    int pred_x = int(message.u) + int(message.image_width)/2;
    int pred_y = int(message.v) + int(message.image_height)/2;

    cv::Point pred_kal = cv::Point(pred_x, pred_y);
    cv::Point tip_u_vel = cv::Point(pred_kal.x + (kalman_filter_.statePost.at<float>(2,0))/1., pred_kal.y);
    cv::Point tip_v_vel = cv::Point(pred_kal.x, pred_kal.y + (kalman_filter_.statePost.at<float>(3,0))/1.);
    //Draw face center and rectangle
    cv::circle(image_received, pred_kal,3,cv::Scalar(0,0,255), 3);
	cv::rectangle(image_received, detected_face_roi_, cv::Scalar(255,0,0), 2);
    if(tip_u_vel.x >= 0 && tip_u_vel.y >= 0 && tip_u_vel.x < image_received.cols && tip_u_vel.y < image_received.rows)
    {
    	cv::line(image_received, pred_kal, tip_u_vel, cv::Scalar(0,255,0), 2);
    }
    if(tip_v_vel.x >= 0 && tip_v_vel.y >= 0 && tip_v_vel.x < image_received.cols && tip_v_vel.y < image_received.rows)
    {
    	cv::line(image_received, pred_kal, tip_v_vel, cv::Scalar(255,0,0), 2);
    }

    //Draw name of person in image
	if(print_recognized_face_) 
		cv::putText(image_received, name_detected_, cv::Point(40,40), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(0,0,255), 3);

    
    /*
     * Publish Image viewer
     */
    cv_bridge::CvImagePtr cv_ptr2;
    try
    {
      cv_ptr2 = cv_bridge::toCvCopy(image_ptr, sensor_msgs::image_encodings::RGB8);
    }
    catch (cv_bridge::Exception& e)
    {
	    ROS_ERROR("cv_bridge exception: %s", e.what());
	    return;
    }
    
    cv_ptr2->image = image_received;  
    viewer_image_pub_.publish(cv_ptr2->toImageMsg());
    

    /*
     * Publish face position and size
     */
    face_position_and_size_pub_.publish(message);

    //NOTE(review): debug display left in — remove for headless deployments
    imshow("1111",image_received);
    cv::waitKey(10);
    /*
     * Update Haar cascade use frequency for Dynamic Haar Cascade
     */
    if(dynamic_check_haar_)
    {
	//Long undetected streak: reset to checking Haar every 2 frames
	if(undetected_count_ > 10)
	{
		check_Haar_ = 2;
		cam_shift_detected_count_ = 0;
		cam_shift_undetected_count_ = 0;
	}
	
    	else if(cam_shift_undetected_count_>3)
    	{
    		check_Haar_/=2;
    		if(check_Haar_<2)
    			check_Haar_ = 2;
    	}

    	//Stable CamShift tracking: check Haar less often (cap at 100)
    	if(cam_shift_detected_count_>10)
    	{
    		check_Haar_+=5;
    		if(check_Haar_>100)
    			check_Haar_ = 100;

    	}
	

    	//NOTE(review): assigns the integer period to track_object_ — looks
    	//like it may have been intended as a period like check_Haar_; confirm
    	track_object_= check_Haar_ ;
    }

    /*
     * ROS_INFO
     */
    if(face_detected_bool_)
    {
    	if(dynamic_check_haar_)
    ROS_INFO("FACE DETECTED -> Head Pos :(%d, %d), Head Distance: %lg, Dynamic check Haar: %u, Det type: %s, Alt: %s", (int)message.u, (int)message.v, head_distance, check_Haar_, detection_type.c_str(), (exist_alternative_)?"true":"false");
    	else
    		ROS_INFO("FACE DETECTED -> Head Pos :(%d, %d), Head Distance: %lg, Check Haar: %u, Detection type: %s, Alt: %s",
    				(int)message.u, (int)message.v, head_distance, check_Haar_, detection_type.c_str(), (exist_alternative_)?"true":"false");
    }
    else
    {
    	if(dynamic_check_haar_)
    		ROS_INFO("NO FACE DETECTED. Using Haar Cascade Classifier to find one. Dynamic Check Haar: %u, Alt: %s", check_Haar_, (exist_alternative_)?"true":"false");
    	else
    		ROS_INFO("NO FACE DETECTED. Using Haar Cascade Classifier to find one. Check Haar: %u, Alt: %s", check_Haar_, (exist_alternative_)?"true":"false");
    }
    
	/*
	 * Refresh face_detected bool to use Haar Cascade once in a while
	 */
    if(loop_counter_%check_Haar_==0)
    	face_detected_bool_ = false;

    if(loop_counter_%check_track_obj_==0)
    	track_object_ = false;

    loop_counter_++;
    
}
Пример #7
0
// Greedy backward elimination: repeatedly score every remaining feature
// dimension and remove the worst ones (per m_policy) until only
// m_target_dim features remain. Returns the reduced feature object.
// NOTE(review): the "template <class ST>" header for this member appears
// outside this excerpt.
CFeatures* CFeatureSelection<ST>::apply_backward_elimination(CFeatures* features)
{
	SG_DEBUG("Entering!\n");

	// precompute whenever appropriate for performing the rest of the tasks
	precompute();

	// NULL check for features is handled in get_num_features
	index_t num_features=get_num_features(features);
	SG_DEBUG("Initial number of features %d!\n", num_features);

	// the main loop
	while (num_features>m_target_dim)
	{
		// tune the measurement parameters whenever necessary based on current
		// features
		adapt_params(features);

		// compute the measures for each of the current dimensions
		SGVector<float64_t> measures(num_features);
		for (index_t i=0; i<num_features; ++i)
			measures[i]=compute_measures(features, i);

		if (io->get_loglevel()==MSG_DEBUG || io->get_loglevel()==MSG_GCDEBUG)
			measures.display_vector("measures");

		// rank the measures
		SGVector<index_t> argsorted=CMath::argsort(measures);

		if (io->get_loglevel()==MSG_DEBUG || io->get_loglevel()==MSG_GCDEBUG)
			argsorted.display_vector("argsorted");

		// make sure that we don't end up with lesser feats than target dim
		index_t to_remove;
		if (m_policy==N_SMALLEST || m_policy==N_LARGEST)
			to_remove=m_num_remove;
		else
			to_remove=num_features*m_num_remove*0.01; // m_num_remove is a percentage here

		index_t can_remove=num_features-m_target_dim;

		// if policy is to remove N feats corresponding to smallest/largest
		// measures, we just replace N with can_remove. if policy is to remove
		// N% feats, then we change the policy temporarily and remove a fixed
		// can_remove number of feats instead
		index_t orig_remove=m_num_remove;
		EFeatureRemovalPolicy orig_policy=m_policy;

		if (to_remove>can_remove)
		{
			m_num_remove=can_remove;
			SG_DEBUG("Can only remove %d features in this iteration!\n",
					can_remove);

			if (m_policy==PERCENTILE_SMALLEST)
				m_policy=N_SMALLEST;
			else if (m_policy==PERCENTILE_LARGEST)
				m_policy=N_LARGEST;
		}

		// remove appropriate number of features based on the measures and the
		// removal policy. this internally update the subset for selected
		// features as well
		features=remove_feats(features, argsorted);

		// restore original removal policy and numbers if necessary for the
		// sake of consistency
		if (to_remove>can_remove)
		{
			m_policy=orig_policy;
			m_num_remove=orig_remove;
		}

		// update the number of features
		num_features=get_num_features(features);
		SG_DEBUG("Current number of features %d!\n", num_features);
	}

	// sanity check: we must have converged exactly on the target dimension
	ASSERT(m_subset->get_size()==m_target_dim);

	SG_DEBUG("Leaving!\n");
	return features;
}
Пример #8
0
 unsigned MeasureGroup_Impl::numMeasures(bool selectedMeasuresOnly) const {
   return measures(selectedMeasuresOnly).size();
 }
Пример #9
0
 boost::optional<FileReferenceType> MeasureGroup_Impl::outputFileType() const {
   // Derive the group's output file type from all measures (not just the
   // selected ones). The helper's first element flags a consistent result.
   const std::pair<bool,OptionalFileReferenceType> result = detail::outputFileType(measures(false));
   OS_ASSERT(result.first);
   return result.second;
 }
Пример #10
0
int main(int argc, char* argv[]){

	//float beta;
	//int row,col;

	string file_name = FILE_NAME;

	HostMatrix<float> X;
	HostMatrix<float> X_test; 
	HostMatrix<float> Y;
	HostMatrix<float> Y_test;

	HostMatrix<float> Input;
	HostMatrix<float> Target;

	std::map<string,int> Classes;
	std::map<int,string> ClassesLookup;

	readFile(file_name,Input,Target,Classes,ClassesLookup);

	int kfold = 1;
	int correct_instances = 0;
	int incorrect_instances = 0;
	int total_instances = 0;

	int **confusionMatrix;

	confusionMatrix = (int**) malloc(sizeof(int*)*Classes.size());

	for(int i = 0; i < (int)Classes.size(); i++){
		confusionMatrix[i] = (int*) malloc(sizeof(int)*Classes.size());
		memset(confusionMatrix[i],0,sizeof(int)*Classes.size());
	}


	float Pet_mean = 0;
	float Ped_mean = 0;

	unsigned int seed = (unsigned)time(0);

	/***************RUN INFORMATION*************/

	writeHeader(Input,Classes.size(),seed);

	/*******************************************/


	if(!InitCUDA()) {
		return 1;
	}

	culaStatus status;
	status = culaInitialize();


	std::cout << "Starting " << std::endl;


	float center_time = 0;
	float width_time = 0;
	float weight_time = 0;
	float scaling_time = 0;

	unsigned int time_total = 0;
	unsigned int testing_time = 0;
	unsigned int training_time = 0;

	clock_t initialTimeTotal = clock();

	do{	
		X = crossvalidationTrain(Input,KFOLDS,kfold);
		X_test = crossvalidationTest(Input,KFOLDS,kfold);
		Y = crossvalidationTrain(Target,KFOLDS,kfold);
		Y_test = crossvalidationTest(Target,KFOLDS,kfold);

		HostMatrix<float> Weights;
		HostMatrix<float> Centers;

		/*Train Network*/

		clock_t initialTime = clock();
		RadialBasisFunction RBF(NETWORK_SIZE,RNEIGHBOURS,SCALING_FACTOR,Classes.size());
		RBF.SetSeed(seed);
		RBF.Train(X,Y);
		training_time = (clock() - initialTime);

		center_time += RBF.times[0];
		width_time += RBF.times[1];
		weight_time += RBF.times[2];
		scaling_time += RBF.times[3];

		/*Test Network*/

		initialTime = clock();
		std::cout << "Testing" << std::endl;
		HostMatrix<float> out_test;


		out_test = RBF.Test(X_test);

		for(int i = 0; i< X_test.Rows();i++){

			float max = 0;
			float out_class = 0;
			for(int j = 0; j < (int) Classes.size(); j++){
				if(out_test(i,j) > max){
					out_class = (float)j;
					max = out_test(i,j);
				}
			}

			out_test(i,0) = out_class+1;

		}

		for (int i = 0; i < out_test.Rows(); i++)
		{

			out_test(i,0) = (float)round(out_test(i,0));     

			if(out_test(i,0) <= 0) out_test(i,0) = 1;

			if(out_test(i,0) > Classes.size()) out_test(i,0) = (float)Classes.size();

			std::cout << Y_test(i,0) << " " << out_test(i,0) << std::endl;
		}


		correct_instances += out_test.Rows() - error_calc(Y_test,out_test);
		incorrect_instances += error_calc(Y_test,out_test);
		total_instances += out_test.Rows();

		/*Add values to Confusion Matrix*/
		for(int i = 0; i < Y_test.Rows(); i++){
			confusionMatrix[((int)Y_test(i,0))-1][((int)out_test(i,0))-1] = confusionMatrix[((int)Y_test(i,0))-1][((int)out_test(i,0))-1] + 1;
		}

		testing_time = (clock() - initialTime);

		/*Increment fold number, for use in crossvalidation*/
		kfold++;
	}while(kfold <= KFOLDS);

	time_total  = (clock() - initialTimeTotal);

	/*****************MEASURES****************/

	measures(correct_instances,total_instances,incorrect_instances,confusionMatrix,Classes,ClassesLookup);

	writeFooter(center_time,width_time,weight_time,scaling_time,training_time,testing_time,time_total);

	culaShutdown();
	cudaThreadExit();

	return 0;
}