Code Example #1
File: stateidentifier.cpp  Project: Blejzer/openalpr
// Attempts to recognize the plate.  Returns a confidence level.  Updates the "stateCode" variable
// with the value of the country/state.
int StateIdentifier::recognize(Mat img, char* stateCode)
{

  timespec startTime;
  getTime(&startTime);
  
  // Convert the plate crop to grayscale and scale it to the configured state-ID size
  cvtColor(img, img, CV_BGR2GRAY);

  resize(img, img, getSizeMaintainingAspect(img, config->stateIdImageWidthPx, config->stateIdimageHeightPx));
  
  // Work with a copy of the resized plate image (brightness equalization is currently disabled)
  Mat plateImg(img.size(), img.type());
  //plateImg = equalizeBrightness(img);
  img.copyTo(plateImg);
  
  // Keep a copy for debug output and allocate one match counter per training element
  Mat debugImg(plateImg.size(), plateImg.type());
  plateImg.copyTo(debugImg);
  vector<int> matchesArray(featureMatcher->numTrainingElements());
  
  
  // Run the trained feature matcher against the plate image
  RecognitionResult result = featureMatcher->recognize(plateImg, true, &debugImg, true, matchesArray);
  
  if (this->config->debugStateId)
  {
    
    
    displayImage(config, "State Identifier1", plateImg);
    displayImage(config, "State Identifier", debugImg);
    cout << result.haswinner << " : " << result.confidence << " : " << result.winner << endl;
  }
  
  
  if (config->debugTiming)
  {
    timespec endTime;
    getTime(&endTime);
    cout << "State Identification Time: " << diffclock(startTime, endTime) << "ms." << endl;
  }
  
  
  if (result.haswinner == false)
    return 0;
  
  strcpy(stateCode, result.winner.c_str());
  
  
  return result.confidence;
}
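
A minimal call site for this overload might look like the sketch below. It assumes a StateIdentifier that has already been constructed with a valid OpenALPR Config and trained feature matcher; the header name, image path, and buffer size are illustrative assumptions rather than part of the project.

#include <iostream>
#include <opencv2/opencv.hpp>
#include "stateidentifier.h"   // header name inferred from the file listing above

// Hypothetical helper: `stateIdentifier` is assumed to be fully initialized elsewhere.
void printRegion(StateIdentifier& stateIdentifier)
{
  cv::Mat plate = cv::imread("plate.png");   // BGR plate crop, matching the CV_BGR2GRAY conversion above
  char stateCode[8] = { 0 };                 // buffer that recognize() fills with the region code

  int confidence = stateIdentifier.recognize(plate, stateCode);
  if (confidence > 0)
    std::cout << "Region: " << stateCode << " (confidence " << confidence << ")" << std::endl;
  else
    std::cout << "No region match" << std::endl;
}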
Code Example #2
File: stateidentifier.cpp  Project: ChinMin/openalpr
  // Attempts to recognize the plate.  Returns a confidence level.  Updates the region code and confidence
  // If a region is found, returns true.
  bool StateIdentifier::recognize(PipelineData* pipeline_data)
  {
    timespec startTime;
    getTime(&startTime);

    Mat plateImg = Mat(pipeline_data->grayImg, pipeline_data->regionOfInterest);

    resize(plateImg, plateImg, getSizeMaintainingAspect(plateImg, config->stateIdImageWidthPx, config->stateIdimageHeightPx));


    Mat debugImg(plateImg.size(), plateImg.type());
    plateImg.copyTo(debugImg);
    vector<int> matchesArray(featureMatcher->numTrainingElements());

    RecognitionResult result = featureMatcher->recognize(plateImg, true, &debugImg, true, matchesArray );

    if (this->config->debugStateId)
    {
      displayImage(config, "State Identifier1", plateImg);
      displayImage(config, "State Identifier", debugImg);
      cout << result.haswinner << " : " << result.confidence << " : " << result.winner << endl;
    }

    if (config->debugTiming)
    {
      timespec endTime;
      getTime(&endTime);
      cout << "State Identification Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }

    if (result.haswinner == false)
      return false;

    pipeline_data->region_code = result.winner;
    pipeline_data->region_confidence = result.confidence;

    if (result.confidence >= 10)
      return true;

    return false;
  }
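
For the PipelineData variant, a caller could look roughly like this. Only the fields visible in the excerpt (grayImg, regionOfInterest, region_code, region_confidence) are used; how the PipelineData and StateIdentifier objects are constructed differs between OpenALPR versions and is assumed to happen in the surrounding pipeline.

#include <iostream>

// Hypothetical sketch: both objects are assumed to be set up by the surrounding pipeline.
void printRegion(StateIdentifier& stateIdentifier, PipelineData* pipeline_data)
{
  if (stateIdentifier.recognize(pipeline_data))
    std::cout << "Region " << pipeline_data->region_code
              << " (confidence " << pipeline_data->region_confidence << ")" << std::endl;
  else
    std::cout << "Region uncertain or not found" << std::endl;
}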
Code Example #3
bool FaceDetector::DetectFaces(const cv::Mat& cropped_img,
                               std::vector<cv::Rect>& faces_front,
                               std::vector<cv::Rect>& faces_profile) const {

    cv::Mat cascade_img;
    std::vector<cv::Rect>::iterator face_it;

    // create a copy of the image
    cropped_img.copyTo(cascade_img);

    // increase contrast of the image
    normalize(cascade_img, cascade_img, 0, 255, cv::NORM_MINMAX, CV_8UC1);



    // detect frontal faces
    classifier_front_.detectMultiScale(cascade_img,
                                            faces_front,
                                            classifier_front_scale_factor_,
                                            classifier_front_min_neighbours_,
                                            0|CV_HAAR_SCALE_IMAGE,
                                            classif_front_min_size_);

    // discard the face if it is not close to the top of the region (false positive)
    face_it = faces_front.begin();
    for ( ; face_it != faces_front.end(); ) {
        // allowed area is the full width and three times the size of the detected face
        cv::Rect allowed_area (0, 0, cropped_img.cols, face_it->height * 3);

        // test if the rectangles intersect
        if ( !(allowed_area & *face_it).area()) {
            face_it = faces_front.erase(face_it);
        } else
            ++face_it;
    }

    // only search profile faces if the frontal face detection failed
    if (faces_front.empty())
    {
        classifier_profile_.detectMultiScale(cascade_img,
                                                  faces_profile,
                                                  classifier_profile_scale_factor_,
                                                  classifier_profile_min_neighbours_,
                                                  0|CV_HAAR_SCALE_IMAGE,
                                                  classif_profile_min_size_);

        // discard the face if it is not close to the top of the region (false positive)
        face_it = faces_profile.begin();
        for ( ; face_it != faces_profile.end(); ) {
            // allowed area is the full width and three times the size of the detected face
            cv::Rect allowed_area (0, 0, cropped_img.cols, face_it->height * 3);

            // test if the rectangles intersect
            if ( !(allowed_area & *face_it).area()) {
                face_it = faces_profile.erase(face_it);
            } else
                ++face_it;
        }
    }


    // if debug mode is active, draw the detections and save/show the debug image
    if (debug_mode_)
    {
        cv::Mat debugImg = cropped_img.clone();  // deep copy so drawing does not modify the const input image

        for (uint j = 0; j < faces_front.size(); j++)
            cv::rectangle(debugImg, faces_front[j], cv::Scalar(0, 255, 0), 2, CV_AA);

        for (uint j = 0; j < faces_profile.size(); j++)
            cv::rectangle(debugImg, faces_profile[j], cv::Scalar(0, 0, 255), 2, CV_AA);


        cv::imwrite(debug_folder_ + ed::Entity::generateID().c_str() + "_face_detector.png", debugImg);
        cv::imshow("Face Detector Output", debugImg);
    }

    // return true if a face was found
    return (!faces_front.empty() || !faces_profile.empty());
}
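
A call site for this detector might look like the following sketch. The FaceDetector configuration (cascade files, scale factors, minimum sizes) is not shown in the excerpt and is assumed to be handled elsewhere; the header name is likewise an assumption.

#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>
#include "face_detector.h"   // header name assumed

// Hypothetical sketch: `detector` is assumed to be a fully configured FaceDetector.
void reportFaces(const FaceDetector& detector, const cv::Mat& cropped_img)
{
    std::vector<cv::Rect> faces_front;
    std::vector<cv::Rect> faces_profile;

    if (detector.DetectFaces(cropped_img, faces_front, faces_profile))
        std::cout << "Detected " << faces_front.size() << " frontal and "
                  << faces_profile.size() << " profile face(s)" << std::endl;
}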