Example #1
  AlprImpl::AlprImpl(const std::string country, const std::string configFile, const std::string runtimeDir)
  {
    
    timespec startTime;
    getTimeMonotonic(&startTime);
    
    config = new Config(country, configFile, runtimeDir);
    
    plateDetector = ALPR_NULL_PTR;
    stateDetector = ALPR_NULL_PTR;
    ocr = ALPR_NULL_PTR;
    prewarp = ALPR_NULL_PTR;
    
    // Config file or runtime dir not found.  Don't process any further.
    if (config->loaded == false)
    {
      return;
    }

    plateDetector = createDetector(config);
    ocr = new OCR(config);
    setNumThreads(0);

    setDetectRegion(DEFAULT_DETECT_REGION);
    this->topN = DEFAULT_TOPN;
    setDefaultRegion("");
    
    prewarp = new PreWarp(config);
    
    timespec endTime;
    getTimeMonotonic(&endTime);
    if (config->debugTiming)
      cout << "OpenALPR Initialization Time: " << diffclock(startTime, endTime) << "ms." << endl;
    
  }
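All of these examples share the same timing idiom: take a monotonic timestamp before the work, take another afterwards, and print the difference in milliseconds when debug timing is enabled. getTimeMonotonic and diffclock are project helper functions that are not shown here (and the PTPd variant later in Example #4 takes the clock driver as an extra argument). A minimal standalone sketch of the idiom, assuming the helpers simply wrap a monotonic clock and report elapsed milliseconds, could look like this:

  #include <time.h>
  #include <iostream>

  // Hypothetical stand-ins for the helpers used in the examples; the real
  // declarations live in the projects' own support code.
  static void getTimeMonotonic(timespec* t)
  {
    clock_gettime(CLOCK_MONOTONIC, t);
  }

  static double diffclock(timespec start, timespec end)
  {
    // Elapsed time in milliseconds between two monotonic timestamps.
    return (end.tv_sec - start.tv_sec) * 1000.0 +
           (end.tv_nsec - start.tv_nsec) / 1000000.0;
  }

  int main()
  {
    timespec startTime;
    getTimeMonotonic(&startTime);

    // ... the work being measured goes here ...

    timespec endTime;
    getTimeMonotonic(&endTime);
    std::cout << "Elapsed: " << diffclock(startTime, endTime) << "ms." << std::endl;
    return 0;
  }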
Example #2
  ColorFilter::ColorFilter(Mat image, Mat characterMask, Config* config)
  {
    timespec startTime;
    getTimeMonotonic(&startTime);

    this->config = config;

    this->debug = config->debugColorFiler;

    this->grayscale = imageIsGrayscale(image);

    if (this->debug)
      cout << "ColorFilter: isGrayscale = " << grayscale << endl;

    this->hsv = Mat(image.size(), image.type());
    cvtColor( image, this->hsv, CV_BGR2HSV );
    preprocessImage();

    this->charMask = characterMask;

    this->colorMask = Mat(image.size(), CV_8U);

    findCharColors();

    if (config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "  -- ColorFilter Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }
  }
Example #3
  vector<Rect> DetectorCPU::find_plates(Mat frame, cv::Size min_plate_size, cv::Size max_plate_size)
  {

    vector<Rect> plates;
   
    //-- Detect plates
    timespec startTime;
    getTimeMonotonic(&startTime);

    equalizeHist( frame, frame );
    
    plate_cascade.detectMultiScale( frame, plates, config->detection_iteration_increase, config->detectionStrictness,
                                      CV_HAAR_DO_CANNY_PRUNING,
                                      //0|CV_HAAR_SCALE_IMAGE,
                                      min_plate_size, max_plate_size );


    if (config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "LBP Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }

    return plates;

  }
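DetectorCPU::find_plates delegates the detection itself to cv::CascadeClassifier::detectMultiScale, passing the configured iteration increase as the scaleFactor, the detection strictness as minNeighbors, and bounding the results by the minimum and maximum plate sizes. A self-contained sketch of the same call outside the Detector class, with a hypothetical cascade file and hard-coded size limits standing in for the Config values, might look like:

  #include <opencv2/core/core.hpp>
  #include <opencv2/imgproc/imgproc.hpp>
  #include <opencv2/objdetect/objdetect.hpp>
  #include <opencv2/highgui/highgui.hpp>
  #include <iostream>
  #include <vector>

  int main()
  {
    // Hypothetical file names; in OpenALPR these come from the runtime directory and Config.
    cv::CascadeClassifier plate_cascade;
    if (!plate_cascade.load("lbp_plate_cascade.xml"))
      return 1;

    cv::Mat frame = cv::imread("frame.png", 0);  // load as grayscale
    if (frame.empty())
      return 1;

    cv::equalizeHist(frame, frame);

    std::vector<cv::Rect> plates;
    plate_cascade.detectMultiScale(frame, plates,
                                   1.1,                   // scaleFactor (detection_iteration_increase)
                                   3,                     // minNeighbors (detectionStrictness)
                                   0,                     // flags
                                   cv::Size(70, 35),      // minimum plate size
                                   cv::Size(1000, 500));  // maximum plate size

    std::cout << "Found " << plates.size() << " candidate plate regions" << std::endl;
    return 0;
  }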
Example #4
Boolean
_setupClockDriver_unix(ClockDriver* self)
{

    if((self->type == SYSTEM_CLOCK_TYPE) && (_instanceCount > 0)) {
	WARNING(THIS_COMPONENT"Only one instance of the system clock driver is allowed\n");
	return FALSE;
    }

    INIT_INTERFACE(self);
    INIT_DATA_CLOCKDRIVER(self, linuxphc);
    INIT_CONFIG_CLOCKDRIVER(self, linuxphc);

    _instanceCount++;

    self->_instanceCount = &_instanceCount;

    self->systemClock = TRUE;

    resetIntPermanentAdev(&self->_adev);
    getTimeMonotonic(self, &self->_initTime);

    strncpy(self->name, SYSTEM_CLOCK_NAME, CLOCKDRIVER_NAME_MAX);

    INFO(THIS_COMPONENT"Started Unix clock driver %s\n", self->name);

    return TRUE;

}
Example #5
  void AlprImpl::setDetectRegion(bool detectRegion)
  {
    
    this->detectRegion = detectRegion;
    if (detectRegion && this->stateDetector == NULL)
    {
        timespec startTime;
        getTimeMonotonic(&startTime);
        
        this->stateDetector = new StateDetector(this->config->country, this->config->runtimeBaseDir);
        
        timespec endTime;
        getTimeMonotonic(&endTime);
        if (config->debugTiming)
          cout << "State Identification Initialization Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }


  }
Example #6
  // Attempts to recognize the plate's state/region. Updates the region code and confidence
  // on the pipeline data. Returns true if a region is found with sufficient confidence.
  bool StateIdentifier::recognize(PipelineData* pipeline_data)
  {
    timespec startTime;
    getTimeMonotonic(&startTime);

    Mat plateImg = Mat(pipeline_data->grayImg, pipeline_data->regionOfInterest);

    resize(plateImg, plateImg, getSizeMaintainingAspect(plateImg, config->stateIdImageWidthPx, config->stateIdimageHeightPx));


    Mat debugImg(plateImg.size(), plateImg.type());
    plateImg.copyTo(debugImg);
    vector<int> matchesArray(featureMatcher->numTrainingElements());

    RecognitionResult result = featureMatcher->recognize(plateImg, true, &debugImg, true, matchesArray );

    if (this->config->debugStateId)
    {
      displayImage(config, "State Identifier1", plateImg);
      displayImage(config, "State Identifier", debugImg);
      cout << result.haswinner << " : " << result.confidence << " : " << result.winner << endl;
    }

    if (config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "State Identification Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }

    if (result.haswinner == false)
      return false;

    pipeline_data->region_code = result.winner;
    pipeline_data->region_confidence = result.confidence;

    if (result.confidence >= 10)
      return true;

    return false;
  }
Example #7
  void OCR::performOCR(PipelineData* pipeline_data)
  {
    const int SPACE_CHAR_CODE = 32;

    timespec startTime;
    getTimeMonotonic(&startTime);

    postProcessor.clear();

    // Don't waste time on OCR processing if it is impossible to get sufficient characters
    int total_char_spaces = 0;
    for (unsigned int i = 0; i < pipeline_data->charRegions.size(); i++)
      total_char_spaces += pipeline_data->charRegions[i].size();
    if (total_char_spaces < config->postProcessMinCharacters)
    {
      pipeline_data->disqualify_reason = "Insufficient character boxes detected.  No OCR performed.";
      pipeline_data->disqualified = true;
      return;
    }

    for (unsigned int i = 0; i < pipeline_data->thresholds.size(); i++)
    {
      // Make it black text on white background
      bitwise_not(pipeline_data->thresholds[i], pipeline_data->thresholds[i]);
      tesseract.SetImage((uchar*) pipeline_data->thresholds[i].data,
                          pipeline_data->thresholds[i].size().width, pipeline_data->thresholds[i].size().height,
                          pipeline_data->thresholds[i].channels(), pipeline_data->thresholds[i].step1());

      int absolute_charpos = 0;
      for (unsigned int line_idx = 0; line_idx < pipeline_data->charRegions.size(); line_idx++)
      {
        for (unsigned int j = 0; j < pipeline_data->charRegions[line_idx].size(); j++)
        {
          Rect expandedRegion = expandRect( pipeline_data->charRegions[line_idx][j], 2, 2, pipeline_data->thresholds[i].cols, pipeline_data->thresholds[i].rows) ;

          tesseract.SetRectangle(expandedRegion.x, expandedRegion.y, expandedRegion.width, expandedRegion.height);
          tesseract.Recognize(NULL);

          tesseract::ResultIterator* ri = tesseract.GetIterator();
          tesseract::PageIteratorLevel level = tesseract::RIL_SYMBOL;
          do
          {
            const char* symbol = ri->GetUTF8Text(level);
            float conf = ri->Confidence(level);

            bool dontcare;
            int fontindex = 0;
            int pointsize = 0;
            const char* fontName = ri->WordFontAttributes(&dontcare, &dontcare, &dontcare, &dontcare, &dontcare, &dontcare, &pointsize, &fontindex);

            // Ignore NULL pointers, spaces, and characters that are way too small to be valid
            if(symbol != 0 && symbol[0] != SPACE_CHAR_CODE && pointsize >= config->ocrMinFontSize)
            {
              postProcessor.addLetter(string(symbol), line_idx, absolute_charpos, conf);

              if (this->config->debugOcr)
                printf("charpos%d line%d: threshold %d:  symbol %s, conf: %f font: %s (index %d) size %dpx\n", absolute_charpos, line_idx, i, symbol, conf, fontName, fontindex, pointsize);

              bool indent = false;
              tesseract::ChoiceIterator ci(*ri);
              do
              {
                const char* choice = ci.GetUTF8Text();
                // 1/17/2016 adt: skip this choice if it is identical to the symbol already added
                // above; otherwise the first choice would get a double confidence boost when the
                // choice iterator runs.
                if (string(symbol) != string(choice))
                  postProcessor.addLetter(string(choice), line_idx, absolute_charpos, ci.Confidence());

                if (this->config->debugOcr)
                {
                  if (indent) printf("\t\t ");
                  printf("\t- ");
                  printf("%s conf: %f\n", choice, ci.Confidence());
                }

                indent = true;
              }
              while(ci.Next());

            }

            if (this->config->debugOcr)
              printf("---------------------------------------------\n");

            delete[] symbol;
          }
          while((ri->Next(level)));

          delete ri;

          absolute_charpos++;
        }
      }
    }

    if (config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "OCR Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }
  }
Example #8
  void PlateLines::processImage(Mat inputImage, vector<TextLine> textLines, float sensitivity)
  {
    if (this->debug)
      cout << "PlateLines findLines" << endl;

    timespec startTime;
    getTimeMonotonic(&startTime);


    // Ignore input images that are pure white or pure black
    Scalar avgPixelIntensity = mean(inputImage);
    if (avgPixelIntensity[0] >= 252)
      return;
    else if (avgPixelIntensity[0] <= 3)
      return;

    // Do a bilateral filter to clean the noise but keep edges sharp
    Mat smoothed(inputImage.size(), inputImage.type());
    adaptiveBilateralFilter(inputImage, smoothed, Size(3,3), 45, 45);


    int morph_elem  = 2;
    int morph_size = 2;
    Mat element = getStructuringElement( morph_elem, Size( 2*morph_size + 1, 2*morph_size+1 ), Point( morph_size, morph_size ) );


    Mat edges(inputImage.size(), inputImage.type());
    Canny(smoothed, edges, 66, 133);

    // Create a mask that is dilated based on the detected characters


    Mat mask = Mat::zeros(inputImage.size(), CV_8U);

    for (unsigned int i = 0; i < textLines.size(); i++)
    {
      vector<vector<Point> > polygons;
      polygons.push_back(textLines[i].textArea);
      fillPoly(mask, polygons, Scalar(255,255,255));
    }



    dilate(mask, mask, getStructuringElement( 1, Size( 1 + 1, 2*1+1 ), Point( 1, 1 ) ));
    bitwise_not(mask, mask);

    // AND canny edges with the character mask
    bitwise_and(edges, mask, edges);


    vector<PlateLine> hlines = this->getLines(edges, sensitivity, false);
    vector<PlateLine> vlines = this->getLines(edges, sensitivity, true);
    for (unsigned int i = 0; i < hlines.size(); i++)
      this->horizontalLines.push_back(hlines[i]);
    for (unsigned int i = 0; i < vlines.size(); i++)
      this->verticalLines.push_back(vlines[i]);

    // if debug is enabled, draw the image
    if (this->debug)
    {
      Mat debugImgHoriz(edges.size(), edges.type());
      Mat debugImgVert(edges.size(), edges.type());
      edges.copyTo(debugImgHoriz);
      edges.copyTo(debugImgVert);
      cvtColor(debugImgHoriz,debugImgHoriz,CV_GRAY2BGR);
      cvtColor(debugImgVert,debugImgVert,CV_GRAY2BGR);

      for( size_t i = 0; i < this->horizontalLines.size(); i++ )
      {
        line( debugImgHoriz, this->horizontalLines[i].line.p1, this->horizontalLines[i].line.p2, Scalar(0,0,255), 1, CV_AA);
      }

      for( size_t i = 0; i < this->verticalLines.size(); i++ )
      {
        line( debugImgVert, this->verticalLines[i].line.p1, this->verticalLines[i].line.p2, Scalar(0,0,255), 1, CV_AA);
      }

      vector<Mat> images;
      images.push_back(debugImgHoriz);
      images.push_back(debugImgVert);

      Mat dashboard = drawImageDashboard(images, debugImgVert.type(), 1);
      displayImage(pipelineData->config, "Hough Lines", dashboard);
    }

    if (pipelineData->config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "Plate Lines Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }

  }
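PlateLines::processImage builds a Canny edge map, blanks out a dilated mask of the character regions so the letter strokes are not mistaken for plate edges, and then extracts horizontal and vertical candidate lines. getLines is not shown here; judging by the "Hough Lines" debug window, it presumably wraps a Hough transform. A standalone sketch of the edge-mask-then-Hough idea, using the probabilistic Hough transform as an assumption, could look like:

  #include <opencv2/core/core.hpp>
  #include <opencv2/imgproc/imgproc.hpp>
  #include <vector>

  // Sketch only: find candidate plate-edge lines in a grayscale crop while
  // ignoring edges that belong to the characters themselves.
  std::vector<cv::Vec4i> findEdgeLines(const cv::Mat& gray,
                                       const std::vector<std::vector<cv::Point> >& textAreas)
  {
    cv::Mat edges;
    cv::Canny(gray, edges, 66, 133);

    // Mask out the (slightly dilated) character regions so their strokes
    // are not picked up as plate edges.
    cv::Mat mask = cv::Mat::zeros(gray.size(), CV_8U);
    cv::fillPoly(mask, textAreas, cv::Scalar(255));
    cv::dilate(mask, mask, cv::getStructuringElement(cv::MORPH_CROSS, cv::Size(3, 3)));
    cv::bitwise_not(mask, mask);
    cv::bitwise_and(edges, mask, edges);

    // Probabilistic Hough transform over what is left of the edge map.
    std::vector<cv::Vec4i> lines;
    cv::HoughLinesP(edges, lines, 1, CV_PI / 180, 50, gray.cols / 4, 10);
    return lines;
  }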
Example #9
  AlprFullDetails AlprImpl::recognizeFullDetails(cv::Mat img, std::vector<cv::Rect> regionsOfInterest)
  {
    timespec startTime;
    getTimeMonotonic(&startTime);


    AlprFullDetails response;

    response.results.epoch_time = getEpochTimeMs();
    response.results.img_width = img.cols;
    response.results.img_height = img.rows;

    // Fix regions of interest in case they extend beyond the bounds of the image
    for (unsigned int i = 0; i < regionsOfInterest.size(); i++)
      regionsOfInterest[i] = expandRect(regionsOfInterest[i], 0, 0, img.cols, img.rows);

    for (unsigned int i = 0; i < regionsOfInterest.size(); i++)
    {
      response.results.regionsOfInterest.push_back(AlprRegionOfInterest(regionsOfInterest[i].x, regionsOfInterest[i].y,
              regionsOfInterest[i].width, regionsOfInterest[i].height));
    }

    if (!img.data)
    {
      // Invalid image
      if (this->config->debugGeneral)
        std::cerr << "Invalid image" << std::endl;

      return response;
    }

    // Convert image to grayscale if required
    Mat grayImg = img;
    if (img.channels() > 2)
      cvtColor( img, grayImg, CV_BGR2GRAY );
    
    // Prewarp the image and ROIs if configured
    std::vector<cv::Rect> warpedRegionsOfInterest = regionsOfInterest;
    // Warp the image if prewarp is provided
    grayImg = prewarp->warpImage(grayImg);
    warpedRegionsOfInterest = prewarp->projectRects(regionsOfInterest, grayImg.cols, grayImg.rows, false);
    
    vector<PlateRegion> warpedPlateRegions;
    // Find all the candidate regions
    if (config->skipDetection == false)
    {
      warpedPlateRegions = plateDetector->detect(grayImg, warpedRegionsOfInterest);
    }
    else
    {
      // They have elected to skip plate detection.  Instead, return a list of plate regions
      // based on their regions of interest
      for (unsigned int i = 0; i < warpedRegionsOfInterest.size(); i++)
      {
        PlateRegion pr;
        pr.rect = cv::Rect(warpedRegionsOfInterest[i]);
        warpedPlateRegions.push_back(pr);
      }
    }

    queue<PlateRegion> plateQueue;
    for (unsigned int i = 0; i < warpedPlateRegions.size(); i++)
      plateQueue.push(warpedPlateRegions[i]);

    int platecount = 0;
    while(!plateQueue.empty())
    {
      PlateRegion plateRegion = plateQueue.front();
      plateQueue.pop();

      PipelineData pipeline_data(img, grayImg, plateRegion.rect, config);
      pipeline_data.prewarp = prewarp;

      timespec platestarttime;
      getTimeMonotonic(&platestarttime);

      LicensePlateCandidate lp(&pipeline_data);

      lp.recognize();

      bool plateDetected = false;
      if (!pipeline_data.disqualified)
      {
        AlprPlateResult plateResult;
        plateResult.region = defaultRegion;
        plateResult.regionConfidence = 0;
        plateResult.plate_index = platecount++;

        // If using prewarp, remap the plate corners to the original image
        vector<Point2f> cornerPoints = pipeline_data.plate_corners;
        cornerPoints = prewarp->projectPoints(cornerPoints, true);
        
        for (int pointidx = 0; pointidx < 4; pointidx++)
        {
          plateResult.plate_points[pointidx].x = (int) cornerPoints[pointidx].x;
          plateResult.plate_points[pointidx].y = (int) cornerPoints[pointidx].y;
        }
        
        if (detectRegion)
        {
          std::vector<StateCandidate> state_candidates = stateDetector->detect(pipeline_data.color_deskewed.data,
                                                                               pipeline_data.color_deskewed.elemSize(),
                                                                               pipeline_data.color_deskewed.cols,
                                                                               pipeline_data.color_deskewed.rows);

          if (state_candidates.size() > 0)
          {
            plateResult.region = state_candidates[0].state_code;
            plateResult.regionConfidence = (int) state_candidates[0].confidence;
          }
        }

        if (plateResult.region.length() > 0 && ocr->postProcessor.regionIsValid(plateResult.region) == false)
        {
          std::cerr << "Invalid pattern provided: " << plateResult.region << std::endl;
          std::cerr << "Valid patterns are located in the " << config->country << ".patterns file" << std::endl;
        }

        ocr->performOCR(&pipeline_data);
        ocr->postProcessor.analyze(plateResult.region, topN);

        timespec resultsStartTime;
        getTimeMonotonic(&resultsStartTime);

        const vector<PPResult> ppResults = ocr->postProcessor.getResults();

        int bestPlateIndex = 0;

        cv::Mat charTransformMatrix = getCharacterTransformMatrix(&pipeline_data);
        for (unsigned int pp = 0; pp < ppResults.size(); pp++)
        {

          // Set our "best plate" match to either the first entry, or the first entry with a postprocessor template match
          if (bestPlateIndex == 0 && ppResults[pp].matchesTemplate)
            bestPlateIndex = plateResult.topNPlates.size();
            
          AlprPlate aplate;
          aplate.characters = ppResults[pp].letters;
          aplate.overall_confidence = ppResults[pp].totalscore;
          aplate.matches_template = ppResults[pp].matchesTemplate;
            
          // Grab detailed results for each character
          for (unsigned int c_idx = 0; c_idx < ppResults[pp].letter_details.size(); c_idx++)
          {
            AlprChar character_details;
            character_details.character = ppResults[pp].letter_details[c_idx].letter;
            character_details.confidence = ppResults[pp].letter_details[c_idx].totalscore;
            cv::Rect char_rect = pipeline_data.charRegions[ppResults[pp].letter_details[c_idx].charposition];
            std::vector<AlprCoordinate> charpoints = getCharacterPoints(char_rect, charTransformMatrix );
            for (int cpt = 0; cpt < 4; cpt++)
              character_details.corners[cpt] = charpoints[cpt];
            aplate.character_details.push_back(character_details);
          }
          plateResult.topNPlates.push_back(aplate);
        }

        if (plateResult.topNPlates.size() > bestPlateIndex)
        {
          AlprPlate bestPlate;
          bestPlate.characters = plateResult.topNPlates[bestPlateIndex].characters;
          bestPlate.matches_template = plateResult.topNPlates[bestPlateIndex].matches_template;
          bestPlate.overall_confidence = plateResult.topNPlates[bestPlateIndex].overall_confidence;
          bestPlate.character_details = plateResult.topNPlates[bestPlateIndex].character_details;
          
          plateResult.bestPlate = bestPlate;
        }

        timespec plateEndTime;
        getTimeMonotonic(&plateEndTime);
        plateResult.processing_time_ms = diffclock(platestarttime, plateEndTime);
        if (config->debugTiming)
        {
          cout << "Result Generation Time: " << diffclock(resultsStartTime, plateEndTime) << "ms." << endl;
        }

        if (plateResult.topNPlates.size() > 0)
        {
          plateDetected = true;
          response.results.plates.push_back(plateResult);
        }
      }

      if (!plateDetected)
      {
        // Not a valid plate
        // Check if this plate has any children, if so, send them back up for processing
        for (unsigned int childidx = 0; childidx < plateRegion.children.size(); childidx++)
        {
          plateQueue.push(plateRegion.children[childidx]);
        }
      }

    }

    // Unwarp plate regions if necessary
    prewarp->projectPlateRegions(warpedPlateRegions, grayImg.cols, grayImg.rows, true);
    response.plateRegions = warpedPlateRegions;
    
    timespec endTime;
    getTimeMonotonic(&endTime);
    response.results.total_processing_time_ms = diffclock(startTime, endTime);

    if (config->debugTiming)
    {
      cout << "Total Time to process image: " << diffclock(startTime, endTime) << "ms." << endl;
    }

    if (config->debugGeneral && config->debugShowImages)
    {
      for (unsigned int i = 0; i < regionsOfInterest.size(); i++)
      {
        rectangle(img, regionsOfInterest[i], Scalar(0,255,0), 2);
      }

      for (unsigned int i = 0; i < response.plateRegions.size(); i++)
      {
        rectangle(img, response.plateRegions[i].rect, Scalar(0, 0, 255), 2);
      }

      for (unsigned int i = 0; i < response.results.plates.size(); i++)
      {
        // Draw a box around the license plate 
        for (int z = 0; z < 4; z++)
        {
          AlprCoordinate* coords = response.results.plates[i].plate_points;
          Point p1(coords[z].x, coords[z].y);
          Point p2(coords[(z + 1) % 4].x, coords[(z + 1) % 4].y);
          line(img, p1, p2, Scalar(255,0,255), 2);
        }
        
        // Draw the individual character boxes
        for (int q = 0; q < response.results.plates[i].bestPlate.character_details.size(); q++)
        {
          AlprChar details = response.results.plates[i].bestPlate.character_details[q];
          line(img, Point(details.corners[0].x, details.corners[0].y), Point(details.corners[1].x, details.corners[1].y), Scalar(0,255,0), 1);
          line(img, Point(details.corners[1].x, details.corners[1].y), Point(details.corners[2].x, details.corners[2].y), Scalar(0,255,0), 1);
          line(img, Point(details.corners[2].x, details.corners[2].y), Point(details.corners[3].x, details.corners[3].y), Scalar(0,255,0), 1);
          line(img, Point(details.corners[3].x, details.corners[3].y), Point(details.corners[0].x, details.corners[0].y), Scalar(0,255,0), 1);
        }
      }


      displayImage(config, "Main Image", img);

      // Sleep 1ms
      sleep_ms(1);

    }


    if (config->debugPauseOnFrame)
    {
      // Pause indefinitely until they press a key
      while ((char) cv::waitKey(50) == -1)
      {}
    }

    return response;
  }
Example #10
  void CharacterAnalysis::analyze()
  {
    timespec startTime;
    getTimeMonotonic(&startTime);

    pipeline_data->clearThresholds();
    pipeline_data->thresholds = produceThresholds(pipeline_data->crop_gray, config);

    timespec contoursStartTime;
    getTimeMonotonic(&contoursStartTime);

    pipeline_data->textLines.clear();

    for (unsigned int i = 0; i < pipeline_data->thresholds.size(); i++)
    {
      TextContours tc(pipeline_data->thresholds[i]);

      allTextContours.push_back(tc);
    }

    if (config->debugTiming)
    {
      timespec contoursEndTime;
      getTimeMonotonic(&contoursEndTime);
      cout << "  -- Character Analysis Find Contours Time: " << diffclock(contoursStartTime, contoursEndTime) << "ms." << endl;
    }
    //Mat img_equalized = equalizeBrightness(img_gray);

    timespec filterStartTime;
    getTimeMonotonic(&filterStartTime);

    for (unsigned int i = 0; i < pipeline_data->thresholds.size(); i++)
    {
      this->filter(pipeline_data->thresholds[i], allTextContours[i]);

      if (config->debugCharAnalysis)
        cout << "Threshold " << i << " had " << allTextContours[i].getGoodIndicesCount() << " good indices." << endl;
    }

    if (config->debugTiming)
    {
      timespec filterEndTime;
      getTimeMonotonic(&filterEndTime);
      cout << "  -- Character Analysis Filter Time: " << diffclock(filterStartTime, filterEndTime) << "ms." << endl;
    }

    PlateMask plateMask(pipeline_data);
    plateMask.findOuterBoxMask(allTextContours);

    pipeline_data->hasPlateBorder = plateMask.hasPlateMask;
    pipeline_data->plateBorderMask = plateMask.getMask();

    if (plateMask.hasPlateMask)
    {
      // Filter out bad contours now that we have an outer box mask...
      for (unsigned int i = 0; i < pipeline_data->thresholds.size(); i++)
      {
        filterByOuterMask(allTextContours[i]);
      }
    }

    int bestFitScore = -1;
    int bestFitIndex = -1;
    for (unsigned int i = 0; i < pipeline_data->thresholds.size(); i++)
    {

      int segmentCount = allTextContours[i].getGoodIndicesCount();

      if (segmentCount > bestFitScore)
      {
        bestFitScore = segmentCount;
        bestFitIndex = i;
        bestThreshold = pipeline_data->thresholds[i];
        bestContours = allTextContours[i];
      }
    }

    if (this->config->debugCharAnalysis)
      cout << "Best fit score: " << bestFitScore << " Index: " << bestFitIndex << endl;

    if (bestFitScore <= 1)
    {
      pipeline_data->disqualified = true;
      pipeline_data->disqualify_reason = "Low best fit score in characteranalysis";
      return;
    }

    //getColorMask(img, allContours, allHierarchy, charSegments);

    if (this->config->debugCharAnalysis)
    {
      Mat img_contours = bestContours.drawDebugImage(bestThreshold);

      displayImage(config, "Matching Contours", img_contours);
    }

    LineFinder lf(pipeline_data);
    vector<vector<Point> > linePolygons = lf.findLines(pipeline_data->crop_gray, bestContours);

    vector<TextLine> tempTextLines;
    for (unsigned int i = 0; i < linePolygons.size(); i++)
    {
      vector<Point> linePolygon = linePolygons[i];

      LineSegment topLine = LineSegment(linePolygon[0].x, linePolygon[0].y, linePolygon[1].x, linePolygon[1].y);
      LineSegment bottomLine = LineSegment(linePolygon[3].x, linePolygon[3].y, linePolygon[2].x, linePolygon[2].y);

      vector<Point> textArea = getCharArea(topLine, bottomLine);

      TextLine textLine(textArea, linePolygon, pipeline_data->crop_gray.size());

      tempTextLines.push_back(textLine);
    }

    filterBetweenLines(bestThreshold, bestContours, tempTextLines);

    // Sort the lines from top to bottom.
    std::sort(tempTextLines.begin(), tempTextLines.end(), sort_text_line);

    // Now that we've filtered a few more contours, re-do the text area.
    for (unsigned int i = 0; i < tempTextLines.size(); i++)
    {
      vector<Point> updatedTextArea = getCharArea(tempTextLines[i].topLine, tempTextLines[i].bottomLine);
      vector<Point> linePolygon = tempTextLines[i].linePolygon;
      if (updatedTextArea.size() > 0 && linePolygon.size() > 0)
      {
        pipeline_data->textLines.push_back(TextLine(updatedTextArea, linePolygon, pipeline_data->crop_gray.size()));
      }

    }

    pipeline_data->plate_inverted = isPlateInverted();


    if (pipeline_data->textLines.size() > 0)
    {
      int confidenceDrainers = 0;
      int charSegmentCount = this->bestContours.getGoodIndicesCount();
      if (charSegmentCount == 1)
        confidenceDrainers += 91;
      else if (charSegmentCount < 5)
        confidenceDrainers += (5 - charSegmentCount) * 10;

      // Use the angle for the first line -- assume they'll always be parallel for multi-line plates
      int absangle = abs(pipeline_data->textLines[0].topLine.angle);
      if (absangle > config->maxPlateAngleDegrees)
        confidenceDrainers += 91;
      else if (absangle > 1)
        confidenceDrainers += (config->maxPlateAngleDegrees - absangle) ;

      // If a multiline plate has only one line, disqualify
      if (pipeline_data->isMultiline && pipeline_data->textLines.size() < 2)
      {
        if (config->debugCharAnalysis)
          std::cout << "Did not detect multiple lines on multi-line plate" << std::endl;
        confidenceDrainers += 95;
      }

      if (confidenceDrainers >= 90)
      {
        pipeline_data->disqualified = true;
        pipeline_data->disqualify_reason = "Low confidence in characteranalysis";
      }
      else
      {
        float confidence = 100 - confidenceDrainers;
        pipeline_data->confidence_weights.setScore("CHARACTER_ANALYSIS_SCORE", confidence, 1.0);
      }
    }
    else
    {
        pipeline_data->disqualified = true;
        pipeline_data->disqualify_reason = "No text lines found in characteranalysis";
    }

    if (config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "Character Analysis Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }

    // Draw debug dashboard
    if (this->pipeline_data->config->debugCharAnalysis && pipeline_data->textLines.size() > 0)
    {
      vector<Mat> tempDash;
      for (unsigned int z = 0; z < pipeline_data->thresholds.size(); z++)
      {
        Mat tmp(pipeline_data->thresholds[z].size(), pipeline_data->thresholds[z].type());
        pipeline_data->thresholds[z].copyTo(tmp);
        cvtColor(tmp, tmp, CV_GRAY2BGR);

        tempDash.push_back(tmp);
      }

      Mat bestVal(this->bestThreshold.size(), this->bestThreshold.type());
      this->bestThreshold.copyTo(bestVal);
      cvtColor(bestVal, bestVal, CV_GRAY2BGR);

      for (unsigned int z = 0; z < this->bestContours.size(); z++)
      {
        Scalar dcolor(255,0,0);
        if (this->bestContours.goodIndices[z])
          dcolor = Scalar(0,255,0);
        drawContours(bestVal, this->bestContours.contours, z, dcolor, 1);
      }
      tempDash.push_back(bestVal);
      displayImage(config, "Character Region Step 1 Thresholds", drawImageDashboard(tempDash, bestVal.type(), 3));
    }
  }
Example #11
  vector<PlateRegion> DetectorCUDA::doCascade(Mat frame, int offset_x, int offset_y)
  {


    if (frame.cols > config->maxDetectionInputWidth)
    {
      // The frame is too wide
      this->scale_factor = ((float) config->maxDetectionInputWidth) / ((float) frame.cols);

      if (config->debugDetector)
        std::cout << "Input detection image is too wide.  Resizing with scale: " << this->scale_factor << endl;
    }
    else if (frame.rows > config->maxDetectionInputHeight)
    {
      // The frame is too tall
      this->scale_factor = ((float) config->maxDetectionInputHeight) / ((float) frame.rows);

      if (config->debugDetector)
        std::cout << "Input detection image is too tall.  Resizing with scale: " << this->scale_factor << endl;
    }

    int w = frame.size().width;
    int h = frame.size().height;

    vector<Rect> plates;

    equalizeHist( frame, frame );
    resize(frame, frame, Size(w * this->scale_factor, h * this->scale_factor));

    //-- Detect plates
    timespec startTime;
    getTimeMonotonic(&startTime);

    float maxWidth = ((float) w) * (config->maxPlateWidthPercent / 100.0f) * this->scale_factor;
    float maxHeight = ((float) h) * (config->maxPlateHeightPercent / 100.0f) * this->scale_factor;
    Size minSize(config->minPlateSizeWidthPx * this->scale_factor, config->minPlateSizeHeightPx * this->scale_factor);

    gpu::GpuMat cudaFrame, plateregions_buffer;
    Mat plateregions_downloaded;

    cudaFrame.upload(frame);
    int numdetected = cuda_cascade.detectMultiScale(cudaFrame, plateregions_buffer, (double) config->detection_iteration_increase, config->detectionStrictness, minSize);
    plateregions_buffer.colRange(0, numdetected).download(plateregions_downloaded);

    for (int i = 0; i < numdetected; ++i)
    {
      plates.push_back(plateregions_downloaded.ptr<cv::Rect>()[i]);
    }



    if (config->debugTiming)
    {
      timespec endTime;
      getTimeMonotonic(&endTime);
      cout << "LBP Time: " << diffclock(startTime, endTime) << "ms." << endl;
    }

    for( unsigned int i = 0; i < plates.size(); i++ )
    {
      plates[i].x = (plates[i].x / scale_factor) + offset_x;
      plates[i].y = (plates[i].y / scale_factor) + offset_y;
      plates[i].width = plates[i].width / scale_factor;
      plates[i].height = plates[i].height / scale_factor;
    }

    vector<PlateRegion> orderedRegions = aggregateRegions(plates);

    return orderedRegions;

  }
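DetectorCUDA shrinks frames that exceed the configured maximum detection input size, runs the cascade on the smaller image, and then maps each detected rectangle back into the original image's coordinate space by dividing by the scale factor and re-applying the region offset. A small standalone sketch of that bookkeeping, with hypothetical size limits in place of the Config values, could look like:

  #include <opencv2/core/core.hpp>

  // Compute the shrink factor needed to fit a frame inside the detector's
  // maximum input size (1.0f means no resizing). The limits here are
  // hypothetical defaults, not OpenALPR's configured values.
  float computeScaleFactor(const cv::Size& frameSize, int maxWidth = 1280, int maxHeight = 720)
  {
    float scale = 1.0f;
    if (frameSize.width > maxWidth)
      scale = ((float) maxWidth) / ((float) frameSize.width);
    else if (frameSize.height > maxHeight)
      scale = ((float) maxHeight) / ((float) frameSize.height);
    return scale;
  }

  // Map a detection found in the downsized frame back onto the original image,
  // re-applying the offset of the region of interest that was scanned.
  cv::Rect mapToOriginal(const cv::Rect& detected, float scaleFactor, int offset_x, int offset_y)
  {
    cv::Rect original;
    original.x = (int) (detected.x / scaleFactor) + offset_x;
    original.y = (int) (detected.y / scaleFactor) + offset_y;
    original.width = (int) (detected.width / scaleFactor);
    original.height = (int) (detected.height / scaleFactor);
    return original;
  }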