Example #1
0
//--------------------------------------------------------------
void testApp::setup(){
	ofBackground(255,255,255);
    ofSetVerticalSync(true);
	ofSetFrameRate(60);
	
    // video setup
    camWidth 		= 320;	// try to grab at this size. 
	camHeight 		= 240;
	
    // ready a grabbery source
	vidGrabber.initGrabber(camWidth,camHeight);
    
    numVideoSources = 3;
    
    // ready a ipcam source
    string uri = "http://148.61.142.228/axis-cgi/mjpg/video.cgi?resolution="+ ofToString(camWidth) + "x" + ofToString(camHeight);
    vidIpCam.setURI(uri);
    vidIpCam.connect();

    // ready a video source
	vidPlayer.loadMovie("fingers.mov");
	vidPlayer.play();
    
    currentBufferPlayer = 0;
    currentVideoSource  = 0;
    
    
    for(int i = 0; i < 6; i++) {
        ofxSharedVideoBuffer videoBuffer(new ofxVideoBuffer(100));
        buffers.push_back(videoBuffer);
        ofxSharedVideoBufferPlayer videoPlayer(new ofxVideoBufferPlayer());
        videoPlayer->loadVideoBuffer(videoBuffer);
        bufferPlayers.push_back(videoPlayer);
        
        videoPlayer->start();
    }
    
    
    isRecording = false;
	
}
Example #2
0
File: daemon.cpp  Project: Simage/openalpr
void streamRecognitionThread(void* arg)
{
  CaptureThreadData* tdata = (CaptureThreadData*) arg;
  
  LOG4CPLUS_INFO(logger, "country: " << tdata->country_code << " -- config file: " << tdata->config_file );
  LOG4CPLUS_INFO(logger, "Stream " << tdata->camera_id << ": " << tdata->stream_url);
  
  Alpr alpr(tdata->country_code, tdata->config_file);
  alpr.setTopN(tdata->top_n);
  
  
  int framenum = 0;
  
  LoggingVideoBuffer videoBuffer(logger);
  
  videoBuffer.connect(tdata->stream_url, 5);
  
  cv::Mat latestFrame;
  
  std::vector<uchar> buffer;
  
  LOG4CPLUS_INFO(logger, "Starting camera " << tdata->camera_id);
  
  while (daemon_active)
  {
    int response = videoBuffer.getLatestFrame(&latestFrame);
    
    if (response != -1)
    {
      
      timespec startTime;
      getTime(&startTime);
      cv::imencode(".bmp", latestFrame, buffer );
      std::vector<AlprResult> results = alpr.recognize(buffer);
      
      timespec endTime;
      getTime(&endTime);
      double totalProcessingTime = diffclock(startTime, endTime);
      
      if (tdata->clock_on)
      {
	LOG4CPLUS_INFO(logger, "Camera " << tdata->camera_id << " processed frame in: " << totalProcessingTime << " ms.");
      }
      
      if (results.size() > 0)
      {
	// Create a UUID for the image
	std::string uuid = newUUID();
	
	// Save the image to disk (using the UUID)
	if (tdata->output_images)
	{
	  std::stringstream ss;
	  ss << tdata->output_image_folder << "/" << uuid << ".jpg";
	  
	  cv::imwrite(ss.str(), latestFrame);
	}
	
	// Update the JSON content to include UUID and camera ID
  
	std::string json = alpr.toJson(results, totalProcessingTime);
	
	cJSON *root = cJSON_Parse(json.c_str());
	cJSON_AddStringToObject(root,	"uuid",		uuid.c_str());
	cJSON_AddNumberToObject(root,	"camera_id",	tdata->camera_id);
	cJSON_AddStringToObject(root, 	"site_id", 	tdata->site_id.c_str());
	cJSON_AddNumberToObject(root,	"img_width",	latestFrame.cols);
	cJSON_AddNumberToObject(root,	"img_height",	latestFrame.rows);

	char *out;
	out=cJSON_PrintUnformatted(root);
	cJSON_Delete(root);
	
	std::string response(out);
	
	free(out);
	
	// Push the results to the Beanstalk queue
	for (int j = 0; j < results.size(); j++)
	{
	  LOG4CPLUS_DEBUG(logger, "Writing plate " << results[j].bestPlate.characters << " (" <<  uuid << ") to queue.");
	}
	
	writeToQueue(response);
      }
    }
    
    usleep(10000);
  }
  
  
  videoBuffer.disconnect();
  
  LOG4CPLUS_INFO(logger, "Video processing ended");
  
  delete tdata;
}