static gboolean
set_look_at_smooth_func (BotViewHandler *vhandler)
{
    BotDefaultViewHandler *dvh = (BotDefaultViewHandler*) vhandler->user;

    gboolean end_timeout = TRUE;

    // bot_timestamp_now() returns microseconds, so scale the elapsed time to milliseconds
    double elapsed_ms = (bot_timestamp_now() - dvh->viewpath_timer_start) * 1e-3;
    double time_param = elapsed_ms / dvh->viewpath_duration_ms;

    if (time_param >= 1) {
        // goal viewpoint reached: clamp the parameter and return FALSE so GLib
        // removes this timeout source
        time_param = 1;
        end_timeout = FALSE;
    }

    for (int i=0; i<3; i++) {
        dvh->eye[i]    = dvh->origin_eye[i]    + time_param * (dvh->goal_eye[i]    - dvh->origin_eye[i]);
        dvh->lookat[i] = dvh->origin_lookat[i] + time_param * (dvh->goal_lookat[i] - dvh->origin_lookat[i]);
        dvh->up[i]     = dvh->origin_up[i]     + time_param * (dvh->goal_up[i]     - dvh->origin_up[i]);
    }

    look_at_changed(dvh);
    bot_viewer_request_redraw(dvh->viewer);

    return end_timeout;
}
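The callback above drives the interpolation off wall-clock time: bot_timestamp_now() reports microseconds, so multiplying the difference by 1e-3 gives the elapsed milliseconds. A minimal sketch of that elapsed-time measurement, assuming libbot2's bot_core/timestamp.h header declares bot_timestamp_now():

#include <stdint.h>
#include <stdio.h>
#include <bot_core/timestamp.h>

int main (void)
{
    int64_t start = bot_timestamp_now ();
    /* ... do some work ... */
    double elapsed_ms = (bot_timestamp_now () - start) * 1e-3;  /* us -> ms */
    printf ("elapsed: %.3f ms\n", elapsed_ms);
    return 0;
}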
Example #2
  void operator()() {
    int publishPeriod = 1.0e6/mPublishFrequency;  // publish period in microseconds
    bot_core::utime_t msg;
    auto lcm = mLcmWrapper->get();
    int64_t currentTime, remainingTime;

    while (true) {
      msg.utime = bot_timestamp_now();
      lcm->publish(mOutputChannel, &msg);
      currentTime = bot_timestamp_now();
      remainingTime = publishPeriod - (currentTime - msg.utime);
      if (remainingTime > 0) {
        std::this_thread::sleep_for(std::chrono::microseconds(remainingTime));
      }
    }
  }
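The loop above is a fixed-rate publisher: the period is 1.0e6/frequency microseconds, and after each publish the thread sleeps for whatever remains of the period, measured with two bot_timestamp_now() calls. A standalone sketch of the same rate-limiting pattern, with printf standing in for the LCM publish and POSIX usleep() in place of std::this_thread::sleep_for (the 10 Hz rate is an assumption):

#include <stdint.h>
#include <stdio.h>
#include <unistd.h>
#include <bot_core/timestamp.h>

int main (void)
{
    const double frequency_hz = 10.0;                   /* assumed publish rate */
    const int64_t period_us = (int64_t) (1.0e6 / frequency_hz);
    for (int i = 0; i < 5; i++) {
        int64_t start = bot_timestamp_now ();
        printf ("tick %d\n", i);                        /* stands in for lcm->publish() */
        int64_t remaining = period_us - (bot_timestamp_now () - start);
        if (remaining > 0)
            usleep ((useconds_t) remaining);            /* sleep out the rest of the period */
    }
    return 0;
}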
Example #3
/* apply the transition update to the belief state
 * <dg> is the graph (entire map)
 * <cg> is the current gate
 */
void state_transition_update (dijk_graph_t *dg, int radius, double state_sigma)
{
    if (!dg) return;

    g_state_sigma = state_sigma;

    int64_t timestamp = bot_timestamp_now ();

    /* reset all pdf to zero */
    for (GList *iter=g_queue_peek_head_link(dg->nodes);iter;iter=iter->next) {
        dijk_node_t *nd = (dijk_node_t*)iter->data;
        nd->pdf0 = .0;
    }

    /* loop through all nodes */
    for (GList *iter=g_queue_peek_head_link(dg->nodes);iter;iter=iter->next) {
        dijk_node_t *nd = (dijk_node_t*)iter->data;

        // convert to dual tree
        GNode *tr = dijk_to_tree (nd, radius);

        // convolve with gaussian kernel
        double val = .0;
        g_node_traverse (tr, G_PRE_ORDER, G_TRAVERSE_ALL, -1, state_transition_convolve_cb, &val);
        nd->pdf0 = val;
        
        g_node_destroy (tr);
    }

    // normalize
    double t = .0;
    for (GList *iter=g_queue_peek_head_link(dg->nodes);iter;iter=iter->next) {
        dijk_node_t *nd = (dijk_node_t*)iter->data;
        t += nd->pdf0;
    }

    if (t > 1E-6) {
        for (GList *iter=g_queue_peek_head_link(dg->nodes);iter;iter=iter->next) {
            dijk_node_t *nd = (dijk_node_t*)iter->data;
            nd->pdf0 /= t;
        }
    }
}
Example #4
static void set_look_at_smooth (BotViewHandler *vhandler, const double eye[3], const double lookat[3], const double up[3], double duration_ms)
{
    BotDefaultViewHandler *dvh = (BotDefaultViewHandler*) vhandler->user;
    dvh->viewpath_duration_ms = duration_ms;

    dvh->viewpath_timer_start = bot_timestamp_now();

    // get the origin for the view path, and set the goal
    vhandler->get_eye_look(vhandler, dvh->origin_eye, dvh->origin_lookat, dvh->origin_up);
    memcpy(dvh->goal_eye, eye, 3 * sizeof(double));
    memcpy(dvh->goal_lookat, lookat, 3 * sizeof(double));
    double up_norm = sqrt(up[0]*up[0] + up[1]*up[1] + up[2]*up[2]);
    dvh->goal_up[0] = up[0] / up_norm;
    dvh->goal_up[1] = up[1] / up_norm;
    dvh->goal_up[2] = up[2] / up_norm;
//    memcpy(dvh->goal_up, up, 3 * sizeof(double));

    // periodically update the viewpoint until the goal viewpoint is reached
    g_timeout_add(30, (GSourceFunc)set_look_at_smooth_func, vhandler);
}
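set_look_at_smooth records the start time with bot_timestamp_now() and installs a 30 ms GLib timeout; the callback returns TRUE while the animation is running and FALSE once the goal is reached, which removes the source. A self-contained sketch of that timeout pattern, using g_get_monotonic_time() in place of bot_timestamp_now() (both report microseconds); the RampState struct and the 1-second duration are illustrative only:

#include <glib.h>
#include <stdio.h>

typedef struct {
    gint64 start_us;
    double duration_ms;
    GMainLoop *loop;
} RampState;

/* Linearly ramp a parameter from 0 to 1 over duration_ms, then stop. */
static gboolean ramp_tick (gpointer user_data)
{
    RampState *s = (RampState*) user_data;
    double elapsed_ms = (g_get_monotonic_time () - s->start_us) * 1e-3;
    double t = elapsed_ms / s->duration_ms;
    if (t >= 1.0) {
        printf ("t = 1.00 (done)\n");
        g_main_loop_quit (s->loop);
        return FALSE;   /* remove the timeout source */
    }
    printf ("t = %.2f\n", t);
    return TRUE;        /* keep firing every 30 ms */
}

int main (void)
{
    RampState s;
    s.loop = g_main_loop_new (NULL, FALSE);
    s.start_us = g_get_monotonic_time ();
    s.duration_ms = 1000.0;
    g_timeout_add (30, ramp_tick, &s);
    g_main_loop_run (s.loop);
    g_main_loop_unref (s.loop);
    return 0;
}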
Example #5
static void on_param_widget_changed(BotGtkParamWidget *pw, const char *name, void *user)
{
  RendererFrames *self = (RendererFrames*) user;
  if (self->updating) {
    return;
  }
  BotViewer *viewer = self->viewer;
  int activeSensorNum = bot_gtk_param_widget_get_enum(pw, PARAM_FRAME_SELECT);
  if (!strcmp(name, PARAM_FRAME_SELECT)) {
    if (activeSensorNum > 0) {
      self->updating = 1;
      bot_viewer_set_status_bar_message(self->viewer, "Modify Calibration relative to %s", bot_frames_get_relative_to(
          self->frames, self->frameNames[activeSensorNum]));
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_X, 1);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_Y, 1);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_Z, 1);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_ROLL, 1);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_PITCH, 1);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_YAW, 1);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_SAVE, 1);
      frames_update_handler(self->frames, self->frameNames[activeSensorNum], bot_frames_get_relative_to(self->frames,
          self->frameNames[activeSensorNum]), bot_timestamp_now(), self);
    }
    else {
      bot_viewer_set_status_bar_message(self->viewer, "");
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_X, 0);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_Y, 0);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_Z, 0);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_ROLL, 0);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_PITCH, 0);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_YAW, 0);
      bot_gtk_param_widget_set_enabled(self->pw, PARAM_SAVE, 0);
    }
  }
  else if (!strcmp(name, PARAM_SAVE) && activeSensorNum > 0) {
    char save_fname[1024];
    sprintf(save_fname, "manual_calib_%s.cfg", self->frameNames[activeSensorNum]);
    fprintf(stderr, "saving params to: %s\n", save_fname);
    FILE * f = fopen(save_fname, "w");
    double pos[3];
    pos[0] = bot_gtk_param_widget_get_double(self->pw, PARAM_X);
    pos[1] = bot_gtk_param_widget_get_double(self->pw, PARAM_Y);
    pos[2] = bot_gtk_param_widget_get_double(self->pw, PARAM_Z);
    double rpy[3];
    rpy[0] = bot_gtk_param_widget_get_double(self->pw, PARAM_ROLL);
    rpy[1] = bot_gtk_param_widget_get_double(self->pw, PARAM_PITCH);
    rpy[2] = bot_gtk_param_widget_get_double(self->pw, PARAM_YAW);

    double rpy_rad[3];
    for (int i = 0; i < 3; i++)
      rpy_rad[i] = bot_to_radians(rpy[i]);
    double quat[4];
    bot_roll_pitch_yaw_to_quat(rpy_rad, quat);
    double rod[3];
    bot_quat_to_rodrigues(quat, rod);

    fprintf(f, ""
      "%s {\n"
      "position = [%f, %f, %f];\n"
      "rpy = [%f, %f, %f];\n"
      "relative_to = \"%s\";\n"
      "}", self->frameNames[activeSensorNum], pos[0], pos[1], pos[2], rpy[0], rpy[1], rpy[2],
        bot_frames_get_relative_to(self->frames, self->frameNames[activeSensorNum]));
    fprintf(f, ""
      "\n"
      "%s {\n"
      "position = [%f, %f, %f];\n"
      "rodrigues = [%f, %f, %f];\n"
      "relative_to = \"%s\";\n"
      "}", self->frameNames[activeSensorNum], pos[0], pos[1], pos[2], rod[0], rod[1], rod[2],
        bot_frames_get_relative_to(self->frames, self->frameNames[activeSensorNum]));

    fclose(f);
    bot_viewer_set_status_bar_message(self->viewer, "Calibration saved to %s", save_fname);

  }
  else if (activeSensorNum > 0) {
    self->updating = 1;
    BotTrans curr;
    curr.trans_vec[0] = bot_gtk_param_widget_get_double(self->pw, PARAM_X);
    curr.trans_vec[1] = bot_gtk_param_widget_get_double(self->pw, PARAM_Y);
    curr.trans_vec[2] = bot_gtk_param_widget_get_double(self->pw, PARAM_Z);

    double rpy[3];
    rpy[0] = bot_to_radians(bot_gtk_param_widget_get_double(self->pw, PARAM_ROLL));
    rpy[1] = bot_to_radians(bot_gtk_param_widget_get_double(self->pw, PARAM_PITCH));
    rpy[2] = bot_to_radians(bot_gtk_param_widget_get_double(self->pw, PARAM_YAW));
    bot_roll_pitch_yaw_to_quat(rpy, curr.rot_quat);
    if (fabs(rpy[0]) > M_PI || fabs(rpy[1]) > M_PI || fabs(rpy[2]) > M_PI) {
      bot_gtk_param_widget_set_double(self->pw, PARAM_ROLL, bot_to_degrees(bot_mod2pi(rpy[0])));
      bot_gtk_param_widget_set_double(self->pw, PARAM_PITCH, bot_to_degrees(bot_mod2pi(rpy[1])));
      bot_gtk_param_widget_set_double(self->pw, PARAM_YAW, bot_to_degrees(bot_mod2pi(rpy[2])));
    }

    //and update the link
    const char * frame_name = self->frameNames[activeSensorNum];
    const char * relative_to = bot_frames_get_relative_to(self->frames, frame_name);
    bot_frames_update_frame(self->frames, frame_name, relative_to, &curr, bot_timestamp_now());
  }

  bot_viewer_request_redraw(self->viewer);
}
Example #6
void FilterPlanes::filterPlanes(vector<BasicPlane> &plane_stack){
  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filtered (new pcl::PointCloud<pcl::PointXYZRGB>), cloud_p (new pcl::PointCloud<pcl::PointXYZRGB>);
  
  if (verbose_text>0){
    cout << "PointCloud before filtering: " << cloud->width * cloud->height << " data points." << std::endl;
  }
  
  #if DO_TIMING_PROFILE
    vector<int64_t> tic_toc;
    tic_toc.push_back(bot_timestamp_now());
  #endif

/*  Ptcoll_cfg ptcoll_cfg;
  ptcoll_cfg.point_lists_id =pose_element_id; //bot_timestamp_now();
  ptcoll_cfg.collection = pose_coll_id;
  ptcoll_cfg.element_id = pose_element_id; */
  
  //1. Downsample the dataset using a voxel grid (5cm leaf size here; 1cm was used for the table dataset)
  // this is 99% of the cpu time
  pcl::VoxelGrid<pcl::PointXYZRGB> sor;
  sor.setInputCloud (cloud);
  // for table dataset:  sor.setLeafSize (0.01, 0.01, 0.01);
  sor.setLeafSize (0.05, 0.05, 0.05);
//  sor.setLeafSize (0.1, 0.1, 0.1);
  sor.filter (*cloud_filtered);
  if (verbose_text>0){
    cout << "PointCloud after filtering: " << cloud_filtered->width * cloud_filtered->height << " data points." << std::endl;
  }
  
  #if DO_TIMING_PROFILE
  tic_toc.push_back(bot_timestamp_now());
  #endif
  
  if (verbose_lcm>2){
/*  ptcoll_cfg.id = 200;
  ptcoll_cfg.reset=true;
  ptcoll_cfg.name ="cloud_downsampled";
  ptcoll_cfg.npoints =	 cloud_filtered->points.size();
  float colorm_temp0[] ={-1.0,-1.0,-1.0,-1.0};
  ptcoll_cfg.rgba.assign(colorm_temp0,colorm_temp0+4*sizeof(float));  
  ptcoll_cfg.type =1;
  pcdXYZRGB_to_lcm(publish_lcm,ptcoll_cfg, *cloud_filtered);*/
  }
  
  // 2. Set up the Ransac Plane Fitting Object:
  pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients ());
  pcl::PointIndices::Ptr inliers (new pcl::PointIndices ());
  pcl::SACSegmentation<pcl::PointXYZRGB> seg;
  // Optional
  seg.setOptimizeCoefficients (true);
  // Mandatory
  seg.setModelType (pcl::SACMODEL_PLANE);
  seg.setMethodType (pcl::SAC_RANSAC);
  seg.setMaxIterations (100); // was 4000
  
  seg.setDistanceThreshold (distance_threshold_); // 0.01 for table data set
  // Create the filtering object
  pcl::ExtractIndices<pcl::PointXYZRGB> extract;
  
  #if DO_TIMING_PROFILE
  tic_toc.push_back(bot_timestamp_now());
  #endif
  
  int n_major = 0, nr_points = cloud_filtered->points.size ();
  //vector<BasicPlaneX> plane_stack;
  BasicPlane one_plane;
  // Extract the primary planes:
  // major planes are the coarse planes extracted via ransac. they might contain disconnected points
  // these are broken up into minor planes which are portions of major planes
  
  if(verbose_lcm > 2){
    for (int i=0;i<7;i++){
      char n_major_char[10];
      sprintf(n_major_char,"%d",i);
 /*     ptcoll_cfg.id =210+ i+3;
      ptcoll_cfg.reset=true;
      ptcoll_cfg.name =(char*) "cloud_p ";
      ptcoll_cfg.name.append(n_major_char);
      ptcoll_cfg.npoints = 0;
      float colorm_temp0[] ={-1.0,-1.0,-1.0,-1.0};
      ptcoll_cfg.rgba.assign(colorm_temp0,colorm_temp0+4*sizeof(float));  
      ptcoll_cfg.type=1;
      pcdXYZRGB_to_lcm(publish_lcm,ptcoll_cfg, *cloud_filtered);   
*/
    }
    
    for (int i=0;i<7;i++){
/*
      Ptcoll_cfg ptcoll_cfg2;
      // the i below accounts for super planes in the same utime
      ptcoll_cfg2.point_lists_id =pose_element_id; //bot_timestamp_now();
      ptcoll_cfg2.collection = pose_coll_id;
      ptcoll_cfg2.element_id = pose_element_id;    
      if (i==0){
        ptcoll_cfg2.reset=true;
      }else{
        ptcoll_cfg2.reset=false;
      }
      ptcoll_cfg2.id =500;
      ptcoll_cfg2.name.assign("Grown Stack Final");  
      ptcoll_cfg2.npoints = 0;
      ptcoll_cfg2.type =1;      
      pcdXYZRGB_to_lcm(publish_lcm,ptcoll_cfg2, *cloud_filtered);  
*/
    }       

    // TODO: this will reset this object. Need to add a publish of reset
    // at the start of this block and set reset to false.
    for (int i=0;i<7;i++){
/*
      Ptcoll_cfg ptcoll_cfg2;
      // the i below accounts for super planes in the same utime
      ptcoll_cfg2.point_lists_id =pose_element_id; //bot_timestamp_now();
      ptcoll_cfg2.collection = pose_coll_id;
      ptcoll_cfg2.element_id = pose_element_id;
      if (i==0){
        ptcoll_cfg2.reset=true;
      }else{
        ptcoll_cfg2.reset=false;
      }
      ptcoll_cfg2.id =501;
      ptcoll_cfg2.name.assign("Grown Stack Final Hull");  
      ptcoll_cfg2.npoints = 0;
      ptcoll_cfg2.type =3;
      pcdXYZRGB_to_lcm(publish_lcm,ptcoll_cfg2, *cloud_filtered);  
*/
    }       
  }

  #if DO_TIMING_PROFILE
  tic_toc.push_back(bot_timestamp_now());
  #endif
  
  while (cloud_filtered->points.size () > stop_proportion_ * nr_points) { 
    // While XX% of the original cloud is still there
    char n_major_char[10];
    sprintf(n_major_char,"%d",n_major);
    
    if (n_major >6) {
      if (verbose_text >0){
	std::cout << n_major << " is the max number of planes to find, quitting" << std::endl;
      }
      break; 
    }

    //a Segment the largest planar component from the remaining cloud
    seg.setInputCloud (cloud_filtered);
    seg.segment (*inliers, *coefficients);
    if (inliers->indices.size () == 0)
    {
      std::cout << "Could not estimate a planar model for the given dataset." << std::endl;
      break;
    }
    
 
    if (inliers->indices.size () <  stop_cloud_size_) // stop when the plane is only a few points
    {
      //std::cerr << "No remaining planes in this set" << std::endl;
      break;
    }    
    
    //b Extract the inliers
    extract.setInputCloud (cloud_filtered);
    extract.setIndices (inliers);
    extract.setNegative (false);
    extract.filter (*cloud_p);
    if (verbose_text>0){
      std::cout << "PointCloud representing the planar component: " << cloud_p->width * cloud_p->height << " data points." << std::endl;
    }
    
    // Create the filtering object
    pcl::StatisticalOutlierRemoval<pcl::PointXYZRGB> sor;
    sor.setInputCloud (cloud_p);
    sor.setMeanK (30);
    sor.setStddevMulThresh (0.5); //was 1.0
    sor.filter (*cloud_p);

    if(verbose_lcm > 2){
/*
      ptcoll_cfg.id =210+ n_major+3;
      ptcoll_cfg.reset=true;
      ptcoll_cfg.name ="cloud_p ";
      ptcoll_cfg.name.append(n_major_char);
      ptcoll_cfg.npoints = cloud_p->points.size();
      float colorm_temp0[] ={-1.0,-1.0,-1.0,-1.0};
      ptcoll_cfg.rgba.assign(colorm_temp0,colorm_temp0+4*sizeof(float));  
      ptcoll_cfg.type=1;
      pcdXYZRGB_to_lcm(publish_lcm,ptcoll_cfg, *cloud_p); 
*/   
    }

    vector<BasicPlane> grown_stack;
    vector<BasicPlane> * grown_stack_ptr;
    grown_stack_ptr = &grown_stack;
    
    GrowCloud grow;
    grow.setInputCloud(cloud_p);
    grow.setMinCloudSize(stop_cloud_size_); // using stop_cloud_size_ here too
    grow.setLCM(publish_lcm);
    grow.doGrowCloud(*grown_stack_ptr);
    
    if (verbose_text>0){
      cout << "grow_cloud new found " << grown_stack.size() << " seperate clouds\n";
    }
	
    // Spit raw clouds out to LCM:
    if(verbose_lcm > 2){
      for (int i=0;i<grown_stack.size();i++){
/*
	Ptcoll_cfg ptcoll_cfg2;
	ptcoll_cfg2.reset=false;
	// the i below accounts for super planes in the same utime
	ptcoll_cfg2.point_lists_id =10*n_major + i; //filterplanes->pose_element_id;
	ptcoll_cfg2.collection = pose_coll_id;
	ptcoll_cfg2.element_id = pose_element_id;    

	ptcoll_cfg2.id =500;
	ptcoll_cfg2.name.assign("Grown Stack Final");  
	ptcoll_cfg2.npoints = grown_stack[i].cloud.points.size ();
	ptcoll_cfg2.type =1;
	
	int id =  plane_stack.size() + i; //   10*n_major + i;
	float colorm_temp0[] ={colors[3*(id%num_colors)],colors[3*(id%num_colors)+1],colors[3*(id%num_colors)+2] ,0.0};
	ptcoll_cfg2.rgba.assign(colorm_temp0,colorm_temp0+4*sizeof(float));  
	pcdXYZRGB_to_lcm(publish_lcm,ptcoll_cfg2, grown_stack[i].cloud);  
*/
      }  
    }
    
    BasicPlane one_plane;
    int n_minor=0;
    BOOST_FOREACH( BasicPlane one_plane, grown_stack ){
      pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_projected (new pcl::PointCloud<pcl::PointXYZRGB>);
      pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_hull (new pcl::PointCloud<pcl::PointXYZRGB>);
      
      // c Project the model inliers (seems to be necessary for fitting the convex hull)
      pcl::ProjectInliers<pcl::PointXYZRGB> proj;
      proj.setModelType (pcl::SACMODEL_PLANE);
      pcl::PointCloud<pcl::PointXYZRGB>::Ptr temp (new pcl::PointCloud<pcl::PointXYZRGB> ());
      *temp = (one_plane.cloud);
      proj.setInputCloud (temp);
      proj.setModelCoefficients (coefficients);
      proj.filter (*cloud_projected);
      
      std::vector <pcl::Vertices> vertices;
      if (1==1){ // convex:
        pcl::ConvexHull<pcl::PointXYZRGB> chull;
        chull.setInputCloud (cloud_projected);
        chull.setDimension(2);
        chull.reconstruct (*cloud_hull,vertices);
      }else { // concave
        pcl::ConcaveHull<pcl::PointXYZRGB> chull;
        chull.setInputCloud (cloud_projected);
        chull.setKeepInformation(true);
        chull.setAlpha(0.5);  
        // for arch way:
        // 1.1 too few
        // 0.7 a little to few but much better
        chull.reconstruct (*cloud_hull,vertices);
      }
      
      //std::cout << "Hull has: " << cloud_hull->points.size () << " vertices." << std::endl;
      if (cloud_hull->points.size () ==0){
        cout <<"ERROR: CONVEX HULL HAS NO POINTS! - NEED TO RESOLVE THIS\n"; 
      }
      
      
      // d.2 Find the mean colour of the hull:
      int rgba[]={0,0,0,0};
      for(int i=0;i<temp->points.size();i++){
	int rgba_one = *reinterpret_cast<int*>(&temp->points[i].rgba);
	rgba[3] += ((rgba_one >> 24) & 0xff);
	rgba[2] += ((rgba_one >> 16) & 0xff);
	rgba[1] += ((rgba_one >> 8) & 0xff);
	rgba[0] += (rgba_one & 0xff);      
      }
      double scale = ((double) temp->points.size());
      rgba[3] =(int)  round(((double) rgba[3]) / scale);
      rgba[2] =(int)  round(((double) rgba[2]) / scale);
      rgba[1] =(int)  round(((double) rgba[1]) / scale);
      rgba[0] =(int)  round(((double) rgba[0]) / scale);
      
      // Write the plane to file for experimentation:
      //stringstream oss;
      //oss << ptcoll_cfg.element_id <<"_" << ptcoll_cfg.name << ".pcd";
      //writer.write (oss.str(), *this_hull, false);
      for(int i=0;i<cloud_hull->points.size();i++){
	unsigned char* rgba_ptr = (unsigned char*)&cloud_hull->points[i].rgba;
	(*rgba_ptr) =  rgba[0]  ;
	(*(rgba_ptr+1)) = rgba[1];
	(*(rgba_ptr+2)) = rgba[2];
	(*(rgba_ptr+3)) = rgba[3];	
	
      }
      
      (one_plane.coeffs) = *coefficients;
      one_plane.cloud = *cloud_hull;
      one_plane.utime = pose_element_id;
      one_plane.major = n_major;
      one_plane.minor = n_minor;
      one_plane.n_source_points = cloud_projected->points.size();
      
      
      plane_stack.push_back(one_plane);      
      n_minor++;
  
    //  int stop;
    //  cout << "int to cont: ";
    //  cin >> stop;      
    }
    
    
    // Create the filtering object
    extract.setNegative (true);
    extract.filter (*cloud_filtered);
    n_major++;
  }
}
Example #7
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    IDeckLinkVideoFrame*				rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*	threeDExtensions = NULL;
    void*								frameBytes;
    void*								audioFrameBytes;

    // Handle Video Frame
    if (videoFrame)
    {
        // If 3D mode is enabled we retrieve the 3D extensions interface, which gives
        // us access to the right eye frame by calling GetFrameForRightEye().
        if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
             (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
        {
            rightEyeFrame = NULL;
        }

        if (threeDExtensions)
            threeDExtensions->Release();

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
        {
            printf("Frame received (#%lu) - No input signal detected\n", g_frameCount);
        }
        else
        {
            const char *timecodeString = NULL;
            if (g_config.m_timecodeFormat != 0)
            {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_config.m_timecodeFormat, &timecode) == S_OK)
                {
                    timecode->GetString(&timecodeString);
                }
            }

            int64_t timestampNow = bot_timestamp_now();

            if (g_config.m_lcmChannelName)
            {
                IDeckLinkMutableVideoFrame* outputFrame;
                g_deckLinkOutput->CreateVideoFrame(videoFrame->GetWidth(), videoFrame->GetHeight(), videoFrame->GetWidth()*4, bmdFormat8BitBGRA, bmdFrameFlagDefault, &outputFrame);
                HRESULT convertResult = g_conversionInst->ConvertFrame(videoFrame, outputFrame);

                frameConsumer.Queue.enqueue(FrameData(outputFrame, timestampNow));
            }

            static int64_t baseTime = timestampNow;
            static uint64_t frameCount = g_frameCount;
            double elapsedTime = (timestampNow - baseTime) * 1e-6;
            if (elapsedTime > 1.0)
            {
                printf("capturing at %.2f fps.\n", (g_frameCount - frameCount)/elapsedTime);
                baseTime = timestampNow;
                frameCount = g_frameCount;
            }

            if (timecodeString)
                free((void*)timecodeString);

            if (g_videoOutputFile != -1)
            {
                videoFrame->GetBytes(&frameBytes);
                write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
            }
        }

        if (rightEyeFrame)
            rightEyeFrame->Release();

        g_frameCount++;
    }

    // Handle Audio Frame
    if (audioFrame)
    {
        if (g_audioOutputFile != -1)
        {
            audioFrame->GetBytes(&audioFrameBytes);
            write(g_audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8));
        }
    }

    if (g_config.m_maxFrames > 0 && videoFrame && g_frameCount >= g_config.m_maxFrames)
    {
        g_do_exit = true;
        pthread_cond_signal(&g_sleepCond);
    }

    return S_OK;
}