bool VideoWriterUnit2::OpenStreams(StreamSet* set) {
  // Locate the video stream this writer consumes.
  video_stream_idx_ = FindStreamIdx(options_.video_stream_name, set);

  if (video_stream_idx_ < 0) {
    LOG(ERROR) << "Could not find Video stream!\n";
    return false;
  }

  const VideoStream& vid_stream = set->at(video_stream_idx_)->As<VideoStream>();

  const int frame_width = vid_stream.frame_width();
  const int frame_height = vid_stream.frame_height();
  const float fps = vid_stream.fps();

  // Open the OpenCV writer with an MPEG-4 fourcc at the stream's native size.
  writer_.open(video_file_, CV_FOURCC('F', 'M', 'P', '4'), fps,
               cv::Size(frame_width, frame_height));

  if (!writer_.isOpened()) {
    LOG(ERROR) << "Could not open video writer!\n";
    // Fix: a writer that failed to open is fatal; previously this fell
    // through and returned true, letting later writes fail silently.
    return false;
  }

  // Add stream.
  //DataStream* landmarks_stream = new DataStream(options_.stream_name);
  //set->push_back(shared_ptr<DataStream>(landmarks_stream));

  return true;
}
bool VideoDisplayUnit::OpenStreams(StreamSet* set) {
  // Resolve the index of the video stream we render from.
  video_stream_idx_ = FindStreamIdx(options_.stream_name, set);
  if (video_stream_idx_ < 0) {
    LOG(ERROR) << "Could not find Video stream!\n";
    return false;
  }

  const VideoStream& video = set->at(video_stream_idx_)->As<VideoStream>();
  const int width = video.frame_width();
  const int height = video.frame_height();

  // A scaled display needs its own BGR buffer; unscaled output draws directly.
  if (options_.output_scale != 1.0f) {
    frame_buffer_.reset(new cv::Mat(height * options_.output_scale,
                                    width * options_.output_scale,
                                    CV_8UC3));
  }

  // Derive a unique window title from this unit's id and open the window.
  std::ostringstream name_stream;
  name_stream << "VideoDisplayUnit_" << display_unit_id_;
  window_name_ = name_stream.str();

  cv::namedWindow(window_name_);
  cv::waitKey(10);
  return true;
}
// Example #3
  bool VideoUnit::OpenStreamsImpl(StreamSet* set, const VideoUnit* sender) {
    const int streams_before = set->size();
    if (!OpenStreamsFromSender(set, sender)) {
      return false;
    }

    // Record the stream count so downstream consistency checks can use it.
    stream_sz_ = set->size();

    // Reject duplicate names among the streams this unit just appended:
    // a name that resolves to an earlier index already exists.
    for (int idx = streams_before; idx < stream_sz_; ++idx) {
      const string name = (*set)[idx]->stream_name();
      if (FindStreamIdx(name, set) < idx) {
        LOG(ERROR) << "Duplicate stream found: " << name;
        return false;
      }
    }

    // Resolve every registered feedback stream and announce it to its target.
    for (list<FeedbackInfo>::iterator fb = feedback_info_.begin();
         fb != feedback_info_.end(); ++fb) {
      fb->stream_idx = FindStreamIdx(fb->stream_name, set);
      if (fb->stream_idx < 0) {
        LOG(ERROR) << "Could not find stream " << fb->stream_name  << " for feedback.\n";
        return false;
      }
      // Notify target that a Feedback stream will be passed to it.
      fb->target_idx = fb->target->AddFeedbackStream(set->at(fb->stream_idx));
    }

    // Recurse into children; any failure aborts the whole open.
    for (list<VideoUnit*>::iterator it = children_.begin();
         it != children_.end(); ++it) {
      if (!(*it)->OpenStreamsImpl(set, this)) {
        return false;
      }
    }

    return true;
  }
bool VideoDisplayQtUnit::OpenStreams(StreamSet* set) {
  // Locate the video stream to display.
  video_stream_idx_ = FindStreamIdx(options_.stream_name, set);
  if (video_stream_idx_ < 0) {
    LOG(ERROR) << "Could not find Video stream!\n";
    return false;
  }

  // Fetch the stream and verify its runtime type.
  const VideoStream* video =
      dynamic_cast<const VideoStream*>(set->at(video_stream_idx_).get());
  CHECK_NOTNULL(video);

  // Window dimensions follow the stream, optionally upscaled; the scaled
  // path needs its own intermediate BGR image.
  frame_width_ = video->frame_width();
  frame_height_ = video->frame_height();
  if (options_.upscale != 1.0f) {
    frame_width_ *= options_.upscale;
    frame_height_ *= options_.upscale;
    scaled_image_.reset(new cv::Mat(frame_height_, frame_width_, CV_8UC3));
  }

  // Place the Qt window to the right of the previously opened ones.
  main_window_->SetSize(frame_width_, frame_height_);
  main_window_->move(g_summed_window_width, 0);
  main_window_->show();

  // Gap between windows.
  g_summed_window_width += frame_width_ + 50;

  // Initialize fps throttling and force an immediate repaint.
  last_update_time_ = boost::posix_time::microsec_clock::local_time();
  QApplication::processEvents();
  return true;
}
bool SegmentationDisplayUnit::OpenStreams(StreamSet* set) {
  // Resolve the optional video stream. Without one we render segmentation
  // only, which forces an opaque blend.
  if (options_.video_stream_name.empty()) {
    if (options_.blend_alpha != 1.f) {
      options_.blend_alpha = 1.f;
      LOG(WARNING) << "No video stream requested. Setting blend alpha to 1.";
    }

    vid_stream_idx_ = -1;
  } else {
    vid_stream_idx_ = FindStreamIdx(options_.video_stream_name, set);

    if (vid_stream_idx_ < 0) {
      LOG(ERROR) << "Could not find Video stream!\n";
      return false;
    }

    // Get video stream info.
    const VideoStream* vid_stream =
      dynamic_cast<const VideoStream*>(set->at(vid_stream_idx_).get());
    CHECK_NOTNULL(vid_stream);

    frame_width_ = vid_stream->frame_width();
    frame_height_ = vid_stream->frame_height();
  }

  // Resolve the mandatory segmentation stream.
  seg_stream_idx_ = FindStreamIdx(options_.segment_stream_name, set);

  if (seg_stream_idx_ < 0) {
    LOG(ERROR) << "SegmentationDisplayUnit::OpenStreams: "
               << "Could not find Segmentation stream!\n";
    return false;
  }

  const SegmentationStream* seg_stream =
    dynamic_cast<const SegmentationStream*>(set->at(seg_stream_idx_).get());
  // Fix: the cast result was dereferenced below without a null check,
  // unlike the video stream above. A wrongly-typed stream now fails loudly
  // instead of crashing on a null dereference.
  CHECK_NOTNULL(seg_stream);
  if (frame_width_ == 0) {
    // No video stream: read dimensions from the segmentation stream.
    frame_width_ = seg_stream->frame_width();
    frame_height_ = seg_stream->frame_height();
  } else {
    // Both streams present: their dimensions must agree.
    CHECK_EQ(frame_width_, seg_stream->frame_width());
    CHECK_EQ(frame_height_, seg_stream->frame_height());
  }

  // Allocate render buffer. Concatenating with the source doubles the height.
  render_frame_.reset(new cv::Mat(frame_height_, frame_width_, CV_8UC3));
  const int actual_height = frame_height_ * (options_.concat_with_source ? 2 : 1);
  const int actual_width = frame_width_;

  // Qt display: size, position next to prior windows, and show.
  main_window_->SetSize(actual_width * options_.upscale,
                        actual_height * options_.upscale);
  main_window_->move(g_summed_window_width, 0);
  main_window_->show();

  output_frame_.reset(new cv::Mat(actual_height, actual_width, CV_8UC3));
  if (options_.upscale != 1.0f) {
    scaled_image_.reset(new cv::Mat(actual_height * options_.upscale,
                                    actual_width * options_.upscale,
                                    CV_8UC3));
  }

  // Gap between windows.
  g_summed_window_width += frame_width_ * options_.upscale + 50;

  // FPS control.
  last_update_time_ = boost::posix_time::microsec_clock::local_time();

  // Hierarchy level must be a normalized fraction in [0, 1].
  fractional_level_ = options_.hierarchy_level;
  CHECK_GE(fractional_level_, 0.0f);
  CHECK_LE(fractional_level_, 1.0f);
  main_window_->SetLevel(fractional_level_);

  // Force refresh now.
  QApplication::processEvents();
  return true;
}
bool VideoWriterUnit::OpenStreams(StreamSet* set) {
  // Setup FFMPEG (global registration must happen exactly once).
  if (!ffmpeg_initialized_) {
    ffmpeg_initialized_ = true;
    av_register_all();
  }

  // Find video stream index.
  video_stream_idx_ = FindStreamIdx(options_.stream_name, set);

  if (video_stream_idx_ < 0) {
    LOG(ERROR) << "Could not find Video stream!\n";
    return false;
  }

  const VideoStream& vid_stream = set->at(video_stream_idx_)->As<VideoStream>();

  frame_width_ = vid_stream.frame_width();
  frame_height_ = vid_stream.frame_height();
  // Inherit fps from the input stream unless explicitly overridden.
  if (!options_.fps) {
    options_.fps = vid_stream.fps();
  }

  // Guess the container format from the explicit option or the filename.
  if (!options_.output_format.empty()) {
    output_format_ = av_guess_format(options_.output_format.c_str(), NULL, NULL);
  } else {
    output_format_ = av_guess_format(NULL, video_file_.c_str(), NULL);
  }

  // Fix: av_guess_format returns NULL on failure; output_format_ was
  // previously dereferenced below without a check.
  if (!output_format_) {
    LOG(ERROR) << "Could not determine output format.\n";
    return false;
  }

  output_width_ = frame_width_;
  output_height_ = frame_height_;

  // Explicit scale wins over min/max-dimension scaling.
  if (options_.scale != 1) {
    if (options_.scale_max_dim || options_.scale_min_dim) {
      LOG(WARNING) << "Scale set, ignoring scale_[max|min]_dim.";
    }
    output_width_ *= options_.scale;
    output_height_ *= options_.scale;
  } else {
    if (options_.scale_max_dim) {
      float max_dim = std::max(frame_width_, frame_height_);
      output_width_ = (float)frame_width_ / max_dim * options_.scale_max_dim;
      output_height_ = (float)frame_height_ / max_dim * options_.scale_max_dim;
    } else if (options_.scale_min_dim) {
      float min_dim = std::min(frame_width_, frame_height_);
      output_width_ = (float)frame_width_ / min_dim * options_.scale_min_dim;
      output_height_ = (float)frame_height_ / min_dim * options_.scale_min_dim;
    }
  }

  // Round each dimension to the nearest multiple of options_.fraction
  // (many codecs require dimensions divisible by e.g. 2 or 4).
  int w_reminder = output_width_ % options_.fraction;
  if (w_reminder > 0) {
    if (w_reminder < options_.fraction / 2) {
      output_width_ -= w_reminder;
    } else {
      output_width_ += (options_.fraction - w_reminder);
    }
  }

  int h_reminder = output_height_ % options_.fraction;
  if (h_reminder > 0) {
    if (h_reminder < options_.fraction / 2) {
      output_height_ -= h_reminder;
    } else {
      output_height_ += (options_.fraction - h_reminder);
    }
  }

  avformat_alloc_output_context2(&format_context_, output_format_, NULL,
                                 video_file_.c_str());
  if(!format_context_) {
    LOG(ERROR) << "Could not open format context.\n";
    return false;
  }

  // Add video stream.
  video_stream_ = avformat_new_stream(format_context_, NULL);
  if (!video_stream_) {
    LOG(ERROR) << "Could not allocate video stream.\n";
    return false;
  }

  // Set standard parameters. Force H264 for mp4/mov containers.
  codec_context_ = video_stream_->codec;
  // Fix: substr(size() - 3) underflows for paths shorter than 3 chars.
  const std::string file_ending = video_file_.size() >= 3
      ? video_file_.substr(video_file_.size() - 3) : std::string();
  if (file_ending == "mp4" || file_ending == "mov") {
    codec_context_->codec_id = CODEC_ID_H264;
  } else {
    codec_context_->codec_id = output_format_->video_codec;
  }

  codec_context_->codec_type = CODEC_TYPE_VIDEO;
  codec_context_->bit_rate = options_.bit_rate;
  codec_context_->bit_rate_tolerance = options_.bit_rate / 5;
  codec_context_->width = output_width_;
  codec_context_->height = output_height_;

  LOG(INFO) << "Encoding with " << options_.fps << " fps.";
  // Time base is the inverse of the frame rate, as a bounded rational.
  codec_context_->time_base = av_d2q(1.0 / options_.fps, 1000);

  LOG(INFO) << "time base : " << codec_context_->time_base.num
            << " / " << codec_context_->time_base.den;

  codec_context_->pix_fmt = PIX_FMT_YUV420P;

  if (codec_context_->codec_id == CODEC_ID_MPEG2VIDEO) {
    codec_context_->max_b_frames = 2;
  }

  if (codec_context_->codec_id == CODEC_ID_MPEG1VIDEO) {
    codec_context_->mb_decision = 2;
  }

  if (codec_context_->codec_id == CODEC_ID_H264) {
    // H264 settings.
    codec_context_->coder_type = FF_CODER_TYPE_AC;
    codec_context_->flags |= CODEC_FLAG_LOOP_FILTER | CODEC_FLAG_GLOBAL_HEADER;
    codec_context_->profile = FF_PROFILE_H264_BASELINE;
    codec_context_->scenechange_threshold = 40;
    codec_context_->gop_size = 10;
    codec_context_->max_b_frames = 0;
    codec_context_->max_qdiff = 4;
    codec_context_->me_method = ME_HEX;
    codec_context_->me_range = 16;
    codec_context_->me_subpel_quality = 6;
    codec_context_->qmin = 10;
    codec_context_->qmax = 51;
    codec_context_->qcompress = 0.6;
    codec_context_->keyint_min = 10;
    codec_context_->trellis = 0;
    codec_context_->level = 30;
    codec_context_->refs = 1;
    av_opt_set(codec_context_->priv_data, "preset", "slow", 0);
    av_opt_set(codec_context_->priv_data, "vprofile", "baseline", 0);
  }

  // Find and open codec.
  codec_ = avcodec_find_encoder(codec_context_->codec_id);
  if (!codec_) {
    LOG(ERROR) << "Codec not found.";
    return false;
  }

  if (avcodec_open2(codec_context_, codec_, NULL) < 0) {
    LOG(ERROR) << "Could not open codec.";
    return false;
  }

  // Allocate the YUV encode frame and the BGR staging frame.
  frame_encode_ = av_frame_alloc();
  frame_bgr_ = av_frame_alloc();

  if (!frame_bgr_ || !frame_encode_) {
    LOG(ERROR) << "Could not alloc tmp. images.\n";
    return false;
  }

  uint8_t* encode_buffer =
      (uint8_t*)av_malloc(avpicture_get_size(codec_context_->pix_fmt,
                                             codec_context_->width,
                                             codec_context_->height));

  avpicture_fill((AVPicture*)frame_encode_, encode_buffer, codec_context_->pix_fmt,
                 codec_context_->width, codec_context_->height);

  uint8_t* bgr_buffer = (uint8_t*)av_malloc(avpicture_get_size(PIX_FMT_BGR24,
                                                               frame_width_,
                                                               frame_height_));
  avpicture_fill((AVPicture*)frame_bgr_,
                 bgr_buffer,
                 PIX_FMT_BGR24,
                 frame_width_,
                 frame_height_);

  // Open output file, if the container needs one.
  if(!(output_format_->flags & AVFMT_NOFILE)) {
    if (avio_open(&format_context_->pb, video_file_.c_str(), AVIO_FLAG_WRITE) < 0) {
      LOG(ERROR) << "Could not open " << video_file_;
      return false;
    }
  }

  // Fix: the return value of avformat_write_header was ignored; a failed
  // header write would surface later as corrupt output.
  if (avformat_write_header(format_context_, NULL) < 0) {
    LOG(ERROR) << "Could not write header for " << video_file_;
    return false;
  }

  // Setup color conversion from input BGR to the encoder's pixel format,
  // scaling to the (possibly rounded) output dimensions.
  sws_context_ = sws_getContext(frame_width_,
                                frame_height_,
                                PIX_FMT_BGR24,
                                codec_context_->width,
                                codec_context_->height,
                                codec_context_->pix_fmt,
                                SWS_BICUBIC,
                                NULL,
                                NULL,
                                NULL);

  if (!sws_context_) {
    LOG(ERROR) << "Could not initialize sws_context.";
    return false;
  }

  frame_num_ = 0;
  return true;
}