/* Example #1 (score: 0) */
int h264 (bench_t* req){
  //Configure and launch an H.264 capture, taking the frame geometry and
  //lifecycle callbacks from the benchmark request.
  omxcam_video_settings_t cfg;
  omxcam_video_init (&cfg);
  
  cfg.on_ready = req->on_ready;
  cfg.on_stop = req->on_stop;
  cfg.camera.width = req->width;
  cfg.camera.height = req->height;
  
  //Capture for the requested number of milliseconds
  return omxcam_video_start (&cfg, req->ms);
}
int main (int argc, char **argv)
{
	int rtpmode = 1;	// Default is RTP mode
	int ret;
	// {0} instead of {} : the empty initializer is C23-only
	omxcam_video_settings_t videoset = {0};

	if (argc < 3) {
		printf("Usage: %s <peer_addr> <recv_port>\n", argv[0]);
		exit(1);
	}

	// Switch to plain UDP mode when the executable name contains "udp"
	if (strstr(argv[0], "udp") != NULL)
		rtpmode = 0;

	// Socket initialization (client socket towards peer_addr:recv_port)
	udpsock = sock_createcl(argv[1], atoi(argv[2]));
	if (udpsock < 0)
		return __LINE__;

	// Capture initialization
	omxcam_video_init(&videoset);
	videoset.on_data = rtpmode ? video_encoded : video_encoded_toudp;
	// Camera settings
	videoset.camera.width = 1920;
	videoset.camera.height = 1080;
	videoset.camera.framerate = 30;
	// Encoder settings
	videoset.h264.bitrate = 12*1000*1000; //12Mbps
	videoset.h264.idr_period = 30;	// IDR frame every 30 frames
	videoset.h264.inline_headers = OMXCAM_TRUE; // Insert SPS/PPS inline

	if (rtpmode) {
		// RTP initialization
		rtpopen(&rtpsock, 1/*SSRC*/, 96/*payload_type*/, udpsock, &s_peer);
	}

	// Start the capture; keep the result so a start failure is not
	// silently reported as success (original discarded this value)
	ret = omxcam_video_start(&videoset, OMXCAM_CAPTURE_FOREVER);

	if (rtpmode)
		rtpclose(rtpsock);
	else
		close(udpsock);
	return ret;
}
int main (){
  omxcam_video_settings_t settings;
  omxcam_video_init (&settings);
  
  settings.on_data = on_data;
  settings.camera.width = 640;
  settings.camera.height = 480;
  
  signal (SIGINT, signal_handler);
  signal (SIGTERM, signal_handler);
  signal (SIGQUIT, signal_handler);
  signal (SIGHUP, signal_handler);
  
  if (omxcam_video_start (&settings, OMXCAM_CAPTURE_FOREVER)){
    return log_error ();
  }
  
  return 0;
}
int save (char* filename, omxcam_video_settings_t* settings){
  //Open the output file, run the capture until it is stopped, then close
  //the file. Returns 0 on success, non-zero on failure.
  fd = open (filename, O_WRONLY | O_CREAT | O_TRUNC | O_APPEND, 0666);
  if (fd == -1){
    fprintf (stderr, "error: open\n");
    return 1;
  }
  
  if (omxcam_video_start (settings, OMXCAM_CAPTURE_FOREVER)){
    //Fix: the original returned here without closing fd, leaking the
    //descriptor on a capture failure
    int error = log_error ();
    close (fd);
    return error;
  }
  
  //Close the file
  if (close (fd)){
    fprintf (stderr, "error: close\n");
    return 1;
  }
  
  return 0;
}
/* Example #5 (score: 0) */
int save (char* filename, omxcam_video_settings_t* settings){
  /*
  The RGB video comes in slices, that is, each buffer is part of a frame:
  buffer != frame -> buffer < frame. Take into account that a buffer can contain
  data from two consecutive frames because the frames are just concatenated one
  after the other. Therefore, you MUST control the storage/transmission of the
  frames because the video capture can be stopped at anytime, so it's likely
  that the last frame won't be received entirely, so the current received bytes
  MUST be discarded. For example:
  
  Note: Rf1 means channel Red of a pixel in the frame 1.
  
   ... Rf1 Gf1 Bf1 Rf2 Gf2 Bf2 ...
  |   |-----------|-----------|   |
  |   |last pixel |first pixel|   |
  |   |of frame 1 |of frame 2 |   |
  |-------------------------------|
  |            buffer             |
  */
  
  printf ("capturing %s\n", filename);
  
  fd = open (filename, O_WRONLY | O_CREAT | O_TRUNC | O_APPEND, 0666);
  if (fd == -1){
    fprintf (stderr, "error: open\n");
    return 1;
  }
  
  //Fix: the original logged a start failure but still fell through and
  //returned 0, hiding the error from the caller. Record it and report it
  //after the file has been closed.
  int error = 0;
  if (omxcam_video_start (settings, OMXCAM_CAPTURE_FOREVER)){
    error = log_error ();
  }
  
  //Close the file
  if (close (fd)){
    fprintf (stderr, "error: close\n");
    return 1;
  }
  
  return error;
}
/* Example #6 (score: 0) */
int yuv_video (bench_t* req){
  //Run a YUV420 capture benchmark with the requested geometry and frame
  //count. Returns the start error if any, otherwise whatever error the
  //background callbacks recorded (bg_error).
  current = 0;
  current_frames = 0;
  
  omxcam_video_settings_t cfg;
  omxcam_video_init (&cfg);
  
  cfg.on_ready = req->on_ready;
  cfg.on_data = on_data_video;
  cfg.on_stop = req->on_stop;
  cfg.format = OMXCAM_FORMAT_YUV420;
  cfg.camera.width = req->width;
  cfg.camera.height = req->height;
  
  //Derive the per-frame byte size from the YUV plane layout
  omxcam_yuv_planes (req->width, req->height, &planes);
  frame_size = planes.offset_v + planes.length_v;
  total_frames = req->frames;
  
  int error = omxcam_video_start (&cfg, OMXCAM_CAPTURE_FOREVER);
  if (error) return error;
  return bg_error;
}