Example #1
/*
 * encoder loop (should run in a separate thread)
 * args:
 *    data - pointer to user data
 *
 * asserts:
 *   none
 *
 * returns: pointer to return code
 */
static void *encoder_loop(void *data)
{
	v4l2_dev_t *device = (v4l2_dev_t *) data;

	my_encoder_status = 1;
	
	if(debug_level > 1)
		printf("GUVCVIEW: encoder thread (tid: %u)\n",
			(unsigned int) syscall (SYS_gettid));

	/*get the audio context*/
	audio_context_t *audio_ctx = get_audio_context();

	__THREAD_TYPE encoder_audio_thread;

	int channels = 0;
	int samprate = 0;

	if(audio_ctx)
	{
		channels = audio_ctx->channels;
		samprate = audio_ctx->samprate;
	}

	if(debug_level > 0)
		printf("GUVCVIEW: audio [channels= %i; samprate= %i] \n",
			channels, samprate);

	/*create the encoder context*/
	encoder_context_t *encoder_ctx = encoder_get_context(
		device->requested_fmt,
		get_video_codec_ind(),
		get_audio_codec_ind(),
		get_video_muxer(),
		device->format.fmt.pix.width,
		device->format.fmt.pix.height,
		device->fps_num,
		device->fps_denom,
		channels,
		samprate);

	/*store external SPS and PPS data if needed*/
	if(encoder_ctx->video_codec_ind == 0 && /*raw - direct input*/
		device->requested_fmt == V4L2_PIX_FMT_H264)
	{
		/*request an IDR (key) frame*/
		v4l2core_h264_request_idr(device);

		if(debug_level > 0)
			printf("GUVCVIEW: storing external pps and sps data in encoder context\n");
		encoder_ctx->h264_pps_size = device->h264_PPS_size;
		if(encoder_ctx->h264_pps_size > 0)
		{
			encoder_ctx->h264_pps = calloc(device->h264_PPS_size, sizeof(uint8_t));
			if(encoder_ctx->h264_pps == NULL)
			{
				fprintf(stderr,"GUVCVIEW: FATAL memory allocation failure (encoder_loop): %s\n", strerror(errno));
				exit(-1);
			}
			memcpy(encoder_ctx->h264_pps, device->h264_PPS, device->h264_PPS_size);
		}

		encoder_ctx->h264_sps_size = device->h264_SPS_size;
		if(encoder_ctx->h264_sps_size > 0)
		{
			encoder_ctx->h264_sps = calloc(device->h264_SPS_size, sizeof(uint8_t));
			if(encoder_ctx->h264_sps == NULL)
			{
				fprintf(stderr,"GUVCVIEW: FATAL memory allocation failure (encoder_loop): %s\n", strerror(errno));
				exit(-1);
			}
			memcpy(encoder_ctx->h264_sps, device->h264_SPS, device->h264_SPS_size);
		}
	}

	uint32_t current_framerate = 0;
	if(device->requested_fmt == V4L2_PIX_FMT_H264)
	{
		/* store framerate since it may change due to scheduler*/
		current_framerate = v4l2core_get_h264_frame_rate_config(device);
	}

	char *video_filename = NULL;
	/*get_video_[name|path] always return a non NULL value*/
	char *name = strdup(get_video_name());
	char *path = strdup(get_video_path());

	if(get_video_sufix_flag())
	{
		char *new_name = add_file_suffix(path, name);
		free(name); /*free old name*/
		name = new_name; /*replace with suffixed name*/
	}
	int pathsize = strlen(path);
	if(path[pathsize - 1] != '/')
		video_filename = smart_cat(path, '/', name);
	else
		video_filename = smart_cat(path, 0, name);

	snprintf(status_message, 79, _("saving video to %s"), video_filename);
	gui_status_message(status_message);

	/*muxer initialization*/
	encoder_muxer_init(encoder_ctx, video_filename);

	/*start video capture*/
	video_capture_save_video(1);

	int threshold = 102400; /*100 Mbytes*/
	int64_t last_check_pts = 0; /*last pts when disk supervisor called*/

	/*start audio processing thread*/
	if(encoder_ctx->enc_audio_ctx != NULL && audio_ctx->channels > 0)
	{
		if(debug_level > 1)
			printf("GUVCVIEW: starting encoder audio thread\n");
		
		int ret = __THREAD_CREATE(&encoder_audio_thread, audio_processing_loop, (void *) encoder_ctx);
		
		if(ret)
			fprintf(stderr, "GUVCVIEW: encoder audio thread creation failed (%i)\n", ret);
		else if(debug_level > 2)
			printf("GUVCVIEW: created audio encoder thread with tid: %u\n", 
				(unsigned int) encoder_audio_thread);
	}

	while(video_capture_get_save_video())
	{
		/*process the video buffer*/
		encoder_process_next_video_buffer(encoder_ctx);

		/*disk supervisor*/
		if(encoder_ctx->enc_video_ctx->pts - last_check_pts > 2 * NSEC_PER_SEC)
		{
			last_check_pts = encoder_ctx->enc_video_ctx->pts;

			if(!encoder_disk_supervisor(threshold, path))
			{
				/*stop capture*/
				gui_set_video_capture_button_status(0);
			}
		}
	}
	
	/*flush the video buffer*/
	encoder_flush_video_buffer(encoder_ctx);

	/*make sure the audio processing thread has stopped*/
	if(encoder_ctx->enc_audio_ctx != NULL && audio_ctx->channels > 0)
	{
		if(debug_level > 1)
			printf("GUVCVIEW: join encoder audio thread\n");
		__THREAD_JOIN(encoder_audio_thread);
	}

	/*close the muxer*/
	encoder_muxer_close(encoder_ctx);

	/*close the encoder context (clean up)*/
	encoder_close(encoder_ctx);

	if(device->requested_fmt == V4L2_PIX_FMT_H264)
	{
		/* restore framerate */
		v4l2core_set_h264_frame_rate_config(device, current_framerate);
	}

	/*clean string*/
	free(video_filename);
	free(path);
	free(name);

	my_encoder_status = 0;

	return ((void *) 0);
}
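
Note: the __THREAD_TYPE, __THREAD_CREATE and __THREAD_JOIN macros used in these examples are defined elsewhere in guvcview and are not shown here. On a POSIX build they presumably wrap pthreads; a minimal sketch of such a mapping (an assumption, not the project's actual header) looks like this:

#include <pthread.h>

/* hypothetical pthread mapping for the thread macros used above */
#define __THREAD_TYPE             pthread_t
#define __THREAD_CREATE(t, f, d)  pthread_create((t), NULL, (f), (d))
#define __THREAD_JOIN(t)          pthread_join((t), NULL)
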
Example #2
/*
 * capture loop (should run in a separate thread)
 * args:
 *    data - pointer to user data (device data + options data)
 *
 * asserts:
 *    device data is not null
 *
 * returns: pointer to return code
 */
void *capture_loop(void *data)
{
	capture_loop_data_t *cl_data = (capture_loop_data_t *) data;
	v4l2_dev_t *device = (v4l2_dev_t *) cl_data->device;
	options_t *my_options = (options_t *) cl_data->options;
	//config_t *my_config = (config_t *) cl_data->config;

	uint64_t my_last_photo_time = 0; /*timer count*/
	int my_photo_npics = 0; /*number of pictures still to take*/

	/*asserts*/
	assert(device != NULL);

	/*reset quit flag*/
	quit = 0;
	
	if(debug_level > 1)
		printf("GUVCVIEW: capture thread (tid: %u)\n", 
			(unsigned int) syscall (SYS_gettid));

	int ret = 0;
	
	int render_flags = 0;
	
	if (strcasecmp(my_options->render_flag, "full") == 0)
		render_flags = 1;
	else if (strcasecmp(my_options->render_flag, "max") == 0)
		render_flags = 2;
	
	render_set_verbosity(debug_level);
	
	if(render_init(render, device->format.fmt.pix.width, device->format.fmt.pix.height, render_flags) < 0)
		render = RENDER_NONE;
	else
	{
		render_set_event_callback(EV_QUIT, &quit_callback, NULL);
		render_set_event_callback(EV_KEY_V, &key_V_callback, device);
		render_set_event_callback(EV_KEY_I, &key_I_callback, NULL);
		render_set_event_callback(EV_KEY_UP, &key_UP_callback, device);
		render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, device);
		render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, device);
		render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, device);
	}

	/*add a video capture timer*/
	if(my_options->video_timer > 0)
	{
		my_video_timer = NSEC_PER_SEC * my_options->video_timer;
		my_video_begin_time = v4l2core_time_get_timestamp(); /*timer count*/
		/*if we are not saving video, start it*/
		if(!get_encoder_status())
			start_encoder_thread(device);
	}

	/*add a photo capture timer*/
	if(my_options->photo_timer > 0)
	{
		my_photo_timer = NSEC_PER_SEC * my_options->photo_timer;
		my_last_photo_time = v4l2core_time_get_timestamp(); /*timer count*/
	}

	if(my_options->photo_npics > 0)
		my_photo_npics = my_options->photo_npics;

	v4l2core_start_stream(device);

	while(!quit)
	{
		if(restart)
		{
			restart = 0; /*reset*/
			v4l2core_stop_stream(device);

			/*close render*/
			render_close();

			v4l2core_clean_buffers(device);

			/*try new format (values prepared by the request callback)*/
			ret = v4l2core_update_current_format(device);
			/*try to set the video stream format on the device*/
			if(ret != E_OK)
			{
				fprintf(stderr, "GUCVIEW: could not set the defined stream format\n");
				fprintf(stderr, "GUCVIEW: trying first listed stream format\n");

				v4l2core_prepare_valid_format(device);
				v4l2core_prepare_valid_resolution(device);
				ret = v4l2core_update_current_format(device);

				if(ret != E_OK)
				{
					fprintf(stderr, "GUCVIEW: also could not set the first listed stream format\n");

					gui_error(device, "Guvcview error", "could not start a video stream in the device", 1);

					return ((void *) -1);
				}
			}

			/*restart the render with new format*/
			if(render_init(render, device->format.fmt.pix.width, device->format.fmt.pix.height, render_flags) < 0)
				render = RENDER_NONE;
			else
			{
				render_set_event_callback(EV_QUIT, &quit_callback, NULL);
				render_set_event_callback(EV_KEY_V, &key_V_callback, device);
				render_set_event_callback(EV_KEY_I, &key_I_callback, NULL);
				render_set_event_callback(EV_KEY_UP, &key_UP_callback, device);
				render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, device);
				render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, device);
				render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, device);
			}


			if(debug_level > 0)
				printf("GUVCVIEW: reset to pixelformat=%x width=%i and height=%i\n", device->requested_fmt, device->format.fmt.pix.width, device->format.fmt.pix.height);

			v4l2core_start_stream(device);

		}

		if( v4l2core_get_frame(device) == E_OK)
		{
			/*decode the raw frame*/
			if(v4l2core_frame_decode(device) != E_OK)
			{
				fprintf(stderr, "GUVCIEW: Error - Couldn't decode frame\n");
			}

			/*run software autofocus (must be called after frame_decode)*/
			if(do_soft_autofocus || do_soft_focus)
				do_soft_focus = v4l2core_soft_autofocus_run(device);

			/*render the decoded frame*/
			snprintf(render_caption, 29, "Guvcview  (%2.2f fps)", v4l2core_get_realfps());
			render_set_caption(render_caption);
			render_frame(device->yuv_frame, my_render_mask);

			/* save frame to file (bounded write into the fixed-size buffer) */
			char filename[50];
			snprintf(filename, sizeof(filename), "/home/cobra/Desktop/frame/%d", device->frame_index);
			save_image_bmp(device, filename);

			if(check_photo_timer())
			{
				if((device->timestamp - my_last_photo_time) > my_photo_timer)
				{
					save_image = 1;
					my_last_photo_time = device->timestamp;

					if(my_options->photo_npics > 0)
					{
						if(my_photo_npics > 0)
							my_photo_npics--;
						else
							stop_photo_timer(); /*close timer*/
					}
				}
			}

			if(check_video_timer())
			{
				if((device->timestamp - my_video_begin_time) > my_video_timer)
					stop_video_timer(device);
			}

			if(save_image)
			{
				char *img_filename = NULL;

				/*get_photo_[name|path] always return a non NULL value*/
				char *name = strdup(get_photo_name());
				char *path = strdup(get_photo_path());

				if(get_photo_sufix_flag())
				{
					char *new_name = add_file_suffix(path, name);
					free(name); /*free old name*/
					name = new_name; /*replace with suffixed name*/
				}
				int pathsize = strlen(path);
				if(path[pathsize - 1] != '/')
					img_filename = smart_cat(path, '/', name);
				else
					img_filename = smart_cat(path, 0, name);

				//if(debug_level > 1)
				//	printf("GUVCVIEW: saving image to %s\n", img_filename);

				snprintf(status_message, 79, _("saving image to %s"), img_filename);
				gui_status_message(status_message);

				v4l2core_save_image(device, img_filename, get_photo_format());

				free(path);
				free(name);
				free(img_filename);

				save_image = 0; /*reset*/
			}

			if(video_capture_get_save_video())
			{
#ifdef USE_PLANAR_YUV
				int size = (device->format.fmt.pix.width * device->format.fmt.pix.height * 3) / 2;
#else
				int size = device->format.fmt.pix.width * device->format.fmt.pix.height * 2;
#endif
				uint8_t *input_frame = device->yuv_frame;
				/*
				 * TODO: check codec_id, format and frame flags
				 * (we may want to store a compressed format)
				 */
				if(get_video_codec_ind() == 0)
				{
					switch(device->requested_fmt)
					{
						case  V4L2_PIX_FMT_H264:
							input_frame = device->h264_frame;
							size = (int) device->h264_frame_size;
							break;
						default:
							input_frame = device->raw_frame;
							size = (int) device->raw_frame_size;
							break;
					}

				}
				encoder_add_video_frame(input_frame, size, device->timestamp, device->isKeyframe);

				/*
				 * exponential scheduler
				 *  with 50% threshold (nanosec)
				 *  and max value of 250 ms (4 fps)
				 */
				int time_sched = encoder_buff_scheduler(ENCODER_SCHED_EXP, 0.5, 250);
				if(time_sched > 0)
				{
					switch(device->requested_fmt)
					{
						case  V4L2_PIX_FMT_H264:
						{
							uint32_t framerate = time_sched; /*nanosec*/
							v4l2core_set_h264_frame_rate_config(device, framerate);
							break;
						}
						default:
						{
							struct timespec req = {
								.tv_sec = 0,
								.tv_nsec = time_sched};/*nanosec*/
							nanosleep(&req, NULL);
							break;
						}
					}
				}
			}
			/*we are done with the frame buffer release it*/
			v4l2core_release_frame(device);
		}
	}

	v4l2core_stop_stream(device);

	render_close();

	return ((void *) 0);
}

/*
 * start the encoder thread
 * args:
 *   data - pointer to user data
 *
 * asserts:
 *   none
 *
 * returns: error code
 */
int start_encoder_thread(void *data)
{
	int ret = __THREAD_CREATE(&encoder_thread, encoder_loop, data);
	
	if(ret)
		fprintf(stderr, "GUVCVIEW: encoder thread creation failed (%i)\n", ret);
	else if(debug_level > 2)
		printf("GUVCVIEW: created encoder thread with tid: %u\n", 
			(unsigned int) encoder_thread);

	return ret;
}
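
Note: capture_loop expects a capture_loop_data_t carrying the device and options pointers it dereferences above. A minimal sketch of how a caller might start the capture thread with the same thread macros (start_capture_thread is a hypothetical helper, not part of the examples; the field names are taken from the code above):

static __THREAD_TYPE capture_thread;

int start_capture_thread(v4l2_dev_t *device, options_t *options)
{
	/* static so the struct stays valid for the lifetime of the thread */
	static capture_loop_data_t cl_data;
	cl_data.device = device;
	cl_data.options = options;

	int ret = __THREAD_CREATE(&capture_thread, capture_loop, (void *) &cl_data);
	if(ret)
		fprintf(stderr, "GUVCVIEW: capture thread creation failed (%i)\n", ret);

	return ret;
}
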
Example #3
/*
 * encoder loop (should run in a separate thread)
 * args:
 *    data - pointer to user data
 *
 * asserts:
 *   none
 *
 * returns: pointer to return code
 */
static void *encoder_loop(void *data)
{
	my_encoder_status = 1;
	
	if(debug_level > 1)
		printf("GUVCVIEW: encoder thread (tid: %u)\n",
			(unsigned int) syscall (SYS_gettid));

	/*get the audio context*/
	audio_context_t *audio_ctx = get_audio_context();

	__THREAD_TYPE encoder_audio_thread;

	int channels = 0;
	int samprate = 0;

	if(audio_ctx)
	{
		channels = audio_ctx->channels;
		samprate = audio_ctx->samprate;
	}

	if(debug_level > 0)
		printf("GUVCVIEW: audio [channels= %i; samprate= %i] \n",
			channels, samprate);

	/*create the encoder context*/
	encoder_context_t *encoder_ctx = encoder_get_context(
		v4l2core_get_requested_frame_format(),
		get_video_codec_ind(),
		get_audio_codec_ind(),
		get_video_muxer(),
		v4l2core_get_frame_width(),
		v4l2core_get_frame_height(),
		v4l2core_get_fps_num(),
		v4l2core_get_fps_denom(),
		channels,
		samprate);

	/*store external SPS and PPS data if needed*/
	if(encoder_ctx->video_codec_ind == 0 && /*raw - direct input*/
		v4l2core_get_requested_frame_format() == V4L2_PIX_FMT_H264)
	{
		/*request an IDR (key) frame*/
		v4l2core_h264_request_idr();

		if(debug_level > 0)
			printf("GUVCVIEW: storing external pps and sps data in encoder context\n");
		encoder_ctx->h264_pps_size = v4l2core_get_h264_pps_size();
		if(encoder_ctx->h264_pps_size > 0)
		{
			encoder_ctx->h264_pps = calloc(encoder_ctx->h264_pps_size, sizeof(uint8_t));
			if(encoder_ctx->h264_pps == NULL)
			{
				fprintf(stderr,"GUVCVIEW: FATAL memory allocation failure (encoder_loop): %s\n", strerror(errno));
				exit(-1);
			}
			memcpy(encoder_ctx->h264_pps, v4l2core_get_h264_pps(), encoder_ctx->h264_pps_size);
		}

		encoder_ctx->h264_sps_size = v4l2core_get_h264_sps_size();
		if(encoder_ctx->h264_sps_size > 0)
		{
			encoder_ctx->h264_sps = calloc(encoder_ctx->h264_sps_size, sizeof(uint8_t));
			if(encoder_ctx->h264_sps == NULL)
			{
				fprintf(stderr,"GUVCVIEW: FATAL memory allocation failure (encoder_loop): %s\n", strerror(errno));
				exit(-1);
			}
			memcpy(encoder_ctx->h264_sps, v4l2core_get_h264_sps(), encoder_ctx->h264_sps_size);
		}
	}

	uint32_t current_framerate = 0;
	if(v4l2core_get_requested_frame_format() == V4L2_PIX_FMT_H264)
	{
		/* store framerate since it may change due to scheduler*/
		current_framerate = v4l2core_get_h264_frame_rate_config();
	}

	char *video_filename = NULL;
	/*get_video_[name|path] always return a non NULL value*/
	char *name = strdup(get_video_name());
	char *path = strdup(get_video_path());

	if(get_video_sufix_flag())
	{
		char *new_name = add_file_suffix(path, name);
		free(name); /*free old name*/
		name = new_name; /*replace with suffixed name*/
	}
	int pathsize = strlen(path);
	if(path[pathsize - 1] != '/')
		video_filename = smart_cat(path, '/', name);
	else
		video_filename = smart_cat(path, 0, name);

	snprintf(status_message, 79, _("saving video to %s"), video_filename);
	gui_status_message(status_message);

	/*muxer initialization*/
	encoder_muxer_init(encoder_ctx, video_filename);

	/*start video capture*/
	video_capture_save_video(1);

	int threshold = 102400; /*100 Mbytes*/
	int64_t last_check_pts = 0; /*last pts when disk supervisor called*/

	/*start audio processing thread*/
	if(encoder_ctx->enc_audio_ctx != NULL && audio_ctx->channels > 0)
	{
		if(debug_level > 1)
			printf("GUVCVIEW: starting encoder audio thread\n");
		
		int ret = __THREAD_CREATE(&encoder_audio_thread, audio_processing_loop, (void *) encoder_ctx);
		
		if(ret)
			fprintf(stderr, "GUVCVIEW: encoder audio thread creation failed (%i)\n", ret);
		else if(debug_level > 2)
			printf("GUVCVIEW: created audio encoder thread with tid: %u\n", 
				(unsigned int) encoder_audio_thread);
	}

	while(video_capture_get_save_video())
	{
		/*process the video buffer*/
		if(encoder_process_next_video_buffer(encoder_ctx) > 0)
		{
			/*
			 * no buffers to process:
			 * sleep for a millisecond
			 */
			 struct timespec req = {
				.tv_sec = 0,
				.tv_nsec = 1000000};/*nanosec*/
			 nanosleep(&req, NULL);
			 
		}	

		/*disk supervisor*/
		if(encoder_ctx->enc_video_ctx->pts - last_check_pts > 2 * NSEC_PER_SEC)
		{
			last_check_pts = encoder_ctx->enc_video_ctx->pts;

			if(!encoder_disk_supervisor(threshold, path))
			{
				/*stop capture*/
				gui_set_video_capture_button_status(0);
			}
		}
	}
	
	/*flush the video buffer*/
	encoder_flush_video_buffer(encoder_ctx);

	/*make sure the audio processing thread has stopped*/
	if(encoder_ctx->enc_audio_ctx != NULL && audio_ctx->channels > 0)
	{
		if(debug_level > 1)
			printf("GUVCVIEW: join encoder audio thread\n");
		__THREAD_JOIN(encoder_audio_thread);
	}

	/*close the muxer*/
	encoder_muxer_close(encoder_ctx);

	/*close the encoder context (clean up)*/
	encoder_close(encoder_ctx);

	if(v4l2core_get_requested_frame_format() == V4L2_PIX_FMT_H264)
	{
		/* restore framerate */
		v4l2core_set_h264_frame_rate_config(current_framerate);
	}

	/*clean string*/
	free(video_filename);
	free(path);
	free(name);

	my_encoder_status = 0;

	return ((void *) 0);
}

/*
 * capture loop (should run in a separate thread)
 * args:
 *    data - pointer to user data (options data)
 *
 * asserts:
 *    none
 *
 * returns: pointer to return code
 */
void *capture_loop(void *data)
{
	capture_loop_data_t *cl_data = (capture_loop_data_t *) data;
	options_t *my_options = (options_t *) cl_data->options;
	//config_t *my_config = (config_t *) cl_data->config;

	uint64_t my_last_photo_time = 0; /*timer count*/
	int my_photo_npics = 0; /*number of pictures still to take*/

	/*reset quit flag*/
	quit = 0;
	
	if(debug_level > 1)
		printf("GUVCVIEW: capture thread (tid: %u)\n", 
			(unsigned int) syscall (SYS_gettid));

	int ret = 0;
	
	int render_flags = 0;
	
	if (strcasecmp(my_options->render_flag, "full") == 0)
		render_flags = 1;
	else if (strcasecmp(my_options->render_flag, "max") == 0)
		render_flags = 2;
	
	render_set_verbosity(debug_level);
	
	if(render_init(render, v4l2core_get_frame_width(), v4l2core_get_frame_height(), render_flags) < 0)
		render = RENDER_NONE;
	else
	{
		render_set_event_callback(EV_QUIT, &quit_callback, NULL);
		render_set_event_callback(EV_KEY_V, &key_V_callback, NULL);
		render_set_event_callback(EV_KEY_I, &key_I_callback, NULL);
		render_set_event_callback(EV_KEY_UP, &key_UP_callback, NULL);
		render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, NULL);
		render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, NULL);
		render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, NULL);
	}

	/*add a video capture timer*/
	if(my_options->video_timer > 0)
	{
		my_video_timer = NSEC_PER_SEC * my_options->video_timer;
		my_video_begin_time = v4l2core_time_get_timestamp(); /*timer count*/
		/*if we are not saving video, start it*/
		if(!get_encoder_status())
			start_encoder_thread();
	}

	/*add a photo capture timer*/
	if(my_options->photo_timer > 0)
	{
		my_photo_timer = NSEC_PER_SEC * my_options->photo_timer;
		my_last_photo_time = v4l2core_time_get_timestamp(); /*timer count*/
	}

	if(my_options->photo_npics > 0)
		my_photo_npics = my_options->photo_npics;

	v4l2core_start_stream();
	
	v4l2_frame_buff_t *frame = NULL; //pointer to frame buffer

	while(!quit)
	{
		if(restart)
		{
			restart = 0; /*reset*/
			v4l2core_stop_stream();

			/*close render*/
			render_close();

			v4l2core_clean_buffers();

			/*try new format (values prepared by the request callback)*/
			ret = v4l2core_update_current_format();
			/*try to set the video stream format on the device*/
			if(ret != E_OK)
			{
				fprintf(stderr, "GUCVIEW: could not set the defined stream format\n");
				fprintf(stderr, "GUCVIEW: trying first listed stream format\n");

				v4l2core_prepare_valid_format();
				v4l2core_prepare_valid_resolution();
				ret = v4l2core_update_current_format();

				if(ret != E_OK)
				{
					fprintf(stderr, "GUCVIEW: also could not set the first listed stream format\n");

					gui_error("Guvcview error", "could not start a video stream in the device", 1);

					return ((void *) -1);
				}
			}

			/*restart the render with new format*/
			if(render_init(render, v4l2core_get_frame_width(), v4l2core_get_frame_height(), render_flags) < 0)
				render = RENDER_NONE;
			else
			{
				render_set_event_callback(EV_QUIT, &quit_callback, NULL);
				render_set_event_callback(EV_KEY_V, &key_V_callback, NULL);
				render_set_event_callback(EV_KEY_I, &key_I_callback, NULL);
				render_set_event_callback(EV_KEY_UP, &key_UP_callback, NULL);
				render_set_event_callback(EV_KEY_DOWN, &key_DOWN_callback, NULL);
				render_set_event_callback(EV_KEY_LEFT, &key_LEFT_callback, NULL);
				render_set_event_callback(EV_KEY_RIGHT, &key_RIGHT_callback, NULL);
			}


			if(debug_level > 0)
				printf("GUVCVIEW: reset to pixelformat=%x width=%i and height=%i\n", v4l2core_get_requested_frame_format(), v4l2core_get_frame_width(), v4l2core_get_frame_height());

			v4l2core_start_stream();

		}

		frame = v4l2core_get_decoded_frame();
		if( frame != NULL)
		{
			/*run software autofocus (must be called after frame was grabbed and decoded)*/
			if(do_soft_autofocus || do_soft_focus)
				do_soft_focus = v4l2core_soft_autofocus_run(frame);

			/*render the decoded frame*/
			snprintf(render_caption, 29, "Guvcview  (%2.2f fps)", v4l2core_get_realfps());
			render_set_caption(render_caption);
			render_frame(frame->yuv_frame, my_render_mask);

			if(check_photo_timer())
			{
				if((frame->timestamp - my_last_photo_time) > my_photo_timer)
				{
					save_image = 1;
					my_last_photo_time = frame->timestamp;

					if(my_options->photo_npics > 0)
					{
						if(my_photo_npics > 0)
							my_photo_npics--;
						else
							stop_photo_timer(); /*close timer*/
					}
				}
			}

			if(check_video_timer())
			{
				if((frame->timestamp - my_video_begin_time) > my_video_timer)
					stop_video_timer();
			}

			if(save_image)
			{
				char *img_filename = NULL;

				/*get_photo_[name|path] always return a non NULL value*/
				char *name = strdup(get_photo_name());
				char *path = strdup(get_photo_path());

				if(get_photo_sufix_flag())
				{
					char *new_name = add_file_suffix(path, name);
					free(name); /*free old name*/
					name = new_name; /*replace with suffixed name*/
				}
				int pathsize = strlen(path);
				if(path[pathsize - 1] != '/')
					img_filename = smart_cat(path, '/', name);
				else
					img_filename = smart_cat(path, 0, name);

				//if(debug_level > 1)
				//	printf("GUVCVIEW: saving image to %s\n", img_filename);

				snprintf(status_message, 79, _("saving image to %s"), img_filename);
				gui_status_message(status_message);

				v4l2core_save_image(frame, img_filename, get_photo_format());

				free(path);
				free(name);
				free(img_filename);

				save_image = 0; /*reset*/
			}

			if(video_capture_get_save_video())
			{
#ifdef USE_PLANAR_YUV
				int size = (v4l2core_get_frame_width() * v4l2core_get_frame_height() * 3) / 2;
#else
				int size = v4l2core_get_frame_width() * v4l2core_get_frame_height() * 2;
#endif
				uint8_t *input_frame = frame->yuv_frame;
				/*
				 * TODO: check codec_id, format and frame flags
				 * (we may want to store a compressed format)
				 */
				if(get_video_codec_ind() == 0) //raw frame
				{
					switch(v4l2core_get_requested_frame_format())
					{
						case  V4L2_PIX_FMT_H264:
							input_frame = frame->h264_frame;
							size = (int) frame->h264_frame_size;
							break;
						default:
							input_frame = frame->raw_frame;
							size = (int) frame->raw_frame_size;
							break;
					}

				}
				encoder_add_video_frame(input_frame, size, frame->timestamp, frame->isKeyframe);

				/*
				 * exponential scheduler
				 *  with 50% threshold (nanosec)
				 *  and max value of 250 ms (4 fps)
				 */
				int time_sched = encoder_buff_scheduler(ENCODER_SCHED_EXP, 0.5, 250);
				if(time_sched > 0)
				{
					switch(v4l2core_get_requested_frame_format())
					{
						case  V4L2_PIX_FMT_H264:
						{
							uint32_t framerate = time_sched; /*nanosec*/
							v4l2core_set_h264_frame_rate_config(framerate);
							break;
						}
						default:
						{
							struct timespec req = {
								.tv_sec = 0,
								.tv_nsec = time_sched};/*nanosec*/
							nanosleep(&req, NULL);
							break;
						}
					}
				}
			}
			/*we are done with the frame buffer release it*/
			v4l2core_release_frame(frame);
		}
	}

	v4l2core_stop_stream();
	
	/*if we are still saving video then stop it*/
	if(video_capture_get_save_video())
		stop_encoder_thread();

	render_close();

	return ((void *) 0);
}

/*
 * start the encoder thread
 * args:
 *   data - pointer to user data
 *
 * asserts:
 *   none
 *
 * returns: error code
 */
int start_encoder_thread(void *data)
{
	int ret = __THREAD_CREATE(&encoder_thread, encoder_loop, data);
	
	if(ret)
		fprintf(stderr, "GUVCVIEW: encoder thread creation failed (%i)\n", ret);
	else if(debug_level > 2)
		printf("GUVCVIEW: created encoder thread with tid: %u\n", 
			(unsigned int) encoder_thread);

	return ret;
}
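
Note: Example #3 calls stop_encoder_thread(), which is not shown in these examples. Since encoder_loop runs while video_capture_get_save_video() is non-zero, a plausible sketch of such a function (an assumption about its body, built only from calls that appear above) is:

int stop_encoder_thread()
{
	/* clear the save flag so encoder_loop leaves its processing loop,
	 * flushes the video buffer and closes the muxer */
	video_capture_save_video(0);

	/* wait for encoder_loop to finish */
	__THREAD_JOIN(encoder_thread);

	if(debug_level > 1)
		printf("GUVCVIEW: encoder thread joined\n");

	return 0;
}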