Example #1
static inline struct source_frame *get_closest_frame(obs_source_t source,
		uint64_t sys_time, int *audio_time_refs)
{
	struct source_frame *next_frame = source->video_frames.array[0];
	struct source_frame *frame      = NULL;
	uint64_t sys_offset = sys_time - source->last_sys_timestamp;
	uint64_t frame_time = next_frame->timestamp;
	uint64_t frame_offset = 0;

	/* account for timestamp invalidation */
	if (frame_out_of_bounds(source, frame_time)) {
		source->last_frame_ts = next_frame->timestamp;
		(*audio_time_refs)++;
	} else {
		frame_offset = frame_time - source->last_frame_ts;
		source->last_frame_ts += sys_offset;
	}

	while (frame_offset <= sys_offset) {
		source_frame_destroy(frame);

		frame = next_frame;
		da_erase(source->video_frames, 0);

		if (!source->video_frames.num)
			break;

		next_frame = source->video_frames.array[0];

		/* more timestamp checking and compensating */
		if ((next_frame->timestamp - frame_time) > MAX_TIMESTAMP_JUMP) {
			source->last_frame_ts =
				next_frame->timestamp - frame_offset;
			(*audio_time_refs)++;
		}

		frame_time   = next_frame->timestamp;
		frame_offset = frame_time - source->last_frame_ts;
	}

	return frame;
}
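This example (and the ones that follow) calls a frame_out_of_bounds() helper and uses a MAX_TIMESTAMP_JUMP constant that are not shown in the listing. The stand-in below is only a sketch inferred from the call sites; the name timestamp_out_of_bounds and the threshold value are assumptions, not code copied from libobs. The idea is that a timestamp is treated as invalid when it drifts from the last rendered frame's timestamp by more than the allowed jump, in either direction.

#include <stdbool.h>
#include <stdint.h>

/* Assumed jump threshold in nanoseconds; the real constant is defined in
 * obs-source.c and may use a different value. */
#define MAX_TIMESTAMP_JUMP 2000000000ULL

/* Hypothetical stand-in for the frame_out_of_bounds() helper used above:
 * a timestamp is "out of bounds" when it differs from the last rendered
 * frame's timestamp by more than MAX_TIMESTAMP_JUMP in either direction,
 * which covers both forward jumps and timestamp resets. */
static inline bool timestamp_out_of_bounds(uint64_t last_frame_ts, uint64_t ts)
{
	uint64_t diff = (ts > last_frame_ts) ? (ts - last_frame_ts)
					     : (last_frame_ts - ts);
	return diff > MAX_TIMESTAMP_JUMP;
}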
Example #2
static bool new_frame_ready(obs_source_t source, uint64_t sys_time)
{
	struct source_frame *next_frame = source->video_frames.array[0];
	struct source_frame *frame      = NULL;
	uint64_t sys_offset = sys_time - source->last_sys_timestamp;
	uint64_t frame_time = next_frame->timestamp;
	uint64_t frame_offset = 0;

	/* account for timestamp invalidation */
	if (frame_out_of_bounds(source, frame_time)) {
		source->last_frame_ts = next_frame->timestamp;
		os_atomic_inc_long(&source->av_sync_ref);
	} else {
		frame_offset = frame_time - source->last_frame_ts;
		source->last_frame_ts += frame_offset;
	}

	while (frame_offset <= sys_offset) {
		source_frame_destroy(frame);

		if (source->video_frames.num == 1)
			return true;

		frame = next_frame;
		da_erase(source->video_frames, 0);
		next_frame = source->video_frames.array[0];

		/* more timestamp checking and compensating */
		if ((next_frame->timestamp - frame_time) > MAX_TIMESTAMP_JUMP) {
			source->last_frame_ts =
				next_frame->timestamp - frame_offset;
			os_atomic_inc_long(&source->av_sync_ref);
		}

		frame_time   = next_frame->timestamp;
		frame_offset = frame_time - source->last_frame_ts;
	}

	return frame != NULL;
}
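new_frame_ready() only reports whether a newer frame should be displayed on this tick; the caller still pops the queue and renders the frame. The sketch below is a simplified, self-contained model of that tick-driven pattern, using timestamps only and hypothetical names such as tick_video; it is not the libobs caller and glosses over the last_frame_ts bookkeeping above.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define QUEUE_LEN 4

/* Simplified model of the async frame queue: timestamps only (ns). */
struct frame_queue {
	uint64_t timestamps[QUEUE_LEN];
	size_t   num;
	uint64_t last_frame_ts;
};

/* Hypothetical caller pattern: on every video tick, consume all queued
 * frames whose timestamps have fallen behind the elapsed system time,
 * and display the most recent one consumed. */
static void tick_video(struct frame_queue *q, uint64_t sys_offset)
{
	size_t consumed = 0;

	while (consumed < q->num &&
	       q->timestamps[consumed] - q->last_frame_ts <= sys_offset)
		consumed++;

	if (!consumed)
		return; /* nothing new to show this tick */

	printf("display frame ts=%llu\n",
	       (unsigned long long)q->timestamps[consumed - 1]);

	q->last_frame_ts = q->timestamps[consumed - 1];

	/* drop the consumed frames from the front of the queue */
	for (size_t i = consumed; i < q->num; i++)
		q->timestamps[i - consumed] = q->timestamps[i];
	q->num -= consumed;
}

int main(void)
{
	struct frame_queue q = {
		.timestamps = {33333333, 66666666, 99999999, 133333332},
		.num = 4,
		.last_frame_ts = 0,
	};

	tick_video(&q, 70000000); /* 70 ms elapsed: first two frames are due */
	tick_video(&q, 40000000); /* 40 ms more: the third frame is due */
	return 0;
}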
Example #3
static bool ready_deinterlace_frames(obs_source_t *source, uint64_t sys_time)
{
	struct obs_source_frame *next_frame = source->async_frames.array[0];
	struct obs_source_frame *prev_frame = NULL;
	struct obs_source_frame *frame      = NULL;
	uint64_t sys_offset = sys_time - source->last_sys_timestamp;
	uint64_t frame_time = next_frame->timestamp;
	uint64_t frame_offset = 0;
	size_t idx = 1;

	if ((source->flags & OBS_SOURCE_FLAG_UNBUFFERED) != 0) {
		while (source->async_frames.num > 2) {
			da_erase(source->async_frames, 0);
			remove_async_frame(source, next_frame);
			next_frame = source->async_frames.array[0];
		}

		if (source->async_frames.num == 2)
			source->async_frames.array[0]->prev_frame = true;
		source->deinterlace_offset = 0;
		return true;
	}

	/* account for timestamp invalidation */
	if (frame_out_of_bounds(source, frame_time)) {
		source->last_frame_ts = next_frame->timestamp;
		source->deinterlace_offset = 0;
		return true;
	} else {
		frame_offset = frame_time - source->last_frame_ts;
		source->last_frame_ts += sys_offset;
	}

	while (source->last_frame_ts > next_frame->timestamp) {

		/* this tries to reduce the needless frame duplication, also
		 * helps smooth out async rendering to frame boundaries.  In
		 * other words, tries to keep the framerate as smooth as
		 * possible */
		if ((source->last_frame_ts - next_frame->timestamp) < 2000000)
			break;

		if (prev_frame) {
			da_erase(source->async_frames, 0);
			remove_async_frame(source, prev_frame);
		}

		if (source->async_frames.num <= 2) {
			bool exit = true;

			if (prev_frame) {
				prev_frame->prev_frame = true;

			} else if (!frame && source->async_frames.num == 2) {
				exit = false;
			}

			if (exit) {
				source->deinterlace_offset = 0;
				return true;
			}
		}

		if (frame)
			idx = 2;
		else
			idx = 1;

		prev_frame = frame;
		frame = next_frame;
		next_frame = source->async_frames.array[idx];

		/* more timestamp checking and compensating */
		if ((next_frame->timestamp - frame_time) > MAX_TS_VAR) {
			source->last_frame_ts =
				next_frame->timestamp - frame_offset;
			source->deinterlace_offset = 0;
		}

		frame_time   = next_frame->timestamp;
		frame_offset = frame_time - source->last_frame_ts;
	}

	if (prev_frame)
		prev_frame->prev_frame = true;

	return frame != NULL;
}
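The deinterlacing variant keeps one extra frame queued and marks it with prev_frame so the deinterlacer has both frames of a pair. The sketch below models only the OBS_SOURCE_FLAG_UNBUFFERED branch under simplified assumptions (timestamps only, hypothetical names such as trim_unbuffered): drop everything but the two newest frames and flag the older of the remaining pair as the previous frame.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define QUEUE_LEN 8

/* Minimal model of a queued async frame: a timestamp plus the prev_frame
 * flag that tells the deinterlacer which entry is the older half of the
 * current pair. */
struct queued_frame {
	uint64_t timestamp;
	bool     prev_frame;
};

/* Hypothetical sketch of the unbuffered branch above: discard all but the
 * two newest frames, then mark the older of the remaining pair as the
 * "previous" frame for deinterlacing. */
static void trim_unbuffered(struct queued_frame *q, size_t *num)
{
	while (*num > 2) {
		/* discard the oldest frame (front of the queue) */
		for (size_t i = 1; i < *num; i++)
			q[i - 1] = q[i];
		(*num)--;
	}

	if (*num == 2)
		q[0].prev_frame = true;
}

int main(void)
{
	struct queued_frame q[QUEUE_LEN] = {
		{33333333, false}, {66666666, false},
		{99999999, false}, {133333332, false},
	};
	size_t num = 4;

	trim_unbuffered(q, &num);

	/* prints the two surviving frames; the older one carries prev=1 */
	for (size_t i = 0; i < num; i++)
		printf("ts=%llu prev=%d\n",
		       (unsigned long long)q[i].timestamp,
		       (int)q[i].prev_frame);
	return 0;
}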