/*
 * Destroys a source and releases every resource it owns.
 *
 * Tear-down order matters: detach from the filter parent and release our
 * own filters first, then free queued audio/video data, then graphics
 * objects (inside the graphics context), then the plugin's private data,
 * and finally the bookkeeping containers and the structure itself.
 */
static void obs_source_destroy(obs_source_t source)
{
	size_t i;

	/* tolerate a null source (e.g. a source that failed creation),
	 * matching the behavior of the public destroy path */
	if (!source)
		return;

	if (source->filter_parent)
		obs_source_filter_remove(source->filter_parent, source);

	for (i = 0; i < source->filters.num; i++)
		obs_source_release(source->filters.array[i]);

	for (i = 0; i < source->audio_wait_buffer.num; i++)
		audiobuf_free(source->audio_wait_buffer.array+i);
	for (i = 0; i < source->video_frames.num; i++)
		source_frame_destroy(source->video_frames.array[i]);

	/* graphics objects must be destroyed within the graphics context */
	gs_entercontext(obs->video.graphics);
	texture_destroy(source->output_texture);
	gs_leavecontext();

	if (source->data)
		source->callbacks.destroy(source->data);

	bfree(source->audio_data.data);
	audio_line_destroy(source->audio_line);
	audio_resampler_destroy(source->resampler);

	da_free(source->video_frames);
	da_free(source->audio_wait_buffer);
	da_free(source->filters);
	pthread_mutex_destroy(&source->filter_mutex);
	pthread_mutex_destroy(&source->audio_mutex);
	pthread_mutex_destroy(&source->video_mutex);
	dstr_free(&source->settings);
	bfree(source);
}
/*
 * Releases a frame previously obtained from obs_source_getframe and
 * drops the source reference that was taken on the caller's behalf.
 * A null frame is a no-op (no reference was taken in that case).
 */
void obs_source_releaseframe(obs_source_t source, struct source_frame *frame)
{
	if (!frame)
		return;

	source_frame_destroy(frame);
	obs_source_release(source);
}
/*
 * Ensures that cached frames are displayed on time.  If multiple frames
 * were cached between renders, then releases the unnecessary frames and uses
 * the frame with the closest timing to ensure sync.
 *
 * Returns NULL if no frame is available; otherwise the returned frame
 * carries a reference to the source which the caller must release via
 * obs_source_releaseframe.
 */
struct source_frame *obs_source_getframe(obs_source_t source)
{
	uint64_t last_frame_time = source->last_frame_timestamp;
	struct source_frame *frame = NULL;
	struct source_frame *next_frame;
	uint64_t sys_time, frame_time;

	pthread_mutex_lock(&source->video_mutex);

	/* nothing queued — leave without taking a source reference */
	if (!source->video_frames.num)
		goto unlock;

	next_frame = source->video_frames.array[0];
	sys_time = os_gettime_ns();
	frame_time = next_frame->timestamp;

	if (!source->last_frame_timestamp) {
		/* very first frame: hand it out and start the media clock */
		frame = next_frame;
		da_erase(source->video_frames, 0);
		source->last_frame_timestamp = frame_time;
	} else {
		uint64_t sys_offset, frame_offset;
		/* real time elapsed since the previous getframe call */
		sys_offset = sys_time - source->last_sys_timestamp;
		/* media time of the oldest queued frame relative to the
		 * last frame we handed out */
		frame_offset = frame_time - last_frame_time;

		/* advance the media clock by real elapsed time */
		source->last_frame_timestamp += sys_offset;

		/* drop every frame whose presentation time has already
		 * passed, keeping only the newest frame that is due */
		while (frame_offset <= sys_offset) {
			if (frame)
				source_frame_destroy(frame);
			frame = next_frame;
			da_erase(source->video_frames, 0);
			if (!source->video_frames.num)
				break;
			next_frame = source->video_frames.array[0];
			frame_time = next_frame->timestamp;
			frame_offset = frame_time - last_frame_time;
		}
	}

	source->last_sys_timestamp = sys_time;

unlock:
	pthread_mutex_unlock(&source->video_mutex);

	/* pair with obs_source_releaseframe, which releases this ref */
	if (frame != NULL)
		obs_source_addref(source);

	return frame;
}
void obs_source_destroy(struct obs_source *source) { size_t i; if (!source) return; obs_context_data_remove(&source->context); obs_source_dosignal(source, "source_destroy", "destroy"); if (source->context.data) source->info.destroy(source->context.data); if (source->filter_parent) obs_source_filter_remove(source->filter_parent, source); for (i = 0; i < source->filters.num; i++) obs_source_release(source->filters.array[i]); for (i = 0; i < source->video_frames.num; i++) source_frame_destroy(source->video_frames.array[i]); gs_entercontext(obs->video.graphics); texrender_destroy(source->async_convert_texrender); texture_destroy(source->async_texture); gs_leavecontext(); for (i = 0; i < MAX_AV_PLANES; i++) bfree(source->audio_data.data[i]); audio_line_destroy(source->audio_line); audio_resampler_destroy(source->resampler); texrender_destroy(source->filter_texrender); da_free(source->video_frames); da_free(source->filters); pthread_mutex_destroy(&source->filter_mutex); pthread_mutex_destroy(&source->audio_mutex); pthread_mutex_destroy(&source->video_mutex); obs_context_data_free(&source->context); bfree(source); }
/*
 * Selects the queued frame whose timestamp best matches the amount of
 * real time that has elapsed since the last render, destroying any
 * frames that are already late.  Caller must hold the video mutex and
 * guarantee video_frames is non-empty.  Increments *audio_time_refs
 * whenever a timestamp discontinuity forces the clock to be reset, so
 * audio timing can be re-synchronized.
 */
static inline struct source_frame *get_closest_frame(obs_source_t source,
		uint64_t sys_time, int *audio_time_refs)
{
	struct source_frame *next_frame = source->video_frames.array[0];
	struct source_frame *frame = NULL;
	uint64_t sys_offset = sys_time - source->last_sys_timestamp;
	uint64_t frame_time = next_frame->timestamp;
	uint64_t frame_offset = 0;

	/* account for timestamp invalidation */
	if (frame_out_of_bounds(source, frame_time)) {
		/* timestamp jumped: restart the media clock at this frame
		 * and flag audio for re-sync */
		source->last_frame_ts = next_frame->timestamp;
		(*audio_time_refs)++;
	} else {
		frame_offset = frame_time - source->last_frame_ts;
		/* advance the media clock by real elapsed time */
		source->last_frame_ts += sys_offset;
	}

	/* drop frames that are already due; keep the newest such frame */
	while (frame_offset <= sys_offset) {
		source_frame_destroy(frame);

		frame = next_frame;
		da_erase(source->video_frames, 0);

		if (!source->video_frames.num)
			break;

		next_frame = source->video_frames.array[0];

		/* more timestamp checking and compensating */
		if ((next_frame->timestamp - frame_time) > MAX_TIMESTAMP_JUMP) {
			source->last_frame_ts =
				next_frame->timestamp - frame_offset;
			(*audio_time_refs)++;
		}

		frame_time   = next_frame->timestamp;
		frame_offset = frame_time - source->last_frame_ts;
	}

	return frame;
}
/*
 * Determines whether a newly due frame is ready for display, destroying
 * frames that are already late so that at most the newest due frame
 * remains at the head of the queue.  Caller must hold the video mutex
 * and guarantee video_frames is non-empty.  Bumps av_sync_ref whenever
 * a timestamp discontinuity forces the media clock to reset, so audio
 * can re-synchronize.
 *
 * Returns true when a frame became ready (or only one frame remains).
 */
static bool new_frame_ready(obs_source_t source, uint64_t sys_time)
{
	struct source_frame *next_frame = source->video_frames.array[0];
	struct source_frame *frame = NULL;
	uint64_t sys_offset = sys_time - source->last_sys_timestamp;
	uint64_t frame_time = next_frame->timestamp;
	uint64_t frame_offset = 0;

	/* account for timestamp invalidation */
	if (frame_out_of_bounds(source, frame_time)) {
		/* timestamp jumped: restart the media clock at this frame
		 * and flag audio for re-sync */
		source->last_frame_ts = next_frame->timestamp;
		os_atomic_inc_long(&source->av_sync_ref);
	} else {
		frame_offset = frame_time - source->last_frame_ts;
		/* BUGFIX: advance the media clock by real elapsed time
		 * (sys_offset), not by frame_offset.  Adding frame_offset
		 * made last_frame_ts equal frame_time, zeroing the offset
		 * and defeating the drift compensation below — the sibling
		 * get_closest_frame path uses sys_offset here. */
		source->last_frame_ts += sys_offset;
	}

	/* drop frames that are already due; keep the newest such frame */
	while (frame_offset <= sys_offset) {
		source_frame_destroy(frame);

		if (source->video_frames.num == 1)
			return true;

		frame = next_frame;
		da_erase(source->video_frames, 0);
		next_frame = source->video_frames.array[0];

		/* more timestamp checking and compensating */
		if ((next_frame->timestamp - frame_time) > MAX_TIMESTAMP_JUMP) {
			source->last_frame_ts =
				next_frame->timestamp - frame_offset;
			os_atomic_inc_long(&source->av_sync_ref);
		}

		frame_time   = next_frame->timestamp;
		frame_offset = frame_time - source->last_frame_ts;
	}

	return frame != NULL;
}
/*
 * Destroys a source and releases every resource it owns, emitting the
 * "source-destroy" signal first so listeners see a still-valid source.
 *
 * Tear-down order matters: detach from the filter parent and release our
 * own filters, free queued frames, destroy graphics objects (inside the
 * graphics context), destroy the plugin's private data, then free audio
 * resources, handlers, containers, settings and finally the structure.
 */
static void obs_source_destroy(obs_source_t source)
{
	size_t i;

	/* guard against null before the signal dereferences the source */
	if (!source)
		return;

	obs_source_dosignal(source, "source-destroy");

	if (source->filter_parent)
		obs_source_filter_remove(source->filter_parent, source);

	for (i = 0; i < source->filters.num; i++)
		obs_source_release(source->filters.array[i]);

	for (i = 0; i < source->video_frames.num; i++)
		source_frame_destroy(source->video_frames.array[i]);

	/* graphics objects must be destroyed within the graphics context */
	gs_entercontext(obs->video.graphics);
	texture_destroy(source->output_texture);
	gs_leavecontext();

	if (source->data)
		source->callbacks.destroy(source->data);

	bfree(source->audio_data.data);
	audio_line_destroy(source->audio_line);
	audio_resampler_destroy(source->resampler);

	proc_handler_destroy(source->procs);
	signal_handler_destroy(source->signals);

	da_free(source->video_frames);
	da_free(source->filters);
	pthread_mutex_destroy(&source->filter_mutex);
	pthread_mutex_destroy(&source->audio_mutex);
	pthread_mutex_destroy(&source->video_mutex);
	obs_data_release(source->settings);
	bfree(source->name);
	bfree(source);
}