static bool video_frame_hwaccel(struct ff_frame *frame, struct ffmpeg_source *s, struct obs_source_frame *obs_frame) { // 4th plane is pixelbuf reference for mac for (int i = 0; i < 3; i++) { obs_frame->data[i] = frame->frame->data[i]; obs_frame->linesize[i] = frame->frame->linesize[i]; } if (!set_obs_frame_colorprops(frame, s, obs_frame)) return false; obs_source_output_video(s->source, obs_frame); return true; }
static bool video_frame_direct(struct ff_frame *frame, struct ffmpeg_source *s, struct obs_source_frame *obs_frame) { int i; for (i = 0; i < MAX_AV_PLANES; i++) { obs_frame->data[i] = frame->frame->data[i]; obs_frame->linesize[i] = frame->frame->linesize[i]; } if (!set_obs_frame_colorprops(frame, s, obs_frame)) return false; obs_source_output_video(s->source, obs_frame); return true; }
static bool video_frame_scale(struct ff_frame *frame, struct ffmpeg_source *s, struct obs_source_frame *obs_frame) { if (!update_sws_context(s, frame->frame)) return false; sws_scale( s->sws_ctx, (uint8_t const *const *)frame->frame->data, frame->frame->linesize, 0, frame->frame->height, &s->sws_data, &s->sws_linesize ); obs_frame->data[0] = s->sws_data; obs_frame->linesize[0] = s->sws_linesize; obs_frame->format = VIDEO_FORMAT_BGRA; obs_source_output_video(s->source, obs_frame); return true; }
/*
 * Receive one captured video frame from the DeckLink driver and forward
 * it to OBS.
 *
 * Fills currentFrame (single packed plane) from the IDeckLinkVideoInputFrame
 * and stamps it with the supplied timestamp.  Silently returns on a null
 * frame; logs a warning if the frame data cannot be mapped.
 */
void DeckLinkDeviceInstance::HandleVideoFrame(
		IDeckLinkVideoInputFrame *videoFrame,
		const uint64_t timestamp)
{
	if (videoFrame == nullptr)
		return;

	void *bytes;
	if (videoFrame->GetBytes(&bytes) != S_OK) {
		LOG(LOG_WARNING, "Failed to get video frame data");
		return;
	}

	currentFrame.data[0] = (uint8_t *)bytes;
	currentFrame.linesize[0] = (uint32_t)videoFrame->GetRowBytes();
	currentFrame.width = (uint32_t)videoFrame->GetWidth();
	currentFrame.height = (uint32_t)videoFrame->GetHeight();
	currentFrame.timestamp = timestamp;

	/* NOTE(review): color space hard-coded to BT.601 partial range —
	 * presumably matches the capture format; verify for HD modes */
	video_format_get_parameters(VIDEO_CS_601, VIDEO_RANGE_PARTIAL,
			currentFrame.color_matrix,
			currentFrame.color_range_min,
			currentFrame.color_range_max);

	/* Fixed mojibake: "&currentFrame" had been corrupted to
	 * "¤tFrame" ("&curren" interpreted as the HTML entity for ¤). */
	obs_source_output_video(decklink->GetSource(), &currentFrame);
}
/* * Worker thread to get video data */ static void *v4l2_thread(void *vptr) { V4L2_DATA(vptr); int r; fd_set fds; uint8_t *start; uint64_t frames; uint64_t first_ts; struct timeval tv; struct v4l2_buffer buf; struct obs_source_frame out; size_t plane_offsets[MAX_AV_PLANES]; if (v4l2_start_capture(data->dev, &data->buffers) < 0) goto exit; frames = 0; first_ts = 0; v4l2_prep_obs_frame(data, &out, plane_offsets); while (os_event_try(data->event) == EAGAIN) { FD_ZERO(&fds); FD_SET(data->dev, &fds); tv.tv_sec = 1; tv.tv_usec = 0; r = select(data->dev + 1, &fds, NULL, NULL, &tv); if (r < 0) { if (errno == EINTR) continue; blog(LOG_DEBUG, "select failed"); break; } else if (r == 0) { blog(LOG_DEBUG, "select timeout"); continue; } buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; if (v4l2_ioctl(data->dev, VIDIOC_DQBUF, &buf) < 0) { if (errno == EAGAIN) continue; blog(LOG_DEBUG, "failed to dequeue buffer"); break; } out.timestamp = timeval2ns(buf.timestamp); if (!frames) first_ts = out.timestamp; out.timestamp -= first_ts; start = (uint8_t *) data->buffers.info[buf.index].start; for (uint_fast32_t i = 0; i < MAX_AV_PLANES; ++i) out.data[i] = start + plane_offsets[i]; obs_source_output_video(data->source, &out); if (v4l2_ioctl(data->dev, VIDIOC_QBUF, &buf) < 0) { blog(LOG_DEBUG, "failed to enqueue buffer"); break; } frames++; } blog(LOG_INFO, "Stopped capture after %"PRIu64" frames", frames); exit: v4l2_stop_capture(data->dev); return NULL; }
/* Callback that forwards an already-prepared frame straight to the
 * OBS source; opaque is the ffmpeg_source instance. */
static void get_frame(void *opaque, struct obs_source_frame *f)
{
	struct ffmpeg_source *source_data = opaque;

	obs_source_output_video(source_data->source, f);
}