/* GObject vmethod implementations */

/* Standard GObject property getter: copy the requested element property
 * into @value. */
static void
gst_vdp_vpp_get_property (GObject * object, guint property_id, GValue * value,
    GParamSpec * pspec)
{
  GstVdpVideoPostProcess *self = GST_VDP_VIDEO_POST_PROCESS (object);

  switch (property_id) {
    case PROP_FORCE_ASPECT_RATIO:
      g_value_set_boolean (value, self->force_aspect_ratio);
      break;
    case PROP_DEINTERLACE_MODE:
      g_value_set_enum (value, self->mode);
      break;
    case PROP_DEINTERLACE_METHOD:
      g_value_set_enum (value, self->method);
      break;
    case PROP_NOISE_REDUCTION:
      g_value_set_float (value, self->noise_reduction);
      break;
    case PROP_SHARPENING:
      g_value_set_float (value, self->sharpening);
      break;
    case PROP_INVERSE_TELECINE:
      g_value_set_boolean (value, self->inverse_telecine);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
/* Element state-change handler: start the element before going
 * READY->PAUSED, chain up to the parent class, then stop it after
 * PAUSED->READY. */
static GstStateChangeReturn
gst_vdp_vpp_change_state (GstElement * element, GstStateChange transition)
{
  GstVdpVideoPostProcess *self = GST_VDP_VIDEO_POST_PROCESS (element);
  GstStateChangeReturn ret;

  /* upward transition: allocate/prepare before chaining up */
  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
    if (!gst_vdp_vpp_start (self))
      return GST_STATE_CHANGE_FAILURE;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* downward transition: tear down after the parent class has handled it */
  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    if (!gst_vdp_vpp_stop (self))
      ret = GST_STATE_CHANGE_FAILURE;
  }

  return ret;
}
/* Source-pad event handler: intercepts QOS events to record the earliest
 * running time at which output is still useful (used by the chain function
 * to drop late frames), then forwards every event the default way. */
static gboolean
gst_vdp_vpp_src_event (GstPad * pad, GstEvent * event)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      /* FIX: '&timestamp' had been mangled into the HTML-entity residue
       * '×tamp' (&times;), which is not valid C */
      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      /* earliest_time is read from the streaming thread in chain(), so
       * update it under the object lock */
      GST_OBJECT_LOCK (vpp);
      vpp->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (vpp);

      res = gst_pad_event_default (pad, event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
  }

  gst_object_unref (vpp);
  return res;
}
/* Sink-pad event handler: flushes internal picture state on FLUSH_STOP
 * (pushing the event downstream directly), all other events are forwarded
 * the default way. */
static gboolean
gst_vdp_vpp_sink_event (GstPad * pad, GstEvent * event)
{
  GstVdpVideoPostProcess *self =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  gboolean res;

  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
    GST_DEBUG_OBJECT (self, "flush stop");

    /* drop any queued pictures before data starts flowing again */
    gst_vdp_vpp_flush (self);

    res = gst_pad_push_event (self->srcpad, event);
  } else {
    res = gst_pad_event_default (pad, event);
  }

  gst_object_unref (self);
  return res;
}
/* Sink-pad setcaps: derive source caps from the input video caps by
 * intersecting with what downstream allows; doubles the framerate and
 * strips the interlaced field when deinterlacing is active.
 * Returns FALSE if no compatible output format exists. */
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstCaps *output_caps, *allowed_caps, *src_caps;
  gboolean res;

  /* extract interlaced flag */
  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
  output_caps = gst_vdp_video_to_output_caps (caps);

  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_truncate (src_caps);

  GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT
      " src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps);

  /* FIX: unref these before the empty check so the failure path below no
   * longer leaks output_caps and allowed_caps */
  gst_caps_unref (output_caps);
  gst_caps_unref (allowed_caps);

  if (gst_caps_is_empty (src_caps)) {
    /* FIX: src_caps leaked here previously */
    gst_caps_unref (src_caps);
    goto invalid_caps;
  }

  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    structure = gst_caps_get_structure (src_caps, 0);

    if (!gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_caps_unref (src_caps);
      goto invalid_caps;
    }

    /* deinterlacing emits one frame per field, so the output rate doubles */
    gst_fraction_double (&fps_n, &fps_d);
    gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
        NULL);
    gst_structure_remove_field (structure, "interlaced");

    vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
  }

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  /* FIX: gst_pad_set_caps() takes its own reference; drop ours to avoid a
   * leak on the success path (matches the other setcaps variant) */
  gst_caps_unref (src_caps);

done:
  gst_object_unref (vpp);
  return res;

invalid_caps:
  GST_ERROR_OBJECT (vpp, "invalid caps: %" GST_PTR_FORMAT, caps);
  res = FALSE;
  goto done;
}
/* Sink-pad bufferalloc: provides upstream with a GstVdpVideoBuffer of the
 * requested caps.  Needs a GstVdpDevice to allocate from; if none is known
 * yet, a throwaway buffer is allocated downstream purely to discover the
 * device it carries.  Returns GST_FLOW_ERROR (the initial value of 'ret')
 * on any failure.  Note: the success path deliberately falls through the
 * 'error:' label, which only does common cleanup. */
static GstFlowReturn
gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size,
    GstCaps * caps, GstBuffer ** buf)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstVdpOutputBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_ERROR;
  GstVdpDevice *device = NULL;
  GstStructure *structure;
  gint width, height;
  gint chroma_type;

  if (!vpp->device) {
    /* if we haven't got a device yet we must alloc a buffer downstream to
     * get it */
    GstCaps *src_caps = gst_pad_get_allowed_caps (vpp->srcpad);
    gst_pad_fixate_caps (vpp->srcpad, src_caps);
    ret = gst_pad_alloc_buffer (vpp->srcpad, 0, 0, src_caps,
        (GstBuffer **) & outbuf);
    gst_caps_unref (src_caps);

    if (ret != GST_FLOW_OK)
      goto error;

    /* only the device is of interest; the probe buffer itself is dropped.
     * NOTE(review): assumes outbuf->device stays valid after the unref,
     * i.e. vpp or the pipeline holds another ref on the device — confirm */
    device = outbuf->device;
    gst_buffer_unref (GST_BUFFER (outbuf));
  } else
    device = vpp->device;

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height) ||
      !gst_structure_get_int (structure, "chroma-type", &chroma_type))
    goto error;

  *buf = GST_BUFFER (gst_vdp_video_buffer_new (device, chroma_type, width,
          height));

  if (*buf == NULL)
    goto error;

  GST_BUFFER_SIZE (*buf) = size;
  GST_BUFFER_OFFSET (*buf) = offset;

  gst_buffer_set_caps (*buf, caps);

  ret = GST_FLOW_OK;

error:
  /* common exit for both success and failure paths */
  gst_object_unref (vpp);
  return ret;
}
/* Sink-pad getcaps: report the device's supported video caps when a device
 * is available, otherwise fall back to the static sink pad template. */
static GstCaps *
gst_vdp_vpp_sink_getcaps (GstPad * pad)
{
  GstVdpVideoPostProcess *self =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstCaps *result;

  result = (self->device != NULL)
      ? gst_vdp_video_buffer_get_allowed_video_caps (self->device)
      : gst_static_pad_template_get_caps (&sink_template);

  gst_object_unref (self);
  return result;
}
/* Sink-pad event handler: performs per-event bookkeeping (flush internal
 * picture queue on FLUSH_STOP, track the segment on NEWSEGMENT) and then
 * forwards every event downstream via the default handler. */
static gboolean
gst_vdp_vpp_sink_event (GstPad * pad, GstEvent * event)
{
  GstVdpVideoPostProcess *self =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      GST_DEBUG_OBJECT (self, "flush stop");

      /* discard queued pictures before data flows again */
      gst_vdp_vpp_flush (self);
      break;

    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      gdouble rate, applied_rate;
      GstFormat format;
      gint64 start, stop, time;

      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
          &format, &start, &stop, &time);

      /* segment is consulted from other threads (QoS), keep it locked */
      GST_OBJECT_LOCK (self);
      gst_segment_set_newsegment_full (&self->segment, update, rate,
          applied_rate, format, start, stop, time);
      GST_OBJECT_UNLOCK (self);
      break;
    }

    default:
      break;
  }

  /* every case above forwards the event in the default way */
  res = gst_pad_event_default (pad, event);

  gst_object_unref (self);
  return res;
}
/* Sink-pad getcaps: with a device present, advertise exactly the video
 * caps the device accepts; otherwise return a copy of the registered sink
 * pad template caps. */
static GstCaps *
gst_vdp_vpp_sink_getcaps (GstPad * pad)
{
  GstVdpVideoPostProcess *self =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstCaps *result;

  if (self->device != NULL) {
    result = gst_vdp_video_buffer_get_allowed_caps (self->device);
  } else {
    GstElementClass *klass = GST_ELEMENT_GET_CLASS (self);
    GstPadTemplate *templ;

    templ = gst_element_class_get_pad_template (klass, "sink");
    /* template caps are owned by the class; hand out a private copy */
    result = gst_caps_copy (gst_pad_template_get_caps (templ));
  }

  GST_DEBUG ("returning caps: %" GST_PTR_FORMAT, result);

  gst_object_unref (self);
  return result;
}
/* Chain function: applies QoS frame dropping, uploads non-native (raw YUV)
 * input into a GstVdpVideoBuffer, lazily creates the VDPAU mixer, queues
 * the buffer and drains as many output pictures as possible.
 * Ownership: 'buffer' is consumed on every path (passed on, replaced by the
 * uploaded copy, or unreffed on drop/error). */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstClockTime qostime;
  GstFlowReturn ret = GST_FLOW_OK;
  GError *err;

  GST_DEBUG ("chain");

  /* can only do QoS if the segment is in TIME */
  if (vpp->segment.format != GST_FORMAT_TIME)
    goto no_qos;

  /* QOS is done on the running time of the buffer, get it now */
  qostime = gst_segment_to_running_time (&vpp->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buffer));

  if (qostime != -1) {
    gboolean need_skip;
    GstClockTime earliest_time;

    /* lock for getting the QoS parameters that are set (in a different thread)
     * with the QOS events */
    GST_OBJECT_LOCK (vpp);
    earliest_time = vpp->earliest_time;
    /* check for QoS, don't perform conversion for buffers
     * that are known to be late. */
    need_skip = GST_CLOCK_TIME_IS_VALID (earliest_time) && qostime != -1 &&
        qostime <= earliest_time;
    GST_OBJECT_UNLOCK (vpp);

    if (need_skip) {
      GST_DEBUG_OBJECT (vpp, "skipping transform: qostime %" GST_TIME_FORMAT
          " <= %" GST_TIME_FORMAT, GST_TIME_ARGS (qostime),
          GST_TIME_ARGS (earliest_time));
      /* mark discont for next buffer */
      vpp->discont = TRUE;
      gst_buffer_unref (buffer);
      return GST_FLOW_OK;
    }
  }

no_qos:

  /* a previously dropped frame means the next one we do process starts a
   * discontinuity */
  if (vpp->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    vpp->discont = FALSE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  if (!vpp->native_input) {
    GstVdpVideoBuffer *video_buf;

    /* raw YUV input: copy the data into a VDPAU video surface buffer from
     * the pool */
    err = NULL;
    video_buf = (GstVdpVideoBuffer *)
        gst_vdp_buffer_pool_get_buffer (vpp->vpool, &err);
    if (G_UNLIKELY (!video_buf))
      goto video_buf_error;

    if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,
            vpp->width, vpp->height)) {
      gst_buffer_unref (GST_BUFFER (video_buf));
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Couldn't upload YUV data to vdpau"), (NULL));
      ret = GST_FLOW_ERROR;
      goto error;
    }

    /* carry flags/timestamps over, then continue with the uploaded buffer
     * in place of the original */
    gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

    gst_buffer_unref (buffer);
    buffer = GST_BUFFER (video_buf);
  }

  /* mixer is created lazily on the first processed buffer */
  if (G_UNLIKELY (vpp->mixer == VDP_INVALID_HANDLE)) {
    ret = gst_vdp_vpp_create_mixer (vpp);
    if (ret != GST_FLOW_OK)
      goto error;
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  ret = gst_vdp_vpp_drain (vpp);

done:
  gst_object_unref (vpp);
  return ret;

error:
  gst_buffer_unref (buffer);
  goto done;

video_buf_error:
  gst_buffer_unref (GST_BUFFER (buffer));
  /* err was filled in by gst_vdp_buffer_pool_get_buffer */
  gst_vdp_vpp_post_error (vpp, err);
  ret = GST_FLOW_ERROR;
  goto done;
}
/* Sink-pad setcaps: configures the element for either raw YUV input
 * (uploaded through a buffer pool) or native VDPAU video buffers, caches
 * frame geometry/par/interlacing, and negotiates source caps by
 * intersecting the equivalent output caps with what downstream allows.
 * When deinterlacing, the output framerate is doubled and 'interlaced' is
 * forced to FALSE on the source caps. */
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstCaps *video_caps = NULL;
  gboolean res = FALSE;

  GstCaps *allowed_caps, *output_caps, *src_caps;

  /* check if the input is non native */
  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "video/x-raw-yuv")) {
    /* raw YUV: remember the fourcc and prepare the upload buffer pool */
    if (!gst_structure_get_fourcc (structure, "format", &vpp->fourcc))
      goto done;
    vpp->native_input = FALSE;
    video_caps = gst_vdp_yuv_to_video_caps (caps);
    if (!video_caps)
      goto done;

    if (!vpp->vpool)
      vpp->vpool = gst_vdp_video_buffer_pool_new (vpp->device);

    gst_vdp_buffer_pool_set_caps (vpp->vpool, video_caps);

  } else {
    /* already VDPAU video buffers: no pool needed */
    vpp->native_input = TRUE;
    video_caps = gst_caps_ref (caps);

    if (vpp->vpool) {
      g_object_unref (vpp->vpool);
      vpp->vpool = NULL;
    }
  }

  structure = gst_caps_get_structure (video_caps, 0);
  if (!gst_structure_get_int (structure, "width", &vpp->width) ||
      !gst_structure_get_int (structure, "height", &vpp->height) ||
      !gst_structure_get_int (structure, "chroma-type",
          (gint *) & vpp->chroma_type))
    goto done;

  /* get interlaced flag */
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  /* extract par */
  if (gst_structure_has_field_typed (structure, "pixel-aspect-ratio",
          GST_TYPE_FRACTION)) {
    gst_structure_get_fraction (structure, "pixel-aspect-ratio", &vpp->par_n,
        &vpp->par_d);
    vpp->got_par = TRUE;
  } else
    vpp->got_par = FALSE;

  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
  if (G_UNLIKELY (!allowed_caps))
    goto null_allowed_caps;
  if (G_UNLIKELY (gst_caps_is_empty (allowed_caps)))
    goto empty_allowed_caps;
  GST_DEBUG ("allowed_caps: %" GST_PTR_FORMAT, allowed_caps);

  output_caps = gst_vdp_video_to_output_caps (video_caps);
  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (output_caps);

  if (gst_caps_is_empty (src_caps))
    goto not_negotiated;

  gst_pad_fixate_caps (vpp->srcpad, src_caps);

  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    /* one output frame per field -> double the framerate */
    if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_fraction_double (&fps_n, &fps_d);
      gst_caps_set_simple (src_caps, "framerate", GST_TYPE_FRACTION, fps_n,
          fps_d, NULL);
      vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
    }

    gst_caps_set_simple (src_caps, "interlaced", G_TYPE_BOOLEAN, FALSE, NULL);
  }

  GST_DEBUG ("src_caps: %" GST_PTR_FORMAT, src_caps);

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  gst_caps_unref (src_caps);

done:
  gst_object_unref (vpp);
  if (video_caps)
    gst_caps_unref (video_caps);
  return res;

null_allowed_caps:
  GST_ERROR_OBJECT (vpp, "Got null from gst_pad_get_allowed_caps");
  goto done;

empty_allowed_caps:
  GST_ERROR_OBJECT (vpp, "Got EMPTY caps from gst_pad_get_allowed_caps");

  gst_caps_unref (allowed_caps);
  goto done;

not_negotiated:
  gst_caps_unref (src_caps);
  GST_ERROR_OBJECT (vpp, "Couldn't find suitable output format");
  goto done;
}
/* GObject vmethod implementations */

/* Standard GObject property setter.  For mixer-backed features
 * (deinterlace method, noise reduction, sharpening, inverse telecine) the
 * corresponding VDPAU mixer feature is (de)activated live when the mixer
 * already exists; otherwise only the cached value changes and is applied
 * when the mixer is created. */
static void
gst_vdp_vpp_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (object);

  switch (property_id) {
    case PROP_DISPLAY:
      g_free (vpp->display);
      vpp->display = g_value_dup_string (value);
      break;
    case PROP_FORCE_ASPECT_RATIO:
      vpp->force_aspect_ratio = g_value_get_boolean (value);
      break;
    case PROP_DEINTERLACE_MODE:
      vpp->mode = g_value_get_enum (value);
      break;
    case PROP_DEINTERLACE_METHOD:
    {
      GstVdpDeinterlaceMethods oldvalue;

      oldvalue = vpp->method;
      vpp->method = g_value_get_enum (value);
      if (oldvalue == vpp->method)
        break;

      /* BOB needs no explicit mixer feature, so it is never
       * (de)activated */
      if (vpp->mixer != VDP_INVALID_HANDLE) {
        if (oldvalue != GST_VDP_DEINTERLACE_METHOD_BOB)
          gst_vdp_vpp_activate_deinterlace_method (vpp, oldvalue, FALSE);

        /* FIX: activate the NEW method; the old code passed 'oldvalue'
         * here, so the newly selected method was never enabled on a live
         * mixer */
        if (vpp->method != GST_VDP_DEINTERLACE_METHOD_BOB)
          gst_vdp_vpp_activate_deinterlace_method (vpp, vpp->method, TRUE);
      }
      break;
    }
    case PROP_NOISE_REDUCTION:
    {
      gfloat old_value;

      old_value = vpp->noise_reduction;
      vpp->noise_reduction = g_value_get_float (value);
      if (vpp->noise_reduction == old_value)
        break;

      if (vpp->mixer != VDP_INVALID_HANDLE) {
        /* 0.0 means "off": toggle the feature when crossing zero in
         * either direction, then push the new level */
        if (vpp->noise_reduction == 0.0)
          gst_vdp_vpp_activate_feature (vpp,
              VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION, FALSE);

        if (old_value == 0.0)
          gst_vdp_vpp_activate_feature (vpp,
              VDP_VIDEO_MIXER_FEATURE_NOISE_REDUCTION, TRUE);

        gst_vdp_vpp_set_attribute_float (vpp,
            VDP_VIDEO_MIXER_ATTRIBUTE_NOISE_REDUCTION_LEVEL,
            vpp->noise_reduction);
      }
      break;
    }
    case PROP_SHARPENING:
    {
      gfloat old_value;

      old_value = vpp->sharpening;
      vpp->sharpening = g_value_get_float (value);
      if (vpp->sharpening == old_value)
        break;

      if (vpp->mixer != VDP_INVALID_HANDLE) {
        /* same on/off-at-zero convention as noise reduction */
        if (vpp->sharpening == 0.0)
          gst_vdp_vpp_activate_feature (vpp,
              VDP_VIDEO_MIXER_FEATURE_SHARPNESS, FALSE);

        if (old_value == 0.0)
          gst_vdp_vpp_activate_feature (vpp,
              VDP_VIDEO_MIXER_FEATURE_SHARPNESS, TRUE);

        gst_vdp_vpp_set_attribute_float (vpp,
            VDP_VIDEO_MIXER_ATTRIBUTE_SHARPNESS_LEVEL, vpp->sharpening);
      }
      break;
    }
    case PROP_INVERSE_TELECINE:
    {
      vpp->inverse_telecine = g_value_get_boolean (value);

      if (vpp->mixer != VDP_INVALID_HANDLE) {
        gst_vdp_vpp_activate_feature (vpp,
            VDP_VIDEO_MIXER_FEATURE_INVERSE_TELECINE, vpp->inverse_telecine);
      }
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
/* Chain function: queues the incoming VDPAU video buffer and renders every
 * picture the deinterlacer can currently produce through the VDPAU mixer
 * into freshly allocated output buffers, honouring pixel-aspect-ratio and
 * the force-aspect-ratio property, then pushes them downstream. */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstFlowReturn ret = GST_FLOW_OK;

  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  /* FIX: '&current_pic' had been mangled into the HTML-entity residue
   * '¤t_pic' (&curren;), which is not valid C */
  while (gst_vdp_vpp_get_next_picture (vpp, &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, };
    GstVideoRectangle dest_r = { 0, };
    gint par_n, par_d;
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    ret = gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
        &outbuf);
    if (ret != GST_FLOW_OK)
      break;

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
    if (!gst_structure_get_int (structure, "width", &src_r.w) ||
        !gst_structure_get_int (structure, "height", &src_r.h))
      goto invalid_caps;

    /* compensate for non-square pixels by centering a PAR-scaled source
     * window */
    if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
            &par_d)) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      /* letterbox/pillarbox: keep the source aspect inside the output */
      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status = device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE,
        NULL, current_pic.structure, video_surfaces_past_count,
        video_surfaces_past, current_pic.buf->surface,
        video_surfaces_future_count, video_surfaces_future, NULL,
        outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not post process frame"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      /* FIX: outbuf leaked here before — release it on the render-error
       * path */
      gst_buffer_unref (GST_BUFFER (outbuf));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    /* propagate the flags downstream cares about */
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
    if (ret != GST_FLOW_OK)
      break;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    ret = GST_FLOW_ERROR;
    break;
  }

done:
  gst_object_unref (vpp);
  return ret;
}