/**
 * Set the value of Java's Context::contextHandle.
 *
 * @param env JNI environment for the current thread
 * @param obj the Java Context object whose handle field is written
 * @param s   native pointer to store into the handle field
 */
static void
put_context (JNIEnv *env, jobject obj, void *s)
{
  /* Resolve the field IDs before touching the object. */
  ensure_context (env, obj);

  jlong handle = (jlong) s;
  env->SetLongField (obj, contextptrFID, handle);
}
/* GstBaseTransform::start vfunc: succeeds iff an MSDK context
 * could be acquired for this element. */
static gboolean
gst_msdkvpp_start (GstBaseTransform * trans)
{
  return ensure_context (trans) ? TRUE : FALSE;
}
/* Decode one VP8 picture from the raw frame payload.
 *
 * Allocates a new picture, transfers ownership to priv->current_picture,
 * prepares the quantization matrix and probability tables, fills the VA
 * picture parameters, and finally submits the slice data.
 *
 * @param decoder  the VP8 decoder instance
 * @param buf      pointer to the frame payload
 * @param buf_size size of @buf in bytes
 * @return GST_VAAPI_DECODER_STATUS_SUCCESS, or an error status from any
 *         failing stage (context setup, allocation, parameter filling)
 */
static GstVaapiDecoderStatus
decode_picture (GstVaapiDecoderVp8 * decoder, const guchar * buf,
    guint buf_size)
{
  GstVaapiDecoderVp8Private *const priv = &decoder->priv;
  GstVaapiPicture *picture;
  GstVaapiDecoderStatus status;

  /* Make sure a VA context matching the stream parameters exists. */
  status = ensure_context (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  /* Create new picture */
  picture = GST_VAAPI_PICTURE_NEW (VP8, decoder);
  if (!picture) {
    GST_ERROR ("failed to allocate picture");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  /* Hand ownership to priv->current_picture; the local ref is dropped but
   * `picture` stays valid through the reference held by current_picture. */
  gst_vaapi_picture_replace (&priv->current_picture, picture);
  gst_vaapi_picture_unref (picture);

  status = ensure_quant_matrix (decoder, picture);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  status = ensure_probability_table (decoder, picture);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  init_picture (decoder, picture);
  if (!fill_picture (decoder, picture))
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

  return decode_slice (decoder, picture, buf, buf_size);
}
/**
 * Set the value of Java's Context::contextHandle.
 *
 * @param env JNI environment for the current thread
 * @param obj the Java Context object whose handle field is written
 * @param s   native pointer to store into the handle field
 */
static void
put_context (JNIEnv *env, jobject obj, void *s)
{
  /* Field IDs must be resolved before the write. */
  ensure_context (env, obj);
  env->SetLongField (obj, ctx_handle_fid, reinterpret_cast<jlong> (s));
}
/**
 * Get the value of Java's Context::contextHandle.
 *
 * @param env JNI environment for the current thread
 * @param obj the Java Context object whose handle field is read
 * @return the stored native pointer (may be NULL if never set)
 */
static void *
get_context (JNIEnv *env, jobject obj)
{
  ensure_context (env, obj);
  return (void *) env->GetLongField (obj, ctx_handle_fid);
}
/* Return the native EGLContext handle backing @display, or EGL_NO_CONTEXT
 * when no context could be created. */
EGLContext
gst_vaapi_display_egl_get_gl_context (GstVaapiDisplayEGL * display)
{
  g_return_val_if_fail (GST_VAAPI_IS_DISPLAY_EGL (display), EGL_NO_CONTEXT);

  if (!ensure_context (display))
    return EGL_NO_CONTEXT;
  return display->egl_context->base.handle.p;
}
/**
 * Get the value of Java's Context::contextHandle.
 *
 * @param env JNI environment for the current thread
 * @param obj the Java Context object whose handle field is read
 * @return the stored native pointer (may be NULL if never set)
 */
static void *
get_context (JNIEnv *env, jobject obj)
{
  ensure_context (env, obj);
  jlong handle = env->GetLongField (obj, contextptrFID);
  return (void *) handle;
}
/* GstVaapiDisplay::get_visual_id vfunc: report the EGL config's visual id,
 * or 0 when no EGL context is available. */
static guintptr
gst_vaapi_display_egl_get_visual_id (GstVaapiDisplay * base_display,
    GstVaapiWindow * window)
{
  GstVaapiDisplayEGL *const self = GST_VAAPI_DISPLAY_EGL (base_display);

  return ensure_context (self) ? self->egl_context->config->visual_id : 0;
}
/* Decode (start) one JPEG picture from a SOF segment.
 *
 * Parses the frame header, (re)creates the VA context for the new
 * dimensions, submits any previously pending picture, then allocates and
 * fills the new current picture.
 *
 * @param decoder  the JPEG decoder instance
 * @param profile  SOF marker value; only baseline (SOF0) is supported
 * @param buf      pointer to the frame header payload
 * @param buf_size size of @buf in bytes
 * @param pts      presentation timestamp to stamp onto the picture
 * @return GST_VAAPI_DECODER_STATUS_SUCCESS or an error status
 */
static GstVaapiDecoderStatus
decode_picture (GstVaapiDecoderJpeg *decoder, guint8 profile, guchar *buf,
    guint buf_size, GstClockTime pts)
{
  GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
  GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
  GstVaapiPicture *picture;
  GstVaapiDecoderStatus status;

  /* Only baseline JPEG is supported by the VA entrypoint used here. */
  switch (profile) {
    case GST_JPEG_MARKER_SOF_MIN:
      priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
      break;
    default:
      GST_ERROR("unsupported profile %d", profile);
      return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
  }

  memset(frame_hdr, 0, sizeof(*frame_hdr));
  if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf, buf_size, 0)) {
    GST_ERROR("failed to parse image");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
  }
  priv->height = frame_hdr->height;
  priv->width = frame_hdr->width;

  /* (Re)create the VA context to match the parsed dimensions. */
  status = ensure_context(decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
    GST_ERROR("failed to reset context");
    return status;
  }

  /* Flush the previous picture before starting a new one. */
  if (priv->current_picture && !decode_current_picture(decoder))
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

  picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
  if (!picture) {
    GST_ERROR("failed to allocate picture");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  /* Ownership moves to priv->current_picture; `picture` remains valid
   * through that reference after the local unref. */
  gst_vaapi_picture_replace(&priv->current_picture, picture);
  gst_vaapi_picture_unref(picture);

  if (!fill_picture(decoder, picture, frame_hdr))
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

  /* Update presentation time */
  picture->pts = pts;
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* decide_allocation: make sure a GL context is available, then drop any
 * cached upload/convert helpers so they get re-created against the
 * (possibly new) context on the next buffer.
 *
 * @param self  the stereo-split element
 * @param query the allocation query (currently unused here)
 * @return TRUE on success, FALSE if no GL context could be obtained
 */
static gboolean
stereosplit_decide_allocation (GstGLStereoSplit * self, GstQuery * query)
{
  if (!ensure_context (self))
    return FALSE;

  /* gst_object_replace() is NULL-safe, so the previous `if (ptr)` guards
   * around these calls were redundant. */
  gst_object_replace ((GstObject **) & self->upload, NULL);
  gst_object_replace ((GstObject **) & self->convert, NULL);

  return TRUE;
}
/* Initialize @ctx against @display/@config and create the underlying EGL
 * context, optionally sharing state with @gl_parent_context.
 * Returns TRUE on success. */
static gboolean
egl_context_init (EglContext * ctx, EglDisplay * display, EglConfig * config,
    EGLContext gl_parent_context)
{
  egl_object_replace (&ctx->display, display);
  egl_object_replace (&ctx->config, config);

  /* Bind the API (GL vs GLES) selected by the config, when one is given. */
  if (config)
    eglBindAPI (config->gl_api);

  /* Short-circuits exactly like the original guard chain. */
  return ensure_vtable (ctx) && ensure_context (ctx, gl_parent_context);
}
/* Transform caps through the upload -> color-convert -> view-convert chain
 * (or the reverse chain for the source direction).
 *
 * Ownership note: each gst_gl_*_transform_caps() call returns a new caps
 * ref; the intermediate result is unreffed after it has been fed to the
 * next stage, so only the final `next_caps` is returned to the caller.
 * The caller's original @caps is NOT unreffed (the first stage only reads
 * it; `caps = next_caps` merely re-points the local).
 *
 * @param self      the stereo-split element
 * @param direction pad direction the transformation starts from
 * @param caps      caps to transform (borrowed reference)
 * @param filter    optional filter caps applied to the first stage
 * @return newly-allocated transformed caps, or NULL without a GL context
 */
static GstCaps *
stereosplit_transform_caps (GstGLStereoSplit * self, GstPadDirection direction,
    GstCaps * caps, GstCaps * filter)
{
  GstCaps *next_caps;

  /* FIXME: Is this the right way to ensure a context here ? */
  if (!ensure_context (self))
    return NULL;

  if (direction == GST_PAD_SINK) {
    /* sink -> src: upload, then color convert, then view convert. */
    next_caps = gst_gl_upload_transform_caps (self->context, direction, caps,
        filter);
    caps = next_caps;

    next_caps = gst_gl_color_convert_transform_caps (self->context, direction,
        caps, NULL);
    gst_caps_unref (caps);
    caps = next_caps;

    next_caps = gst_gl_view_convert_transform_caps (self->viewconvert,
        direction, caps, NULL);
    gst_caps_unref (caps);
  } else {
    /* src -> sink: same chain in reverse order. */
    next_caps = gst_gl_view_convert_transform_caps (self->viewconvert,
        direction, caps, filter);
    caps = next_caps;

    next_caps = gst_gl_color_convert_transform_caps (self->context, direction,
        caps, NULL);
    gst_caps_unref (caps);
    caps = next_caps;

    next_caps = gst_gl_upload_transform_caps (self->context, direction, caps,
        NULL);
    gst_caps_unref (caps);
  }

  return next_caps;
}
/* GstVaapiDecoder::start_frame vfunc for JPEG.
 *
 * Once a SOF has been parsed, (re)creates the VA context, allocates the
 * picture for the current frame, fills the VA picture parameters and
 * quantization tables, and stamps the codec frame's pts onto the picture.
 *
 * @param base_decoder the base decoder instance
 * @param base_unit    the decoder unit that triggered the frame start
 * @return GST_VAAPI_DECODER_STATUS_SUCCESS or an error status
 */
static GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_start_frame (GstVaapiDecoder * base_decoder,
    GstVaapiDecoderUnit * base_unit)
{
  GstVaapiDecoderJpeg *const decoder =
      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);
  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
  GstVaapiPicture *picture;
  GstVaapiDecoderStatus status;

  /* Nothing to do until a frame header (SOF) has been seen. */
  if (!VALID_STATE (decoder, GOT_SOF))
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

  status = ensure_context (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
    GST_ERROR ("failed to reset context");
    return status;
  }

  picture = GST_VAAPI_PICTURE_NEW (JPEGBaseline, decoder);
  if (!picture) {
    GST_ERROR ("failed to allocate picture");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  /* Ownership moves to priv->current_picture; `picture` stays valid
   * through that reference after the local unref. */
  gst_vaapi_picture_replace (&priv->current_picture, picture);
  gst_vaapi_picture_unref (picture);

  if (!fill_picture (decoder, picture, &priv->frame_hdr))
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

  status = fill_quantization_table (decoder, picture);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  /* Update presentation time */
  picture->pts = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Return @display's EglContext, creating it on demand; NULL on failure. */
EglContext *
gst_vaapi_display_egl_get_context (GstVaapiDisplayEGL * display)
{
  if (!ensure_context (display))
    return NULL;
  return display->egl_context;
}
/* Decode one VC-1 frame from its raw (@rbdu) and emulation-prevented
 * (@ebdu) BDUs.
 *
 * Submits any pending picture, allocates the new current picture, parses
 * the frame layer, derives POC/pts, fills the VA picture parameters and
 * attaches the single frame slice.
 *
 * @param decoder the VC-1 decoder instance
 * @param rbdu    raw BDU (startcode-emulation bytes removed)
 * @param ebdu    escaped BDU as found in the bitstream
 * @return GST_VAAPI_DECODER_STATUS_SUCCESS or an error status
 */
static GstVaapiDecoderStatus
decode_frame (GstVaapiDecoderVC1 * decoder, GstVC1BDU * rbdu, GstVC1BDU * ebdu)
{
  GstVaapiDecoderVC1Private *const priv = decoder->priv;
  GstVC1SeqHdr *const seq_hdr = &priv->seq_hdr;
  GstVC1FrameHdr *const frame_hdr = &priv->frame_hdr;
  GstVC1ParserResult result;
  GstVaapiPicture *picture;
  GstVaapiSlice *slice;
  GstVaapiDecoderStatus status;
  VASliceParameterBufferVC1 *slice_param;
  GstClockTime pts;
  gint32 poc;

  status = ensure_context (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
    GST_DEBUG ("failed to reset context");
    return status;
  }

  /* Submit the previous picture before starting a new one. */
  if (priv->current_picture) {
    if (!decode_current_picture (decoder))
      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
  }

  picture = GST_VAAPI_PICTURE_NEW (VC1, decoder);
  if (!picture) {
    GST_DEBUG ("failed to allocate picture");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  /* Hand ownership to priv->current_picture immediately so the picture is
   * not leaked on the error paths below; this matches the pattern used by
   * the other decoders in this file. Previously the replace happened only
   * at the end, leaking `picture` on every intermediate failure. */
  gst_vaapi_picture_replace (&priv->current_picture, picture);
  gst_vaapi_picture_unref (picture);

  if (!gst_vc1_bitplanes_ensure_size (priv->bitplanes, seq_hdr)) {
    GST_DEBUG ("failed to allocate bitplanes");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }

  memset (frame_hdr, 0, sizeof (*frame_hdr));
  result = gst_vc1_parse_frame_header (rbdu->data + rbdu->offset, rbdu->size,
      frame_hdr, seq_hdr, priv->bitplanes);
  if (result != GST_VC1_PARSER_OK) {
    GST_DEBUG ("failed to parse frame layer");
    return get_status (result);
  }

  switch (frame_hdr->ptype) {
    case GST_VC1_PICTURE_TYPE_I:
      picture->type = GST_VAAPI_PICTURE_TYPE_I;
      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
      break;
    case GST_VC1_PICTURE_TYPE_SKIPPED:
    case GST_VC1_PICTURE_TYPE_P:
      picture->type = GST_VAAPI_PICTURE_TYPE_P;
      GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
      break;
    case GST_VC1_PICTURE_TYPE_B:
      picture->type = GST_VAAPI_PICTURE_TYPE_B;
      priv->successive_bfrm_cnt += 1;
      break;
    case GST_VC1_PICTURE_TYPE_BI:
      picture->type = GST_VAAPI_PICTURE_TYPE_BI;
      priv->successive_bfrm_cnt += 1;
      break;
    default:
      GST_DEBUG ("unsupported picture type %d", frame_hdr->ptype);
      return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
  }

  /* Since the DPB only stores reference pictures, an always-increasing POC
   * works here; but we should explicitly calculate the pts of frames. */
  if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture))
    picture->poc = priv->frm_cnt - 1;
  else
    picture->poc = priv->frm_cnt;
  gst_vaapi_decoder_get_framerate (GST_VAAPI_DECODER_CAST (decoder),
      &priv->fps_n, &priv->fps_d);
  pts = gst_util_uint64_scale (picture->poc, GST_SECOND * priv->fps_d,
      priv->fps_n);

  if (GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
    if (priv->successive_bfrm_cnt) {
      /* Re-stamp the reference picture that was held back behind the run
       * of B/BI frames. */
      poc = priv->frm_cnt - priv->successive_bfrm_cnt - 1;
      pts = gst_util_uint64_scale (picture->poc, GST_SECOND * priv->fps_d,
          priv->fps_n);
      gst_vaapi_dpb_reset_pts (priv->dpb, poc, pts);
      priv->successive_bfrm_cnt = 0;
    }
  }
  picture->pts = pts;
  priv->frm_cnt++;

  if (!fill_picture (decoder, picture))
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

  slice = GST_VAAPI_SLICE_NEW (VC1, decoder, ebdu->data + ebdu->sc_offset,
      ebdu->size + ebdu->offset - ebdu->sc_offset);
  if (!slice) {
    GST_DEBUG ("failed to allocate slice");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  gst_vaapi_picture_add_slice (picture, slice);

  /* Fill in VASliceParameterBufferVC1 */
  slice_param = slice->param;
  slice_param->macroblock_offset = 8 * (ebdu->offset - ebdu->sc_offset) +
      frame_hdr->header_size;
  slice_param->slice_vertical_position = 0;
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}