/* Method: requires_clock?
 * Returns: true if the element needs a clock to operate, false otherwise. */
static VALUE
rg_requires_clock_p(VALUE self)
{
    gboolean needs_clock = gst_element_requires_clock(SELF(self));
    return CBOOL2RVAL(needs_clock);
}
/* Method: polling?
 * Returns: the boolean value of the bin's polling field. */
static VALUE
rb_gst_bin_polling_p(VALUE self)
{
    return CBOOL2RVAL(SELF(self)->polling);
}
/*
 * Method: provided_clock
 *
 * Gets the clock currently provided by the bin.
 *
 * Returns: a Gst::Clock object, or nil if the bin provides none.
 */
static VALUE
rb_gst_bin_get_provided_clock(VALUE self)
{
    GstClock *clock = SELF(self)->provided_clock;
    return GST_CLOCK2RVAL(clock);
}
/* Method: get
 * Returns: the Gst::Caps object materialized from the static caps. */
static VALUE
rg_get(VALUE self)
{
    GstCaps *caps = gst_static_caps_get(SELF(self));
    return GST_CAPS2RVAL(caps);
}
/* Method: children_cookie
 * Returns: the bin's children cookie as an unsigned integer
 * (taken from GST_BIN_CHILDREN_COOKIE). */
static VALUE
rb_gst_bin_get_children_cookie(VALUE self)
{
    return UINT2NUM(GST_BIN_CHILDREN_COOKIE(SELF(self)));
}
/* Method: source
 * Returns: the object that posted the message (GST_MESSAGE_SRC),
 * wrapped as a Ruby object. */
static VALUE
get_source(VALUE self)
{
    GstObject *src = GST_MESSAGE_SRC(SELF(self));
    return GOBJ2RVAL(src);
}
/* Method: structure
 * Returns: the message's structure wrapped as a Ruby object.
 * The (gpointer) cast drops the const qualifier returned by
 * gst_message_get_structure() because GST_STRUCT2RVAL takes a gpointer. */
static VALUE
get_structure(VALUE self)
{
    return GST_STRUCT2RVAL((gpointer)gst_message_get_structure(SELF(self)));
}
/*
 * Method: set_index(index)
 * index: the index to set, as a Gst::Index.
 *
 * Installs the given index on the element.
 *
 * Returns: self.
 */
static VALUE
rg_set_index(VALUE self, VALUE index)
{
    gst_element_set_index(SELF(self), RGST_INDEX(index));
    return self;
}
/*
 * Method: remove_pad(pad)
 * pad: the Gst::Pad to remove from the element.
 *
 * Detaches the given pad from the element.
 *
 * Returns: self.
 */
static VALUE
rg_remove_pad(VALUE self, VALUE pad)
{
    gst_element_remove_pad(SELF(self), RGST_PAD(pad));
    return self;
}
/*
 * Method: base_time
 *
 * Queries the element's base time via gst_element_get_base_time().
 *
 * Returns: the base time in nanoseconds, as an unsigned 64-bit integer.
 */
static VALUE
rg_base_time(VALUE self)
{
    GstClockTime base = gst_element_get_base_time(SELF(self));
    return ULL2NUM(base);
}
/*
 * Method: set_base_time(time)
 * time: time to set (in nanoseconds).
 *
 * Sets the base time of the element. This method can be used when handling
 * discont events. You can only call this method on an element with a clock
 * in Gst::Element::STATE_PAUSED or Gst::Element::STATE_PLAYING. You might
 * want to have a look at Gst::Element#adjust_time, if you want to adjust by
 * a difference as that is more accurate.
 *
 * Returns: self.
 */
static VALUE
rg_set_base_time(VALUE self, VALUE time)
{
    gst_element_set_base_time(SELF(self), NUM2ULL(time));
    /* Return self as documented above, consistent with the other element
     * setters (set_clock, set_index); this previously returned Qnil,
     * contradicting the documented contract. */
    return self;
}
/* Method: indexable?
 * Returns: true if the element can be indexed, false otherwise. */
static VALUE
rg_indexable_p(VALUE self)
{
    gboolean indexable = gst_element_is_indexable(SELF(self));
    return CBOOL2RVAL(indexable);
}
/*
 * Method: set_clock(clock)
 * clock: the Gst::Clock to set for the element.
 *
 * Installs the given clock on the element.
 *
 * Returns: self.
 */
static VALUE
rg_set_clock(VALUE self, VALUE clock)
{
    gst_element_set_clock(SELF(self), RVAL2GST_CLOCK(clock));
    return self;
}
/* Method: provides_clock?
 * Returns: true if the element can provide a clock, false otherwise. */
static VALUE
rg_provides_clock_p(VALUE self)
{
    gboolean has_clock = gst_element_provides_clock(SELF(self));
    return CBOOL2RVAL(has_clock);
}
/* Method: timestamp
 * Returns: the message timestamp (GST_MESSAGE_TIMESTAMP) as an
 * unsigned 64-bit integer. */
static VALUE
get_timestamp(VALUE self)
{
    return ULL2NUM(GST_MESSAGE_TIMESTAMP(SELF(self)));
}
/* Method: type
 * Returns: the message type (GST_MESSAGE_TYPE) converted to its
 * Ruby enum representation. */
static VALUE
get_type(VALUE self)
{
    return GST_MSG_TYPE2RVAL(GST_MESSAGE_TYPE(SELF(self)));
}
/* Method: timestamp=(timestamp)
 * timestamp: new timestamp in nanoseconds.
 *
 * Writes the timestamp directly into the message via the
 * GST_MESSAGE_TIMESTAMP lvalue macro.
 *
 * Returns: nil. */
static VALUE
set_timestamp(VALUE self, VALUE timestamp)
{
    GST_MESSAGE_TIMESTAMP(SELF(self)) = NUM2ULL(timestamp);
    return Qnil;
}
/* Method: type=(type)
 * type: new message type.
 *
 * Writes the type directly into the message via the GST_MESSAGE_TYPE
 * lvalue macro.
 *
 * Returns: nil. */
static VALUE
set_type(VALUE self, VALUE type)
{
    GST_MESSAGE_TYPE(SELF(self)) = RVAL2GST_MSG_TYPE(type);
    return Qnil;
}
/* Method: source=(source)
 * source: the new source object.
 *
 * Writes the source directly into the message via the GST_MESSAGE_SRC
 * lvalue macro.
 *
 * Returns: nil. */
static VALUE
set_source(VALUE self, VALUE source)
{
    GST_MESSAGE_SRC(SELF(self)) = RVAL2GST_OBJ(source);
    return Qnil;
}
/* Method: have_mutex?
 * Returns: true if the message has a lock (GST_MESSAGE_GET_LOCK is
 * non-NULL), false otherwise. */
static VALUE
have_mutex_p(VALUE self)
{
    if (GST_MESSAGE_GET_LOCK(SELF(self)))
        return Qtrue;
    return Qfalse;
}
/* Method: description
 * Returns: the object's string field as a Ruby String (nil-safe via
 * CSTR2RVAL). */
static VALUE
rg_description(VALUE self)
{
    const gchar *text = SELF(self)->string;
    return CSTR2RVAL(text);
}
/* Method: unlock
 * Releases the message lock (GST_MESSAGE_UNLOCK).
 *
 * Returns: nil. */
static VALUE
unlock(VALUE self)
{
    GST_MESSAGE_UNLOCK(SELF(self));
    return Qnil;
}
/*****************************************************************************
 * FUNCTION
 *  media_player_do_recover
 * DESCRIPTION
 *  Recovers video playback by jumping to a usable key (I) frame.
 *  Three strategies are tried in order:
 *    1. key_frame_time > 0: flush the decoder and both video queues, then
 *       skip non-I frames until the stream reaches key_frame_time
 *       ("jump to I frame in file").
 *    2. Otherwise, scan the pending video queue for the latest key frame
 *       whose display time is within (decode_time + diff) and drop the
 *       frames queued ahead of it ("jump to I frame in queue").
 *    3. Otherwise, ask the decoder whether its own input queue holds an
 *       I frame within the window and, if so, flush the frames before it
 *       ("jump to I frame in decoder").
 *  In every non-error path a recovery-reset timer is armed before return.
 * PARAMETERS
 *  mp             [IN] media player instance
 *  key_frame_time [IN] target key-frame time; > 0 selects strategy 1
 * RETURNS
 *  MED_E_WRONG_STATE if called in a state where recovery is not allowed,
 *  MED_S_OK otherwise.
 *****************************************************************************/
media_error_t media_player_do_recover(media_player_t* mp, media_time_t key_frame_time)
{
    /*----------------------------------------------------------------*/
    /* Local Variables                                                */
    /*----------------------------------------------------------------*/
    media_player_cntx_struct* self = SELF(mp, media_player_cntx_struct, itf);
    VIDEO_DECODER_QUERY_I_FRM_T query;
    VIDEO_ERROR_TYPE_T error;
    kal_bool found;
    kal_uint64 decode_time, diff;
    media_data_t *node = NULL, *head = NULL, *found_node = NULL;
    VIDEO_COMPONENT_TYPE_T *dec_handle = self->decoder_handle;
    kal_uint32 event_group, format_index;
    media_codec_type_t codec_type;

    /*----------------------------------------------------------------*/
    /* Code Body                                                      */
    /*----------------------------------------------------------------*/
    kal_trace(TRACE_FUNC, VID_TRC_MEDIA_PLAYER_JUMP_TO_I_FRAME, self->state, (kal_uint32)key_frame_time);

    /* Recovery is rejected unless playback is in a state where decode is
     * active; note this early return happens BEFORE the mutex is taken. */
    if (self->state == MPLY_STATE_CLOSED ||
        self->state == MPLY_STATE_OPENED ||
        self->state == MPLY_STATE_PREPARED ||
        self->state == MPLY_STATE_SEEKING ||
        self->state == MPLY_STATE_SEEK_DONE)
    {
        return MED_E_WRONG_STATE;
    }
    /* A valid video stream must exist before attempting recovery. */
    ASSERT(self->vid_stream_index != MEDIA_PLAYER_INVALID_INDEX);
    kal_take_mutex(media_player_mutex);

    /**************************************************
    ** Strategy 1: Jump to I frame in file
    **************************************************/
    if (key_frame_time > 0)
    {
        /* Ask the decoder to flush any frame it is holding. */
        error = dec_handle->pfnSetParameter(VIDEO_PARAM_FLUSH_FRM, NULL);
        ASSERT(error == VIDEO_ERROR_NONE || error == VIDEO_ERROR_NO_FRAME_TO_FLUSH);

        /* Wait until flush frame is done. */
        if (error == VIDEO_ERROR_NONE)
        {
            /* The mutex must be released here so the decoder task can
             * acquire it while releasing its frame; we block on the
             * flush-done event and re-take the mutex afterwards. */
            kal_give_mutex(media_player_mutex);
            MPLY_SET_FLAG(MPLY_FLAG_WAIT_FLUSH_FRAME);
            kal_retrieve_eg_events(media_player_comp_eg, MEDIA_PLAYER_EG_DECODER_FLUSH_FRAME_DONE, KAL_OR_CONSUME, &event_group, KAL_SUSPEND);
            MPLY_UNSET_FLAG(MPLY_FLAG_WAIT_FLUSH_FRAME);
            kal_take_mutex(media_player_mutex);
        }

        /* Drop everything queued (both the written and pending queues)
         * and invalidate in-flight buffers via the life-cycle id bump. */
        media_player_reset_queue(mp, MPLY_QUEUE_VIDEO_WRITTEN);
        media_player_reset_queue(mp, MPLY_QUEUE_VIDEO);
        self->vid_life_cycle_id ++;
        self->vid_queue_num = 0;
        MPLY_UNSET_FLAG(MPLY_FLAG_VID_BUFF_FULL);

        /* Enable skip-non-I-frame mode until key_frame_time is reached. */
        MPLY_SET_FLAG(MPLY_FLAG_SKIP_NON_I_FRAME);
        self->key_frame_time = key_frame_time;

        goto finish;
    }

    /**************************************************
    ** Strategy 2: Jump to I frame in queue
    **************************************************/
    /* diff is a per-format customization value (cust index 2) bounding how
     * far ahead of the current decode time a candidate I frame may lie. */
    codec_type = self->vid_stream.dec_config.codec;
    format_index = mpl_get_vid_custom_format_index(codec_type);
    diff = mply_custom_get_cust(format_index, 2);
    ASSERT(diff > 0);

    error = self->decoder_handle->pfnGetParameter(VIDEO_PARAM_QUERY_DECODE_TIME, &decode_time);
    decode_time = MEDIA_PLAYER_COMPTIME_TO_TIME(decode_time);

    found = KAL_FALSE;

    /* Walk the circular video queue once from its head and remember the
     * LAST key frame whose display time is within decode_time + diff. */
    if (self->video_queue)
    {
        node = head = self->video_queue->next;
        do
        {
            if ((node->flags & MEDIA_DATA_FLAG_KEY_FRAME) && (node->display_time <= decode_time + diff))
            {
                found = KAL_TRUE;
                found_node = node;
            }
            node = node->next;
        } while (node != head);
    }

    kal_trace(TRACE_FUNC, VID_TRC_MEDIA_PLAYER_JUMP_INFO, found, (kal_uint32)decode_time, (kal_uint32)diff);

    /* If an I frame was found, drop the P/B frames queued ahead of it. */
    if (found && found_node != NULL)
    {
        /* Flush all the frames held inside the decoder. */
        error = dec_handle->pfnSetParameter(VIDEO_PARAM_FLUSH_FRM, NULL);
        ASSERT(error == VIDEO_ERROR_NONE || error == VIDEO_ERROR_NO_FRAME_TO_FLUSH);

        /* Wait until flush frame is done (same release/re-take dance as
         * in strategy 1 so the decoder task is not deadlocked). */
        if (error == VIDEO_ERROR_NONE)
        {
            kal_give_mutex(media_player_mutex);
            MPLY_SET_FLAG(MPLY_FLAG_WAIT_FLUSH_FRAME);
            kal_retrieve_eg_events(media_player_comp_eg, MEDIA_PLAYER_EG_DECODER_FLUSH_FRAME_DONE, KAL_OR_CONSUME, &event_group, KAL_SUSPEND);
            MPLY_UNSET_FLAG(MPLY_FLAG_WAIT_FLUSH_FRAME);
            kal_take_mutex(media_player_mutex);
        }

        /* Flush the written queue only; the pending queue is trimmed
         * below so the found I frame (and what follows) survives. */
        media_player_reset_queue(mp, MPLY_QUEUE_VIDEO_WRITTEN);
        self->vid_life_cycle_id ++;
        self->vid_queue_num = 0;
        MPLY_UNSET_FLAG(MPLY_FLAG_VID_BUFF_FULL);

        /* Release every node queued before the found I frame.
         * NOTE(review): vid_queue_num was just reset to 0, so these
         * decrements drive it negative — presumably reconciled elsewhere;
         * confirm against the queue accounting. */
        head = self->video_queue->next;
        while(head != found_node)
        {
            media_player_remove_from_queue(&self->video_queue, head);
            head->release(head);
            self->vid_queue_num --;
            head = self->video_queue->next;
        }

        goto finish;
    }

    /**************************************************
    ** Strategy 3: Jump to I frame in decoder
    **************************************************/
    /* Ask whether the decoder's own input queue contains an I frame
     * within the same time window. */
    query.u8TimeDifference = MEDIA_PLAYER_TIME_TO_COMPTIME(diff);
    query.fgRet = KAL_FALSE;
    error = self->decoder_handle->pfnGetParameter(VIDEO_PARAM_IS_I_FRM_IN_QUEUE, &query);
    ASSERT(error == VIDEO_ERROR_NONE);

    if (query.fgRet)
    {
        /* NOTE(review): unlike the other decoder calls, this error result
         * is never checked/ASSERTed — confirm whether failure is benign. */
        error = self->decoder_handle->pfnSetParameter(VIDEO_PARAM_FLUSH_FRM_BEFORE_I, NULL);
        /* This goto is redundant (it targets the label that immediately
         * follows) but kept for symmetry with the other strategies. */
        goto finish;
    }

finish:
    /* Arm the recovery-reset timer in every successful path. */
    mpl_start_timer(
        MPL_PLAYER_RECOVER_TIMER,
        MPLY_RESET_RECOVER_ELAPSE,
        media_player_reset_recover,
        self);
    kal_give_mutex(media_player_mutex);
    return MED_S_OK;
}
/* Method: have_cond?
 * Returns: true if the message has a condition variable
 * (GST_MESSAGE_COND is non-NULL), false otherwise. */
static VALUE
have_cond_p(VALUE self)
{
    if (GST_MESSAGE_COND(SELF(self)))
        return Qtrue;
    return Qfalse;
}
/* Method: child_bus
 * Returns: the bin's child bus wrapped as a Gst::Bus object. */
static VALUE
rb_gst_bin_get_child_bus(VALUE self)
{
    GstBus *bus = SELF(self)->child_bus;
    return GST_BUS2RVAL(bus);
}
/* Method: wait
 * Blocks on the message's condition variable (GST_MESSAGE_WAIT).
 *
 * Returns: nil. */
static VALUE
cond_wait(VALUE self)
{
    GST_MESSAGE_WAIT(SELF(self));
    return Qnil;
}
/* Method: clock_dirty?
 * Returns: the boolean value of the bin's clock_dirty field. */
static VALUE
rb_gst_bin_clock_dirty_p(VALUE self)
{
    return CBOOL2RVAL(SELF(self)->clock_dirty);
}
/* Method: signal
 * Signals the message's condition variable (GST_MESSAGE_SIGNAL).
 *
 * Returns: nil. */
static VALUE
cond_signal(VALUE self)
{
    GST_MESSAGE_SIGNAL(SELF(self));
    return Qnil;
}
/* Method: clock_provider
 * Returns: the element providing the bin's clock, wrapped as a
 * Gst::Element object. */
static VALUE
rb_gst_bin_get_clock_provider(VALUE self)
{
    GstElement *provider = SELF(self)->clock_provider;
    return GST_ELEMENT2RVAL(provider);
}
/* Method: opacity
 * Returns: the markup annotation's opacity as a Float. */
static VALUE
annot_markup_get_opacity(VALUE self)
{
    gdouble opacity = poppler_annot_markup_get_opacity(SELF(self));
    return rb_float_new(opacity);
}