static void
gst_dirac_parse_init (GstDiracParse * diracparse)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (diracparse), 13);
  gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (diracparse), FALSE);
  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (diracparse));
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (diracparse));
}
static void
gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 8);
  gst_ac3_parse_reset (ac3parse);
  ac3parse->baseparse_chainfunc =
      GST_BASE_PARSE_SINK_PAD (GST_BASE_PARSE (ac3parse))->chainfunc;
  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (ac3parse));
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (ac3parse));
}
static void
gst_mpegv_parse_init (GstMpegvParse * parse)
{
  parse->config_flags = FLAG_NONE;
  gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (parse), FALSE);
}
static void
gst_dirac_parse_init (GstDiracParse * diracparse,
    GstDiracParseClass * diracparse_class)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (diracparse), 13);
}
/**
 * gst_amrparse_sink_setcaps:
 * @parse: #GstBaseParse.
 * @caps: #GstCaps.
 *
 * Returns: TRUE on success.
 */
static gboolean
gst_amrparse_sink_setcaps (GstBaseParse * parse, GstCaps * caps)
{
  GstAmrParse *amrparse;
  GstStructure *structure;
  const gchar *name;

  amrparse = GST_AMRPARSE (parse);
  structure = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (structure);
  GST_DEBUG_OBJECT (amrparse, "setcaps: %s", name);

  if (!strncmp (name, "audio/x-amr-wb-sh", 17)) {
    amrparse->block_size = block_size_wb;
    amrparse->wide = 1;
  } else if (!strncmp (name, "audio/x-amr-nb-sh", 17)) {
    amrparse->block_size = block_size_nb;
    amrparse->wide = 0;
  } else {
    GST_WARNING ("Unknown caps");
    return FALSE;
  }

  amrparse->need_header = FALSE;
  gst_base_parse_set_frame_props (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
  gst_amrparse_set_src_caps (amrparse);
  return TRUE;
}
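/* Reference sketch (not part of the element): the block_size_nb / block_size_wb
 * tables selected above map the 4-bit AMR frame-type field to the speech
 * payload size in bytes, excluding the one-byte ToC header that the parser
 * adds back when computing the frame size. The values below follow the
 * octet-aligned sizes from RFC 4867; reserved/NO_DATA entries are shown as 0
 * here, so this may not match the element's actual table definitions entry
 * for entry.
 */
#include <glib.h>

static const gint block_size_nb[16] =
    { 12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0 };
static const gint block_size_wb[16] =
    { 17, 23, 32, 36, 40, 46, 50, 58, 60, 5, 0, 0, 0, 0, 0, 0 };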
/**
 * gst_amrparse_init:
 * @amrparse: #GstAmrParse
 * @klass: #GstAmrParseClass.
 *
 */
static void
gst_amrparse_init (GstAmrParse * amrparse, GstAmrParseClass * klass)
{
  /* init rest */
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
  GST_DEBUG ("initialized");
}
/**
 * gst_amr_parse_init:
 * @amrparse: #GstAmrParse
 *
 */
static void
gst_amr_parse_init (GstAmrParse * amrparse)
{
  /* init rest */
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
  GST_DEBUG ("initialized");
  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (amrparse));
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (amrparse));
}
/**
 * gst_amrparse_set_src_caps:
 * @amrparse: #GstAmrParse.
 *
 * Set source pad caps according to current knowledge about the
 * audio stream.
 *
 * Returns: TRUE if caps were successfully set.
 */
static gboolean
gst_amrparse_set_src_caps (GstAmrParse * amrparse)
{
  GstCaps *src_caps = NULL;
  gboolean res = FALSE;

  if (amrparse->wide) {
    GST_DEBUG_OBJECT (amrparse, "setting srcpad caps to AMR-WB");
    src_caps = gst_caps_new_simple ("audio/AMR-WB",
        "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 16000, NULL);
  } else {
    GST_DEBUG_OBJECT (amrparse, "setting srcpad caps to AMR-NB");
    /* Max. size of NB frame is 31 bytes, so we can set the min. frame
       size to 32 (+1 for next frame header) */
    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 32);
    src_caps = gst_caps_new_simple ("audio/AMR",
        "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 8000, NULL);
  }
  gst_pad_use_fixed_caps (GST_BASE_PARSE (amrparse)->srcpad);
  res = gst_pad_set_caps (GST_BASE_PARSE (amrparse)->srcpad, src_caps);
  gst_caps_unref (src_caps);
  return res;
}
/**
 * gst_amr_parse_check_valid_frame:
 * @parse: #GstBaseParse.
 * @frame: #GstBaseParseFrame.
 * @framesize: Output variable where the found frame size is put.
 * @skipsize: Output variable which tells how much data needs to be skipped
 *            until a frame header is found.
 *
 * Implementation of "check_valid_frame" vmethod in #GstBaseParse class.
 *
 * Returns: TRUE if the given data contains a valid frame.
 */
gboolean
gst_amr_parse_check_valid_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
  GstBuffer *buffer;
  const guint8 *data;
  gint fsize, mode, dsize;
  GstAmrParse *amrparse;

  amrparse = GST_AMR_PARSE (parse);
  buffer = frame->buffer;
  data = GST_BUFFER_DATA (buffer);
  dsize = GST_BUFFER_SIZE (buffer);

  GST_LOG ("buffer: %d bytes", dsize);

  if (amrparse->need_header) {
    if (dsize >= AMR_MIME_HEADER_SIZE &&
        gst_amr_parse_parse_header (amrparse, data, skipsize)) {
      amrparse->need_header = FALSE;
      gst_base_parse_set_frame_rate (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
    } else {
      GST_WARNING ("media doesn't look like a AMR format");
    }
    /* We return FALSE, so this frame won't get pushed forward. Instead,
       the "skip" value is set, so next time we will receive a valid frame. */
    return FALSE;
  }

  /* Does this look like a possible frame header candidate? */
  if ((data[0] & 0x83) == 0) {
    /* Yep. Retrieve the frame size */
    mode = (data[0] >> 3) & 0x0F;
    fsize = amrparse->block_size[mode] + 1;     /* +1 for the header byte */

    /* We recognize this data as a valid frame when:
     *     - We are in sync. There is no need for extra checks then
     *     - We are in EOS. There might not be enough data to check next frame
     *     - Sync is lost, but the following data after this frame seem
     *       to contain a valid header as well (and there is enough data to
     *       perform this check)
     */
    if (fsize &&
        (!GST_BASE_PARSE_LOST_SYNC (parse) || GST_BASE_PARSE_DRAINING (parse) ||
            (dsize > fsize && (data[fsize] & 0x83) == 0))) {
      *framesize = fsize;
      return TRUE;
    }
  }
static GstFlowReturn
gst_h263_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstH263Parse *h263parse;
  GstBuffer *buffer;
  GstFlowReturn res;
  H263Params params = { 0, };

  h263parse = GST_H263_PARSE (parse);
  buffer = frame->buffer;

  res = gst_h263_parse_get_params (&params, buffer, TRUE, &h263parse->state);
  if (res != GST_FLOW_OK)
    goto out;

  if (h263parse->state == PASSTHROUGH || h263parse->state == PARSING) {
    /* There's a feature we don't support, or we didn't have enough data to
     * parse the header, which should not be possible. Either way, go into
     * passthrough mode and let downstream handle it if it can. */
    GST_WARNING ("Couldn't parse header - setting passthrough mode");
    gst_base_parse_set_passthrough (parse, TRUE);
    goto out;
  }

  /* h263parse->state is now GOT_HEADER */

  gst_buffer_set_caps (buffer,
      GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (h263parse))));

  /* delta units (non-keyframes) must carry the DELTA_UNIT flag */
  if (gst_h263_parse_is_delta_unit (&params))
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

out:
  return res;
}
static gboolean
gst_raw_base_parse_start (GstBaseParse * parse)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (parse);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
  GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);

  g_assert (klass->set_current_config);

  GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);

  /* If the config is ready from the start, set the min frame size
   * (this will happen with the properties config) */
  if (klass->is_config_ready (raw_base_parse,
          GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
    gsize frame_size = klass->get_config_frame_size (raw_base_parse,
        GST_RAW_BASE_PARSE_CONFIG_CURRENT);
    gst_base_parse_set_min_frame_size (base_parse, frame_size);
  }

  GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);

  return TRUE;
}
static void
gst_raw_audio_parse_set_property (GObject * object, guint prop_id,
    GValue const *value, GParamSpec * pspec)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (object);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
  GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (object);

  /* All properties are handled similarly:
   * - if the new value is the same as the current value, nothing is done
   * - the parser lock is held while the new value is set
   * - if the properties config is the current config, the source caps are
   *   invalidated to ensure that the code in handle_frame pushes a new CAPS
   *   event out
   * - properties that affect the bpf value call the function to update
   *   the bpf and also call gst_base_parse_set_min_frame_size() to ensure
   *   that the minimum frame size can hold 1 frame (= one sample for each
   *   channel)
   */

  switch (prop_id) {
    case PROP_FORMAT:
    {
      GstRawAudioParseFormat new_format = g_value_get_enum (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_format != raw_audio_parse->properties_config.format) {
        raw_audio_parse->properties_config.format = new_format;
        gst_raw_audio_parse_update_config_bpf
            (&(raw_audio_parse->properties_config));

        if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              raw_audio_parse->properties_config.bpf);
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PCM_FORMAT:
    {
      GstAudioFormat new_pcm_format = g_value_get_enum (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_pcm_format != raw_audio_parse->properties_config.pcm_format) {
        raw_audio_parse->properties_config.pcm_format = new_pcm_format;
        gst_raw_audio_parse_update_config_bpf
            (&(raw_audio_parse->properties_config));

        if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              raw_audio_parse->properties_config.bpf);
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_SAMPLE_RATE:
    {
      guint new_sample_rate = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_sample_rate != raw_audio_parse->properties_config.sample_rate) {
        raw_audio_parse->properties_config.sample_rate = new_sample_rate;

        if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse))
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_NUM_CHANNELS:
    {
      guint new_num_channels = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_num_channels != raw_audio_parse->properties_config.num_channels) {
        gst_raw_audio_parse_set_config_channels
            (&(raw_audio_parse->properties_config), new_num_channels, 0, TRUE);
        raw_audio_parse->properties_config.num_channels = new_num_channels;
        gst_raw_audio_parse_update_config_bpf
            (&(raw_audio_parse->properties_config));

        if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              raw_audio_parse->properties_config.bpf);
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_INTERLEAVED:
    {
      gboolean new_interleaved = g_value_get_boolean (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_interleaved != raw_audio_parse->properties_config.interleaved) {
        raw_audio_parse->properties_config.interleaved = new_interleaved;

        if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse))
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_CHANNEL_POSITIONS:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      GstRawAudioParseConfig *config = &(raw_audio_parse->properties_config);

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_audio_parse, LIBRARY, SETTINGS,
            ("channel position property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if ((valarray == NULL) && (config->num_channels > 0)) {
        /* NULL value given, and number of channels is nonzero.
         * Use the default GStreamer positioning. Call
         * set_config_channels with the set_positions parameter
         * set to TRUE to ensure the position values are filled. */
        gst_raw_audio_parse_set_config_channels
            (&(raw_audio_parse->properties_config), config->num_channels, 0,
            TRUE);
      } else {
        /* Non-NULL value given. Make sure the channel_positions
         * array in the properties config has enough room, and that
         * the num_channels value equals the array length. Then copy
         * the values from the valarray to channel_positions, and
         * produce a copy of that array in case its channel positions
         * are not in a valid GStreamer order (to be able to apply
         * channel reordering later). */

        guint i;

        if (valarray->n_values != config->num_channels) {
          /* Call with set_positions == FALSE to ensure that
           * the array is properly allocated but not filled
           * (it is filled below) */
          gst_raw_audio_parse_set_config_channels (config, valarray->n_values,
              0, FALSE);
        }

        for (i = 0; i < config->num_channels; ++i) {
          GValue *val = g_value_array_get_nth (valarray, i);
          config->channel_positions[i] = g_value_get_enum (val);
        }

        gst_raw_audio_parse_update_channel_reordering_flag (config);
      }

      gst_raw_audio_parse_update_config_bpf
          (&(raw_audio_parse->properties_config));

      if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
        gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
        gst_base_parse_set_min_frame_size (base_parse,
            raw_audio_parse->properties_config.bpf);
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static void
gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 6);
  gst_ac3_parse_reset (ac3parse);
}
/**
 * gst_amr_parse_handle_frame:
 * @parse: #GstBaseParse.
 * @frame: #GstBaseParseFrame.
 * @skipsize: Output variable which tells how much data needs to be skipped
 *            until a frame header is found.
 *
 * Implementation of "handle_frame" vmethod in #GstBaseParse class.
 *
 * Returns: a #GstFlowReturn.
 */
static GstFlowReturn
gst_amr_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstBuffer *buffer;
  GstMapInfo map;
  gint fsize = 0, mode, dsize;
  GstAmrParse *amrparse;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean found = FALSE;

  amrparse = GST_AMR_PARSE (parse);
  buffer = frame->buffer;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  dsize = map.size;

  GST_LOG ("buffer: %d bytes", dsize);

  if (amrparse->need_header) {
    if (dsize >= AMR_MIME_HEADER_SIZE &&
        gst_amr_parse_parse_header (amrparse, map.data, skipsize)) {
      amrparse->need_header = FALSE;
      gst_base_parse_set_frame_rate (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
    } else {
      GST_WARNING ("media doesn't look like a AMR format");
    }
    /* This frame won't get pushed forward; the "skip" value is set instead,
       so next time we will receive a valid frame. */
    goto done;
  }

  *skipsize = 1;
  /* Does this look like a possible frame header candidate? */
  if ((map.data[0] & 0x83) == 0) {
    /* Yep. Retrieve the frame size */
    mode = (map.data[0] >> 3) & 0x0F;
    fsize = amrparse->block_size[mode] + 1;     /* +1 for the header byte */

    /* We recognize this data as a valid frame when:
     *     - We are in sync. There is no need for extra checks then
     *     - We are in EOS. There might not be enough data to check next frame
     *     - Sync is lost, but the following data after this frame seem
     *       to contain a valid header as well (and there is enough data to
     *       perform this check)
     */
    if (fsize) {
      *skipsize = 0;
      /* in sync, no further check */
      if (!GST_BASE_PARSE_LOST_SYNC (parse)) {
        found = TRUE;
      } else if (dsize > fsize) {
        /* enough data, check for next sync */
        if ((map.data[fsize] & 0x83) == 0)
          found = TRUE;
      } else if (GST_BASE_PARSE_DRAINING (parse)) {
        /* not enough, but draining, so ok */
        found = TRUE;
      }
    }
  }
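/* Illustrative sketch (not part of the element): how the one-byte AMR ToC
 * header checked above is laid out, and the file magic that
 * gst_amr_parse_parse_header() is presumed to match against per RFC 4867.
 * The helper and macro names below are hypothetical.
 */
#include <glib.h>

/* File-storage magic strings per RFC 4867 */
#define AMR_NB_MAGIC "#!AMR\n"
#define AMR_WB_MAGIC "#!AMR-WB\n"

static gboolean
amr_toc_byte_is_plausible (guint8 toc)
{
  /* bit 7 and bits 1-0 are padding and must be zero (mask 0x83);
   * bit 2 is the frame-quality flag and may be either value */
  return (toc & 0x83) == 0;
}

static gint
amr_toc_byte_get_mode (guint8 toc)
{
  /* the 4-bit frame type / mode sits in bits 6-3 */
  return (toc >> 3) & 0x0F;
}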
static void
gst_h264_parse_update_src_caps (GstH264Parse * h264parse)
{
  GstH264ParamsSPS *sps;
  GstCaps *caps = NULL, *sink_caps;
  gboolean modified = FALSE;
  GstBuffer *buf = NULL;

  if (G_UNLIKELY (!GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (h264parse))))
    modified = TRUE;
  else if (G_UNLIKELY (!h264parse->update_caps))
    return;

  /* carry over input caps as much as possible; override with our own stuff */
  sink_caps = GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (h264parse));
  if (sink_caps)
    gst_caps_ref (sink_caps);
  else
    sink_caps = gst_caps_new_simple ("video/x-h264", NULL);

  sps = h264parse->params->sps;
  GST_DEBUG_OBJECT (h264parse, "sps: %p", sps);

  /* only codec-data for nice-and-clean au aligned packetized avc format */
  if (h264parse->format == GST_H264_PARSE_FORMAT_AVC &&
      h264parse->align == GST_H264_PARSE_ALIGN_AU) {
    buf = gst_h264_parse_make_codec_data (h264parse);
    if (buf && h264parse->codec_data) {
      if (GST_BUFFER_SIZE (buf) != GST_BUFFER_SIZE (h264parse->codec_data) ||
          memcmp (GST_BUFFER_DATA (buf),
              GST_BUFFER_DATA (h264parse->codec_data), GST_BUFFER_SIZE (buf)))
        modified = TRUE;
    } else {
      if (h264parse->codec_data)
        buf = gst_buffer_ref (h264parse->codec_data);
      modified = TRUE;
    }
  }

  if (G_UNLIKELY (!sps)) {
    caps = gst_caps_copy (sink_caps);
  } else if (G_UNLIKELY (h264parse->width != sps->width ||
          h264parse->height != sps->height ||
          h264parse->fps_num != sps->fps_num ||
          h264parse->fps_den != sps->fps_den || modified)) {
    caps = gst_caps_copy (sink_caps);
    /* sps should give this */
    gst_caps_set_simple (caps, "width", G_TYPE_INT, sps->width,
        "height", G_TYPE_INT, sps->height, NULL);
    h264parse->height = sps->height;
    h264parse->width = sps->width;
    /* but not necessarily or reliably this */
    if ((!h264parse->fps_num || !h264parse->fps_den) &&
        sps->fps_num > 0 && sps->fps_den > 0) {
      gst_caps_set_simple (caps, "framerate",
          GST_TYPE_FRACTION, sps->fps_num, sps->fps_den, NULL);
      h264parse->fps_num = sps->fps_num;
      h264parse->fps_den = sps->fps_den;
      gst_base_parse_set_frame_props (GST_BASE_PARSE (h264parse),
          h264parse->fps_num, h264parse->fps_den, 0, 0);
    }
  }

  if (caps) {
    gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
        "stream-format", G_TYPE_STRING,
        gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
        "alignment", G_TYPE_STRING,
        gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
    if (buf) {
      gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
      gst_buffer_replace (&h264parse->codec_data, buf);
      gst_buffer_unref (buf);
      buf = NULL;
    } else {
      GstStructure *s;

      /* remove any left-over codec-data hanging around */
      s = gst_caps_get_structure (caps, 0);
      gst_structure_remove_field (s, "codec_data");
    }
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (h264parse), caps);
    gst_caps_unref (caps);
  }

  gst_caps_unref (sink_caps);
  if (buf)
    gst_buffer_unref (buf);
}
static void
gst_irtsp_parse_init (GstIRTSPParse * IRTSPParse, GstIRTSPParseClass * klass)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (IRTSPParse), 4);
  gst_irtsp_parse_reset (IRTSPParse);
}
static void
gst_mpegv_parse_update_src_caps (GstMpegvParse * mpvparse)
{
  GstCaps *caps = NULL;

  /* only update if no src caps yet or explicitly triggered */
  if (G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (mpvparse)) &&
          !mpvparse->update_caps))
    return;

  /* carry over input caps as much as possible; override with our own stuff */
  caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (mpvparse));
  if (caps) {
    caps = gst_caps_make_writable (caps);
  } else {
    caps = gst_caps_new_empty_simple ("video/mpeg");
  }

  /* typically we don't output buffers until we have properly parsed some
   * config data, so we should at least know about version.
   * If not, it means it has been requested not to drop data, and
   * upstream and/or app must know what they are doing ... */
  gst_caps_set_simple (caps,
      "mpegversion", G_TYPE_INT, (mpvparse->config_flags & FLAG_MPEG2) ? 2 : 1,
      NULL);

  gst_caps_set_simple (caps, "systemstream", G_TYPE_BOOLEAN, FALSE,
      "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

  if (mpvparse->sequencehdr.width > 0 && mpvparse->sequencehdr.height > 0) {
    gst_caps_set_simple (caps,
        "width", G_TYPE_INT, mpvparse->sequencehdr.width,
        "height", G_TYPE_INT, mpvparse->sequencehdr.height, NULL);
  }

  /* perhaps we have a framerate */
  if (mpvparse->fps_num > 0 && mpvparse->fps_den > 0) {
    gint fps_num = mpvparse->fps_num;
    gint fps_den = mpvparse->fps_den;
    GstClockTime latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);

    gst_caps_set_simple (caps, "framerate",
        GST_TYPE_FRACTION, fps_num, fps_den, NULL);
    gst_base_parse_set_frame_rate (GST_BASE_PARSE (mpvparse),
        fps_num, fps_den, 0, 0);
    gst_base_parse_set_latency (GST_BASE_PARSE (mpvparse), latency, latency);
  }

  /* or pixel-aspect-ratio */
  if (mpvparse->sequencehdr.par_w > 0 && mpvparse->sequencehdr.par_h > 0) {
    gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
        mpvparse->sequencehdr.par_w, mpvparse->sequencehdr.par_h, NULL);
  }

  if (mpvparse->config != NULL) {
    gst_caps_set_simple (caps, "codec_data",
        GST_TYPE_BUFFER, mpvparse->config, NULL);
  }

  if (mpvparse->config_flags & FLAG_SEQUENCE_EXT) {
    const guint profile_c = mpvparse->sequenceext.profile;
    const guint level_c = mpvparse->sequenceext.level;
    const gchar *profile = NULL, *level = NULL;
    /*
     * Profile indication - 1 => High, 2 => Spatially Scalable,
     *                      3 => SNR Scalable, 4 => Main, 5 => Simple
     * 4:2:2 and Multi-view have profile = 0, with the escape bit set to 1
     */
    const gchar *const profiles[] =
        { "high", "spatial", "snr", "main", "simple" };
    /*
     * Level indication - 4 => High, 6 => High-1440, 8 => Main, 10 => Low,
     *                    except in the case of profile = 0
     */
    const gchar *const levels[] = { "high", "high-1440", "main", "low" };

    if (profile_c > 0 && profile_c < 6)
      profile = profiles[profile_c - 1];

    if ((level_c > 3) && (level_c < 11) && (level_c % 2 == 0))
      level = levels[(level_c >> 1) - 2];

    if (profile_c == 8) {
      /* Non-hierarchical profile */
      switch (level_c) {
        case 2:
          level = levels[0];
          /* fallthrough */
        case 5:
          if (!level)
            level = levels[2];
          profile = "4:2:2";
          break;
        case 10:
          level = levels[0];
          /* fallthrough */
        case 11:
          if (!level)
            level = levels[1];
          /* fallthrough */
        case 13:
          if (!level)
            level = levels[2];
          /* fallthrough */
        case 14:
          if (!level)
            level = levels[3];
          profile = "multiview";
          break;
        default:
          break;
      }
    }

    /* FIXME does it make sense to expose profile/level in the caps ? */
    GST_DEBUG_OBJECT (mpvparse, "profile:'%s' level:'%s'", profile, level);

    if (profile)
      gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL);
    else
      GST_DEBUG_OBJECT (mpvparse, "Invalid profile - %u", profile_c);

    if (level)
      gst_caps_set_simple (caps, "level", G_TYPE_STRING, level, NULL);
    else
      GST_DEBUG_OBJECT (mpvparse, "Invalid level - %u", level_c);

    gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING,
        (mpvparse->sequenceext.progressive ? "progressive" : "mixed"), NULL);
  }

  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (mpvparse), caps);
  gst_caps_unref (caps);

  mpvparse->update_caps = FALSE;
}
static void
gst_raw_video_parse_set_property (GObject * object, guint prop_id,
    GValue const *value, GParamSpec * pspec)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (object);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (object);
  GstRawVideoParseConfig *props_cfg = &(raw_video_parse->properties_config);

  /* All properties are handled similarly:
   * - if the new value is the same as the current value, nothing is done
   * - the parser lock is held while the new value is set
   * - if the properties config is the current config, the source caps are
   *   invalidated to ensure that the code in handle_frame pushes a new CAPS
   *   event out
   * - properties that affect the video frame size call the function to update
   *   the info and also call gst_base_parse_set_min_frame_size() to ensure
   *   that the minimum frame size can hold 1 frame (= one sample for each
   *   channel); to ensure that the min frame size includes any extra padding,
   *   it is set to the result of gst_raw_video_parse_get_config_frame_size()
   * - property configuration values that require video info updates aren't
   *   written directly into the video info structure, but in the extra
   *   fields instead (gst_raw_video_parse_update_info() then copies the values
   *   from these fields into the video info); see the documentation inside
   *   gst_raw_video_parse_update_info() for the reason why
   */

  switch (prop_id) {
    case PROP_WIDTH:
    {
      gint new_width = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_width != props_cfg->width) {
        props_cfg->width = new_width;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_HEIGHT:
    {
      gint new_height = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_height != props_cfg->height) {
        props_cfg->height = new_height;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FORMAT:
    {
      GstVideoFormat new_format = g_value_get_enum (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_format != props_cfg->format) {
        props_cfg->format = new_format;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PIXEL_ASPECT_RATIO:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* The pixel aspect ratio does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->pixel_aspect_ratio_n =
          GST_VIDEO_INFO_PAR_N (&(props_cfg->info)) =
          gst_value_get_fraction_numerator (value);
      props_cfg->pixel_aspect_ratio_d =
          GST_VIDEO_INFO_PAR_D (&(props_cfg->info)) =
          gst_value_get_fraction_denominator (value);
      GST_DEBUG_OBJECT (raw_video_parse, "setting pixel aspect ratio to %u/%u",
          props_cfg->pixel_aspect_ratio_n, props_cfg->pixel_aspect_ratio_d);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FRAMERATE:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* The framerate does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->framerate_n = GST_VIDEO_INFO_FPS_N (&(props_cfg->info)) =
          gst_value_get_fraction_numerator (value);
      props_cfg->framerate_d = GST_VIDEO_INFO_FPS_D (&(props_cfg->info)) =
          gst_value_get_fraction_denominator (value);
      GST_DEBUG_OBJECT (raw_video_parse, "setting framerate to %u/%u",
          props_cfg->framerate_n, props_cfg->framerate_d);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_INTERLACED:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* Interlacing does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->interlaced = g_value_get_boolean (value);
      GST_VIDEO_INFO_INTERLACE_MODE (&(props_cfg->info)) =
          props_cfg->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
          GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_TOP_FIELD_FIRST:
    {
      /* The top-field-first flag is a detail related to
       * interlacing, so no video info update is needed */
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
      props_cfg->top_field_first = g_value_get_boolean (value);
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PLANE_STRIDES:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      guint n_planes;
      guint i;

      /* If no valarray is given, then disable custom
       * plane strides & offsets and stick to the
       * standard computed ones */
      if (valarray == NULL) {
        GST_DEBUG_OBJECT (raw_video_parse,
            "custom plane strides & offsets disabled");
        props_cfg->custom_plane_strides = FALSE;
        gst_raw_video_parse_update_info (props_cfg);
        break;
      }

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("plane strides property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));

      /* Check that the valarray holds the right number of values */
      if (valarray->n_values != n_planes) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("incorrect number of elements in plane strides property"),
            ("expected: %u, got: %u", n_planes, valarray->n_values));
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      /* Copy the values to the stride array */
      for (i = 0; i < n_planes; ++i) {
        GValue *val = g_value_array_get_nth (valarray, i);
        props_cfg->plane_strides[i] = g_value_get_uint (val);
        GST_DEBUG_OBJECT (raw_video_parse, "plane #%u stride: %d", i,
            props_cfg->plane_strides[i]);
      }

      props_cfg->custom_plane_strides = TRUE;

      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PLANE_OFFSETS:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      guint n_planes;
      guint i;

      /* If no valarray is given, then disable custom
       * plane strides & offsets and stick to the
       * standard computed ones */
      if (valarray == NULL) {
        GST_DEBUG_OBJECT (raw_video_parse,
            "custom plane strides & offsets disabled");
        props_cfg->custom_plane_strides = FALSE;
        gst_raw_video_parse_update_info (props_cfg);
        break;
      }

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("plane offsets property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));

      /* Check that the valarray holds the right number of values */
      if (valarray->n_values != n_planes) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("incorrect number of elements in plane offsets property"),
            ("expected: %u, got: %u", n_planes, valarray->n_values));
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      /* Copy the values to the offset array */
      for (i = 0; i < n_planes; ++i) {
        GValue *val = g_value_array_get_nth (valarray, i);
        props_cfg->plane_offsets[i] = g_value_get_uint (val);
        GST_DEBUG_OBJECT (raw_video_parse, "plane #%u offset: %" G_GSIZE_FORMAT,
            i, props_cfg->plane_offsets[i]);
      }

      props_cfg->custom_plane_strides = TRUE;

      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FRAME_STRIDE:
    {
      /* The frame stride does not affect the video frame size,
       * so it is just set directly without any updates */
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
      props_cfg->frame_stride = g_value_get_uint (value);
      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static void
gst_raw_base_parse_set_property (GObject * object, guint prop_id,
    GValue const *value, GParamSpec * pspec)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (object);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
  GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (object);

  g_assert (klass->is_config_ready);
  g_assert (klass->set_current_config);

  switch (prop_id) {
    case PROP_USE_SINK_CAPS:
    {
      gboolean new_state, cur_state;
      GstRawBaseParseConfig new_config;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* Check to ensure nothing is done if the value stays the same */
      new_state = g_value_get_boolean (value);
      cur_state = gst_raw_base_parse_is_using_sink_caps (raw_base_parse);
      if (new_state == cur_state) {
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      GST_DEBUG_OBJECT (raw_base_parse, "switching to %s config",
          new_state ? "sink caps" : "properties");
      new_config = new_state ? GST_RAW_BASE_PARSE_CONFIG_SINKCAPS :
          GST_RAW_BASE_PARSE_CONFIG_PROPERTIES;

      if (!klass->set_current_config (raw_base_parse, new_config)) {
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        GST_ELEMENT_ERROR (raw_base_parse, STREAM, FAILED,
            ("could not set new current config"),
            ("use-sink-caps property: %d", new_state));
        break;
      }

      /* Update the minimum frame size if the config is ready. This ensures
       * that the next buffer that is passed to handle_frame contains complete
       * frames.
       * If the current config is the properties config, then it will always
       * be ready, and its frame size will be valid. Ensure that the baseparse
       * minimum frame size is set properly then.
       * If the current config is the sink caps config, then it will initially
       * not be ready until the sink caps are set, so the minimum frame size
       * cannot be set right here. However, since the caps always come in
       * *before* the actual data, the config will be readied in the
       * set_sink_caps function, and be ready by the time handle_frame is
       * called. There, the minimum frame size is set as well. */
      if (klass->is_config_ready (raw_base_parse,
              GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
        gsize frame_size = klass->get_config_frame_size (raw_base_parse,
            GST_RAW_BASE_PARSE_CONFIG_CURRENT);
        gst_base_parse_set_min_frame_size (base_parse, frame_size);
      }

      /* Since the current config was switched, the source caps change. Ensure
       * the new caps are pushed downstream by setting src_caps_set to FALSE:
       * This way, the next handle_frame call will take care of that. */
      raw_base_parse->src_caps_set = FALSE;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
static void
gst_dirac_parse_init (GstDiracParse * diracparse)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (diracparse), 13);
  gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (diracparse), FALSE);
}
static void
gst_h263_parse_set_src_caps (GstH263Parse * h263parse,
    const H263Params * params)
{
  GstStructure *st;
  GstCaps *caps, *sink_caps;
  gint fr_num, fr_denom;

  g_assert (h263parse->state == PASSTHROUGH || h263parse->state == GOT_HEADER);

  caps = GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (h263parse));
  if (caps) {
    caps = gst_caps_copy (caps);
  } else {
    caps = gst_caps_new_simple ("video/x-h263",
        "variant", G_TYPE_STRING, "itu", NULL);
  }
  gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);

  sink_caps = GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (h263parse));
  if (sink_caps && (st = gst_caps_get_structure (sink_caps, 0)) &&
      gst_structure_get_fraction (st, "framerate", &fr_num, &fr_denom)) {
    /* Got it in caps - nothing more to do */
    GST_DEBUG_OBJECT (h263parse, "sink caps override framerate from headers");
  } else {
    /* Caps didn't have the framerate - get it from params */
    gst_h263_parse_get_framerate (params, &fr_num, &fr_denom);
  }
  gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, fr_num, fr_denom,
      NULL);

  if (params->width && params->height)
    gst_caps_set_simple (caps, "width", G_TYPE_INT, params->width,
        "height", G_TYPE_INT, params->height, NULL);

  if (h263parse->state == GOT_HEADER) {
    gst_caps_set_simple (caps,
        "annex-d", G_TYPE_BOOLEAN, (params->features & H263_OPTION_UMV_MODE),
        "annex-e", G_TYPE_BOOLEAN, (params->features & H263_OPTION_SAC_MODE),
        "annex-f", G_TYPE_BOOLEAN, (params->features & H263_OPTION_AP_MODE),
        "annex-g", G_TYPE_BOOLEAN, (params->features & H263_OPTION_PB_MODE),
        "annex-i", G_TYPE_BOOLEAN, (params->features & H263_OPTION_AIC_MODE),
        "annex-j", G_TYPE_BOOLEAN, (params->features & H263_OPTION_DF_MODE),
        "annex-k", G_TYPE_BOOLEAN, (params->features & H263_OPTION_SS_MODE),
        "annex-m", G_TYPE_BOOLEAN, (params->type == PICTURE_IMPROVED_PB),
        "annex-n", G_TYPE_BOOLEAN, (params->features & H263_OPTION_RPS_MODE),
        "annex-q", G_TYPE_BOOLEAN, (params->features & H263_OPTION_RRU_MODE),
        "annex-r", G_TYPE_BOOLEAN, (params->features & H263_OPTION_ISD_MODE),
        "annex-s", G_TYPE_BOOLEAN, (params->features & H263_OPTION_AIV_MODE),
        "annex-t", G_TYPE_BOOLEAN, (params->features & H263_OPTION_MQ_MODE),
        "annex-u", G_TYPE_BOOLEAN, (params->features & H263_OPTION_ERPS_MODE),
        "annex-v", G_TYPE_BOOLEAN, (params->features & H263_OPTION_DPS_MODE),
        NULL);

    h263parse->profile = gst_h263_parse_get_profile (params);
    if (h263parse->profile != -1)
      gst_caps_set_simple (caps, "profile", G_TYPE_UINT, h263parse->profile,
          NULL);

    h263parse->level = gst_h263_parse_get_level (params, h263parse->profile,
        h263parse->bitrate, fr_num, fr_denom);
    if (h263parse->level != -1)
      gst_caps_set_simple (caps, "level", G_TYPE_UINT, h263parse->level, NULL);
  }

  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (h263parse)), caps);
  gst_caps_unref (caps);
}