/* Called by baseparse when parsing starts.
 * Requests that handle_frame never be called with fewer than 13 bytes. */
static gboolean
gst_dirac_parse_start (GstBaseParse * parse)
{
  /* 13 bytes: enough for one Dirac parse-info header */
  gst_base_parse_set_min_frame_size (parse, 13);

  return TRUE;
}
/* Instance init (old-style signature taking the class pointer).
 * Only configures the minimum amount of data baseparse must gather. */
static void
gst_dirac_parse_init (GstDiracParse * diracparse,
    GstDiracParseClass * diracparse_class)
{
  /* never hand us less than one 13-byte parse-info header */
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (diracparse), 13);
}
/**
 * gst_amrparse_init:
 * @amrparse: #GstAmrParse
 * @klass: #GstAmrParseClass.
 *
 * Instance initializer: set the initial minimum frame size.
 */
static void
gst_amrparse_init (GstAmrParse * amrparse, GstAmrParseClass * klass)
{
  /* init rest: 62 bytes is the initial minimum before the variant
   * (NB/WB) is known */
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
  GST_DEBUG ("initialized");
}
/* Instance init: configure baseparse behaviour and sink pad flags. */
static void
gst_dirac_parse_init (GstDiracParse * diracparse)
{
  GstBaseParse *base = GST_BASE_PARSE (diracparse);

  /* one 13-byte parse-info header minimum */
  gst_base_parse_set_min_frame_size (base, 13);
  /* do not interpolate PTS values */
  gst_base_parse_set_pts_interpolation (base, FALSE);

  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (diracparse));
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (diracparse));
}
/**
 * gst_amr_parse_init:
 * @amrparse: #GstAmrParse
 * @klass: #GstAmrParseClass.
 *
 * Instance initializer: set the minimum frame size and sink pad flags.
 */
static void
gst_amr_parse_init (GstAmrParse * amrparse)
{
  /* init rest: 62 bytes minimum until the NB/WB variant is known */
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
  GST_DEBUG ("initialized");

  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (amrparse));
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (amrparse));
}
/* Instance init: reset state, remember baseparse's sink chain function
 * and configure sink pad flags. */
static void
gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
  GstPad *sinkpad;

  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 8);
  gst_ac3_parse_reset (ac3parse);

  sinkpad = GST_BASE_PARSE_SINK_PAD (GST_BASE_PARSE (ac3parse));
  /* keep a pointer to baseparse's chain function so we can call
   * through to it later */
  ac3parse->baseparse_chainfunc = sinkpad->chainfunc;

  GST_PAD_SET_ACCEPT_INTERSECT (sinkpad);
  GST_PAD_SET_ACCEPT_TEMPLATE (sinkpad);
}
/* Called by baseparse when parsing starts: reset per-stream state and
 * set the minimum frame size. */
static gboolean
gst_dirac_parse_start (GstBaseParse * parse)
{
  GstDiracParse *diracparse = GST_DIRAC_PARSE (parse);

  /* the codec tag must be (re)sent for every new stream */
  diracparse->sent_codec_tag = FALSE;

  /* one 13-byte parse-info header minimum */
  gst_base_parse_set_min_frame_size (parse, 13);

  return TRUE;
}
/* Called by baseparse when parsing starts: reset parser state. */
static gboolean
gst_mpegv_parse_start (GstBaseParse * parse)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);

  GST_DEBUG_OBJECT (parse, "start");

  gst_mpegv_parse_reset (mpvparse);

  /* at least this much for a valid frame */
  gst_base_parse_set_min_frame_size (parse, 6);

  return TRUE;
}
/* Parses the 32-byte IVF file header at the start of the stream.
 *
 * On success: validates the header, stores stream parameters on @ivf,
 * switches the parser to the DATA state and asks baseparse to skip the
 * file header.  If not enough data is available yet, requests more by
 * setting *skipsize to 0.  Returns GST_FLOW_ERROR on a bad header.
 */
static GstFlowReturn
gst_ivf_parse_handle_frame_start (GstIvfParse * ivf, GstBaseParseFrame * frame,
    gint * skipsize)
{
  GstBuffer *const buffer = frame->buffer;
  GstMapInfo map;
  GstFlowReturn ret = GST_FLOW_OK;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  if (map.size >= IVF_FILE_HEADER_SIZE) {
    /* IVF file header fields are little-endian at fixed offsets */
    guint32 magic = GST_READ_UINT32_LE (map.data);
    guint16 version = GST_READ_UINT16_LE (map.data + 4);
    guint16 header_size = GST_READ_UINT16_LE (map.data + 6);
    guint32 fourcc = GST_READ_UINT32_LE (map.data + 8);
    guint16 width = GST_READ_UINT16_LE (map.data + 12);
    guint16 height = GST_READ_UINT16_LE (map.data + 14);
    guint32 fps_n = GST_READ_UINT32_LE (map.data + 16);
    guint32 fps_d = GST_READ_UINT32_LE (map.data + 20);
#ifndef GST_DISABLE_GST_DEBUG
    /* only needed for the debug log below, which compiles out too */
    guint32 num_frames = GST_READ_UINT32_LE (map.data + 24);
#endif

    /* reject anything that is not a known-good "DKIF" v0 header */
    if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
        version != 0 || header_size != 32 ||
        fourcc_to_media_type (fourcc) == NULL) {
      GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
      ret = GST_FLOW_ERROR;
      goto end;
    }

    ivf->fourcc = fourcc;
    gst_ivf_parse_set_size (ivf, width, height);
    gst_ivf_parse_set_framerate (ivf, fps_n, fps_d);
    GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);

    /* move along */
    ivf->state = GST_IVF_PARSE_DATA;
    /* from now on each frame starts with a 12-byte frame header */
    gst_base_parse_set_min_frame_size (GST_BASE_PARSE_CAST (ivf),
        IVF_FRAME_HEADER_SIZE);
    *skipsize = IVF_FILE_HEADER_SIZE;
  } else {
    GST_LOG_OBJECT (ivf, "Header data not yet available.");
    *skipsize = 0;
  }

end:
  gst_buffer_unmap (buffer, &map);
  return ret;
}
/* Called by baseparse when parsing starts: reset parser state and
 * (re)create the parameter-set tracking structures. */
static gboolean
gst_h264_parse_start (GstBaseParse * parse)
{
  GstH264Parse *h264parse = GST_H264_PARSE (parse);

  GST_DEBUG ("Start");

  gst_h264_parse_reset (h264parse);
  gst_h264_params_create (&h264parse->params, GST_ELEMENT (h264parse));

  /* start out asking for a reasonable chunk of data */
  gst_base_parse_set_min_frame_size (parse, 512);

  return TRUE;
}
/* Called by baseparse when parsing starts: reset state and configure
 * baseparse for IVF's headerless framing. */
static gboolean
gst_ivf_parse_start (GstBaseParse * parse)
{
  GstIvfParse *const ivf = GST_IVF_PARSE (parse);

  gst_ivf_parse_reset (ivf);

  /* Minimal file header size needed at start */
  gst_base_parse_set_min_frame_size (parse, IVF_FILE_HEADER_SIZE);

  /* No sync code to detect frame boundaries */
  gst_base_parse_set_syncable (parse, FALSE);

  return TRUE;
}
/* Called by baseparse when parsing starts: reset cached dimensions and
 * request a minimal amount of data. */
static gboolean
gst_png_parse_start (GstBaseParse * parse)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);

  GST_DEBUG_OBJECT (pngparse, "start");

  /* the start code and at least 2 empty frames (IHDR and IEND) */
  gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);

  pngparse->width = 0;
  pngparse->height = 0;

  return TRUE;
}
/* Called by baseparse when parsing starts: reset stream metadata and
 * enter the header-parsing state. */
static gboolean
gst_h263_parse_start (GstBaseParse * parse)
{
  GstH263Parse *h263parse = GST_H263_PARSE (parse);

  GST_DEBUG_OBJECT (h263parse, "start");

  h263parse->bitrate = 0;
  h263parse->profile = -1;
  h263parse->level = -1;
  h263parse->state = PARSING;

  /* at minimum the 3-byte PSC plus one more byte */
  gst_base_parse_set_min_frame_size (parse, 4);

  return TRUE;
}
/* Sink event handler: after a flush, restore the initial minimum frame
 * size so we re-request a full signature + IHDR + IEND worth of data.
 *
 * Fix: the parent class's sink_event takes ownership of @event and may
 * unref it, so the event type must be read BEFORE chaining up; the old
 * code read GST_EVENT_TYPE (event) afterwards (use-after-unref).
 */
static gboolean
gst_png_parse_event (GstBaseParse * parse, GstEvent * event)
{
  GstEventType type;
  gboolean res;

  /* remember the type before handing ownership to the parent */
  type = GST_EVENT_TYPE (event);

  res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);

  switch (type) {
    case GST_EVENT_FLUSH_STOP:
      /* the start code and at least 2 empty frames (IHDR and IEND) */
      gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);
      break;
    default:
      break;
  }

  return res;
}
/* Called when processing incoming buffers; parses a checked frame.
 * Sets fixed "video/x-dirac" caps on the first frame and keeps the
 * minimum frame size at one parse-info header.  (MUST implement) */
static GstFlowReturn
gst_dirac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstPad *srcpad = GST_BASE_PARSE_SRC_PAD (parse);

  if (GST_PAD_CAPS (srcpad) == NULL) {
    GstCaps *caps;

    caps = gst_caps_new_simple ("video/x-dirac", NULL);
    gst_buffer_set_caps (frame->buffer, caps);
    gst_pad_set_caps (srcpad, caps);
    gst_caps_unref (caps);
  }

  /* next frame again needs at least one 13-byte parse-info header */
  gst_base_parse_set_min_frame_size (parse, 13);

  return GST_FLOW_OK;
}
/**
 * gst_amrparse_set_src_caps:
 * @amrparse: #GstAmrParse.
 *
 * Set source pad caps according to current knowledge about the
 * audio stream (AMR-WB vs. AMR-NB).
 *
 * Returns: TRUE if caps were successfully set.
 */
static gboolean
gst_amrparse_set_src_caps (GstAmrParse * amrparse)
{
  GstPad *srcpad = GST_BASE_PARSE (amrparse)->srcpad;
  GstCaps *caps;
  gboolean res;

  if (amrparse->wide) {
    GST_DEBUG_OBJECT (amrparse, "setting srcpad caps to AMR-WB");
    caps = gst_caps_new_simple ("audio/AMR-WB",
        "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 16000, NULL);
  } else {
    GST_DEBUG_OBJECT (amrparse, "setting srcpad caps to AMR-NB");
    /* Max. size of NB frame is 31 bytes, so we can set the min. frame
       size to 32 (+1 for next frame header) */
    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 32);
    caps = gst_caps_new_simple ("audio/AMR",
        "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 8000, NULL);
  }

  gst_pad_use_fixed_caps (srcpad);
  res = gst_pad_set_caps (srcpad, caps);
  gst_caps_unref (caps);

  return res;
}
/* Called by baseparse when parsing starts.  If the current config is
 * already usable (the properties config), publish its frame size as the
 * baseparse minimum frame size.  Always succeeds. */
static gboolean
gst_raw_base_parse_start (GstBaseParse * parse)
{
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
  GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);

  g_assert (klass->set_current_config);

  GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);

  /* If the config is ready from the start, set the min frame size
   * (this will happen with the properties config) */
  if (klass->is_config_ready (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
    gsize frame_size;

    frame_size = klass->get_config_frame_size (raw_base_parse,
        GST_RAW_BASE_PARSE_CONFIG_CURRENT);
    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (parse), frame_size);
  }

  GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);

  return TRUE;
}
/* Scans the mapped input for a Dirac frame: finds the "BBCD" sync word,
 * walks parse-info chunks until a picture chunk completes the frame,
 * publishes caps/framerate when a sequence header is seen, and finishes
 * the frame.
 *
 * Fix: the buffer was previously unmapped BEFORE the sequence-header
 * branch read data[4] and data + 13, i.e. it accessed unmapped memory;
 * the unmap now happens after the last read of @data.
 */
static GstFlowReturn
gst_dirac_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame,
    gint * skipsize)
{
  int off;
  guint32 next_header;
  GstMapInfo map;
  guint8 *data;
  gsize size;
  gboolean have_picture = FALSE;
  int offset;
  guint framesize = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  /* need at least one full 13-byte parse-info header */
  if (G_UNLIKELY (size < 13)) {
    *skipsize = 1;
    goto out;
  }

  GST_DEBUG ("%" G_GSIZE_FORMAT ": %02x %02x %02x %02x", size, data[0],
      data[1], data[2], data[3]);

  if (GST_READ_UINT32_BE (data) != 0x42424344) {
    GstByteReader reader;

    /* not at a "BBCD" sync word: scan for the next candidate */
    gst_byte_reader_init (&reader, data, size);
    off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
        0x42424344, 0, size);
    if (off < 0) {
      /* no sync found; keep the last 3 bytes, they may be a prefix */
      *skipsize = size - 3;
      goto out;
    }

    GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
    GST_DEBUG ("skipping %d", off);
    *skipsize = off;
    goto out;
  }

  /* have sync, parse chunks */
  offset = 0;
  while (!have_picture) {
    GST_DEBUG ("offset %d:", offset);

    if (offset + 13 >= size) {
      /* not enough data for the next parse-info header; ask for more */
      framesize = offset + 13;
      goto out;
    }

    GST_DEBUG ("chunk type %02x", data[offset + 4]);

    if (GST_READ_UINT32_BE (data + offset) != 0x42424344) {
      GST_DEBUG ("bad header");
      *skipsize = 3;
      goto out;
    }

    next_header = GST_READ_UINT32_BE (data + offset + 5);
    GST_DEBUG ("next_header %d", next_header);
    if (next_header == 0)
      next_header = 13;

    if (SCHRO_PARSE_CODE_IS_PICTURE (data[offset + 4])) {
      have_picture = TRUE;
    }

    offset += next_header;
    if (offset >= size) {
      framesize = offset;
      goto out;
    }
  }

  framesize = offset;
  GST_DEBUG ("framesize %d", framesize);

  g_assert (framesize <= size);

  if (data[4] == SCHRO_PARSE_CODE_SEQUENCE_HEADER) {
    GstCaps *caps;
    GstDiracParse *diracparse = GST_DIRAC_PARSE (parse);
    DiracSequenceHeader sequence_header;
    int ret;

    ret = dirac_sequence_header_parse (&sequence_header, data + 13, size - 13);
    if (ret) {
      memcpy (&diracparse->sequence_header, &sequence_header,
          sizeof (sequence_header));
      caps = gst_caps_new_simple ("video/x-dirac",
          "width", G_TYPE_INT, sequence_header.width,
          "height", G_TYPE_INT, sequence_header.height,
          "framerate", GST_TYPE_FRACTION,
          sequence_header.frame_rate_numerator,
          sequence_header.frame_rate_denominator,
          "pixel-aspect-ratio", GST_TYPE_FRACTION,
          sequence_header.aspect_ratio_numerator,
          sequence_header.aspect_ratio_denominator,
          "interlace-mode", G_TYPE_STRING,
          sequence_header.interlaced ? "interleaved" : "progressive",
          "profile", G_TYPE_STRING, get_profile_name (sequence_header.profile),
          "level", G_TYPE_STRING, get_level_name (sequence_header.level),
          NULL);
      gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
      gst_caps_unref (caps);

      gst_base_parse_set_frame_rate (parse,
          sequence_header.frame_rate_numerator,
          sequence_header.frame_rate_denominator, 0, 0);
    }
  }

  /* done reading @data; safe to unmap now */
  gst_buffer_unmap (frame->buffer, &map);

  gst_base_parse_set_min_frame_size (parse, 13);

  return gst_base_parse_finish_frame (parse, frame, framesize);

out:
  gst_buffer_unmap (frame->buffer, &map);
  if (framesize)
    gst_base_parse_set_min_frame_size (parse, framesize);
  return GST_FLOW_OK;
}
/* Scans the mapped input for one complete PNG image: resynchronizes on
 * the 8-byte PNG signature if needed, then walks chunks (IHDR for
 * dimensions, IDAT to grow the minimum frame size, IEND to finish the
 * frame), updating srcpad caps when the dimensions change. */
static GstFlowReturn
gst_png_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame,
    gint * skipsize)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);
  GstMapInfo map;
  GstByteReader reader;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 signature;
  guint width = 0, height = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
    goto beach;

  if (signature != PNG_SIGNATURE) {
    /* not at the signature: scan for the 0x89 'P' 'N' 'G' prefix */
    for (;;) {
      guint offset;

      offset = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
          0x89504E47, 0, gst_byte_reader_get_remaining (&reader));

      /* NOTE(review): offset is guint; the == -1 comparison relies on
       * the scan's not-found value converting to UINT_MAX — confirm */
      if (offset == -1) {
        *skipsize = gst_byte_reader_get_remaining (&reader) - 4;
        goto beach;
      }

      gst_byte_reader_skip (&reader, offset);

      if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
        goto beach;

      if (signature == PNG_SIGNATURE) {
        /* We're skipping, go out, we'll be back */
        *skipsize = gst_byte_reader_get_pos (&reader);
        goto beach;
      }
      gst_byte_reader_skip (&reader, 4);
    }
  }

  /* skip the 8-byte signature, then iterate over chunks */
  gst_byte_reader_skip (&reader, 8);

  for (;;) {
    guint32 length;
    guint32 code;

    /* each chunk: 4-byte length, 4-byte type code */
    if (!gst_byte_reader_get_uint32_be (&reader, &length))
      goto beach;
    if (!gst_byte_reader_get_uint32_le (&reader, &code))
      goto beach;

    GST_TRACE_OBJECT (parse, "%" GST_FOURCC_FORMAT " chunk, %u bytes",
        GST_FOURCC_ARGS (code), length);

    if (code == GST_MAKE_FOURCC ('I', 'H', 'D', 'R')) {
      /* first 8 bytes of IHDR are width and height */
      if (!gst_byte_reader_get_uint32_be (&reader, &width))
        goto beach;
      if (!gst_byte_reader_get_uint32_be (&reader, &height))
        goto beach;
      length -= 8;
    } else if (code == GST_MAKE_FOURCC ('I', 'D', 'A', 'T')) {
      /* request enough data for this chunk plus a following
       * chunk header and an IEND */
      gst_base_parse_set_min_frame_size (parse,
          gst_byte_reader_get_pos (&reader) + 4 + length + 12);
    }

    /* skip chunk payload remainder plus 4-byte CRC */
    if (!gst_byte_reader_skip (&reader, length + 4))
      goto beach;

    if (code == GST_MAKE_FOURCC ('I', 'E', 'N', 'D')) {
      /* the start code and at least 2 empty frames (IHDR and IEND) */
      gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);

      if (pngparse->width != width || pngparse->height != height) {
        GstCaps *caps, *sink_caps;

        pngparse->height = height;
        pngparse->width = width;

        caps = gst_caps_new_simple ("image/png",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);

        /* propagate upstream framerate if the sink caps carry one */
        sink_caps =
            gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (pngparse));
        if (sink_caps) {
          GstStructure *st;
          gint fr_num, fr_denom;

          st = gst_caps_get_structure (sink_caps, 0);
          if (st
              && gst_structure_get_fraction (st, "framerate", &fr_num,
                  &fr_denom)) {
            gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
                fr_num, fr_denom, NULL);
          } else {
            GST_WARNING_OBJECT (pngparse, "No framerate set");
          }
          gst_caps_unref (sink_caps);
        }

        if (!gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps))
          ret = GST_FLOW_NOT_NEGOTIATED;

        gst_caps_unref (caps);

        if (ret != GST_FLOW_OK)
          goto beach;
      }

      gst_buffer_unmap (frame->buffer, &map);
      return gst_base_parse_finish_frame (parse, frame,
          gst_byte_reader_get_pos (&reader));
    }
  }

beach:
  gst_buffer_unmap (frame->buffer, &map);
  return ret;
}
/* Instance init: request at least 6 bytes and reset parser state. */
static void
gst_ac3_parse_init (GstAc3Parse * ac3parse)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 6);
  gst_ac3_parse_reset (ac3parse);
}
/* (GStreamer 0.10 API) Validates one Dirac frame in the buffer:
 * requires a "BBCD" sync word at offset 0, parses the frame header for
 * the frame size, and — unless we already have sync or are draining —
 * also verifies that the next frame starts with a sync word before
 * declaring the frame valid. */
static gboolean
gst_dirac_parse_check_valid_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
  GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (frame->buffer);
  GstDiracParse *diracparse = GST_DIRAC_PARSE (parse);
  int off;
  guint32 next_header;
  gboolean sync;
  gboolean drain;

  /* need at least one full 13-byte parse-info header */
  if (G_UNLIKELY (GST_BUFFER_SIZE (frame->buffer) < 13))
    return FALSE;

  off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
      0x42424344, 0, GST_BUFFER_SIZE (frame->buffer));

  if (off < 0) {
    /* no sync word found; keep the last 3 bytes, they may be a prefix */
    *skipsize = GST_BUFFER_SIZE (frame->buffer) - 3;
    return FALSE;
  }

  GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);

  if (off > 0) {
    GST_ERROR ("skipping %d", off);
    *skipsize = off;
    return FALSE;
  }

  if (!gst_dirac_parse_frame_header (diracparse, frame->buffer, framesize)) {
    GST_ERROR ("bad header");
    *skipsize = 3;
    return FALSE;
  }

  GST_LOG ("framesize %d", *framesize);

  sync = GST_BASE_PARSE_FRAME_SYNC (frame);
  drain = GST_BASE_PARSE_FRAME_DRAIN (frame);

  if (!sync && !drain) {
    guint32 next_sync_word = 0;

    /* peek at the next frame's sync word to confirm we are aligned */
    next_header = GST_READ_UINT32_BE (GST_BUFFER_DATA (frame->buffer) + 5);

    GST_LOG ("next header %d", next_header);

    if (!gst_byte_reader_skip (&reader, next_header) ||
        !gst_byte_reader_get_uint32_be (&reader, &next_sync_word)) {
      /* next sync word not in buffer yet: ask baseparse for more data */
      gst_base_parse_set_min_frame_size (parse, next_header + 4);
      *skipsize = 0;
      return FALSE;
    } else {
      if (next_sync_word != 0x42424344) {
        *skipsize = 3;
        return FALSE;
      } else {
        gst_base_parse_set_min_frame_size (parse, next_header);
      }
    }
  }

  return TRUE;
}
/* Parses one IVF frame: a 12-byte frame header (4-byte payload size,
 * 8-byte pts) followed by the codec payload.  Extracts the payload into
 * frame->out_buffer, sniffs key-frame resolution for VP8, timestamps
 * from the IVF pts, and finishes the frame.
 *
 * Fix: the availability check compared against IVF_FILE_HEADER_SIZE
 * (the 32-byte file header) instead of IVF_FRAME_HEADER_SIZE (the
 * 12-byte frame header actually read here), which could stall the
 * parser when fewer than 32 bytes remained.
 */
static GstFlowReturn
gst_ivf_parse_handle_frame_data (GstIvfParse * ivf, GstBaseParseFrame * frame,
    gint * skipsize)
{
  GstBuffer *const buffer = frame->buffer;
  GstMapInfo map;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *out_buffer;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  if (map.size >= IVF_FRAME_HEADER_SIZE) {
    guint32 frame_size = GST_READ_UINT32_LE (map.data);
    guint64 frame_pts = GST_READ_UINT64_LE (map.data + 4);

    GST_LOG_OBJECT (ivf,
        "Read frame header: size %u, pts %" G_GUINT64_FORMAT, frame_size,
        frame_pts);

    if (map.size < IVF_FRAME_HEADER_SIZE + frame_size) {
      /* full payload not yet available: raise the minimum frame size
       * so baseparse gathers the whole frame before calling us again */
      gst_base_parse_set_min_frame_size (GST_BASE_PARSE_CAST (ivf),
          IVF_FRAME_HEADER_SIZE + frame_size);
      gst_buffer_unmap (buffer, &map);
      *skipsize = 0;
      goto end;
    }

    gst_buffer_unmap (buffer, &map);

    /* Eventually, we would need the buffer memory in a merged state anyway */
    out_buffer = gst_buffer_copy_region (buffer,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
        GST_BUFFER_COPY_META | GST_BUFFER_COPY_MEMORY |
        GST_BUFFER_COPY_MERGE, IVF_FRAME_HEADER_SIZE, frame_size);
    if (!out_buffer) {
      GST_ERROR_OBJECT (ivf, "Failed to copy frame buffer");
      ret = GST_FLOW_ERROR;
      *skipsize = IVF_FRAME_HEADER_SIZE + frame_size;
      goto end;
    }
    gst_buffer_replace (&frame->out_buffer, out_buffer);
    gst_buffer_unref (out_buffer);

    /* Detect resolution changes on key frames */
    if (gst_buffer_map (frame->out_buffer, &map, GST_MAP_READ)) {
      guint32 width, height;

      if (ivf->fourcc == GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
        guint32 frame_tag;

        frame_tag = GST_READ_UINT24_LE (map.data);
        if (!(frame_tag & 0x01) && map.size >= 10) {    /* key frame */
          GST_DEBUG_OBJECT (ivf, "key frame detected");

          width = GST_READ_UINT16_LE (map.data + 6) & 0x3fff;
          height = GST_READ_UINT16_LE (map.data + 8) & 0x3fff;
          gst_ivf_parse_set_size (ivf, width, height);
        }
      } else if (ivf->fourcc == GST_MAKE_FOURCC ('V', 'P', '9', '0')) {
        /* Fixme: Add vp9 frame header parsing? */
      } else if (ivf->fourcc == GST_MAKE_FOURCC ('A', 'V', '0', '1')) {
        /* Fixme: Add av1 frame header parsing? */
        /* This would allow to parse dynamic resolution changes */
        /* implement when gstav1parser is ready */
      }
      gst_buffer_unmap (frame->out_buffer, &map);
    }

    if (ivf->fps_n > 0) {
      /* convert the IVF pts (in timebase units) to nanoseconds */
      GST_BUFFER_TIMESTAMP (out_buffer) =
          gst_util_uint64_scale_int (GST_SECOND * frame_pts, ivf->fps_d,
          ivf->fps_n);
    }

    gst_ivf_parse_update_src_caps (ivf);

    ret = gst_base_parse_finish_frame (GST_BASE_PARSE_CAST (ivf), frame,
        IVF_FRAME_HEADER_SIZE + frame_size);
    *skipsize = 0;
  } else {
    GST_LOG_OBJECT (ivf, "Frame data not yet available.");
    gst_buffer_unmap (buffer, &map);
    *skipsize = 0;
  }

end:
  return ret;
}
/* Instance init: request at least 4 bytes and reset parser state. */
static void
gst_irtsp_parse_init (GstIRTSPParse * IRTSPParse, GstIRTSPParseClass * klass)
{
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (IRTSPParse), 4);
  gst_irtsp_parse_reset (IRTSPParse);
}
static gboolean gst_h263_parse_check_valid_frame (GstBaseParse * parse, GstBaseParseFrame * frame, guint * framesize, gint * skipsize) { GstH263Parse *h263parse; GstBuffer *buffer; guint psc_pos, next_psc_pos; h263parse = GST_H263_PARSE (parse); buffer = frame->buffer; if (GST_BUFFER_SIZE (buffer) < 3) return FALSE; psc_pos = find_psc (buffer, 0); if (psc_pos == -1) { /* PSC not found, need more data */ if (GST_BUFFER_SIZE (buffer) > 3) psc_pos = GST_BUFFER_SIZE (buffer) - 3; else psc_pos = 0; goto more; } /* Found the start of the frame, now try to find the end */ next_psc_pos = psc_pos + 3; next_psc_pos = find_psc (buffer, next_psc_pos); if (next_psc_pos == -1) { if (GST_BASE_PARSE_FRAME_DRAIN (frame)) /* FLUSH/EOS, it's okay if we can't find the next frame */ next_psc_pos = GST_BUFFER_SIZE (buffer); else goto more; } /* We should now have a complete frame */ /* If this is the first frame, parse and set srcpad caps */ if (h263parse->state == PARSING) { H263Params params = { 0, }; GstFlowReturn res; res = gst_h263_parse_get_params (¶ms, buffer, FALSE, &h263parse->state); if (res != GST_FLOW_OK || h263parse->state != GOT_HEADER) { GST_WARNING ("Couldn't parse header - setting passthrough mode"); gst_base_parse_set_format (parse, GST_BASE_PARSE_FORMAT_PASSTHROUGH, TRUE); } else { /* Set srcpad caps since we now have sufficient information to do so */ gst_h263_parse_set_src_caps (h263parse, ¶ms); } } *skipsize = psc_pos; *framesize = next_psc_pos - psc_pos; /* XXX: After getting a keyframe, should we adjust min_frame_size to * something smaller so we don't end up collecting too many non-keyframes? */ GST_DEBUG ("Found a frame of size %d at pos %d", *framesize, *skipsize); return TRUE; more: /* Ask for 1024 bytes more - this is an arbitrary choice */ gst_base_parse_set_min_frame_size (parse, GST_BUFFER_SIZE (buffer) + 1024); *skipsize = psc_pos; return FALSE; }
/* Validates and finishes one (E-)AC-3 frame.
 *
 * Finds the 0x0b77 sync word, parses the frame header, and — for
 * IEC61937 alignment — accumulates frames until 6 audio blocks are
 * collected from each substream.  When resyncing, the next frame's
 * sync word is verified before the frame is accepted.  Updates srcpad
 * caps and the baseparse frame rate when stream parameters change. */
static GstFlowReturn
gst_ac3_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  GstByteReader reader;
  gint off;
  gboolean lost_sync, draining, eac, more = FALSE;
  guint frmsiz, blocks, sid;
  guint rate, chans;
  gboolean update_rate = FALSE;
  gint framesize = 0;
  gint have_blocks = 0;
  GstMapInfo map;
  gboolean ret = FALSE;
  GstFlowReturn res = GST_FLOW_OK;

  gst_buffer_map (buf, &map, GST_MAP_READ);

  /* need at least sync word + enough header to parse */
  if (G_UNLIKELY (map.size < 8)) {
    *skipsize = 1;
    goto cleanup;
  }

  gst_byte_reader_init (&reader, map.data, map.size);
  /* scan for the 16-bit 0x0b77 sync word */
  off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
      0, map.size);

  GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);

  /* didn't find anything that looks like a sync word, skip */
  if (off < 0) {
    *skipsize = map.size - 3;
    goto cleanup;
  }

  /* possible frame header, but not at offset 0? skip bytes before sync */
  if (off > 0) {
    *skipsize = off;
    goto cleanup;
  }

  /* make sure the values in the frame header look sane */
  if (!gst_ac3_parse_frame_header (ac3parse, buf, 0, &frmsiz, &rate, &chans,
          &blocks, &sid, &eac)) {
    *skipsize = off + 2;
    goto cleanup;
  }

  GST_LOG_OBJECT (parse, "size: %u, blocks: %u, rate: %u, chans: %u", frmsiz,
      blocks, rate, chans);

  framesize = frmsiz;

  if (G_UNLIKELY (g_atomic_int_get (&ac3parse->align) ==
          GST_AC3_PARSE_ALIGN_NONE))
    gst_ac3_parse_set_alignment (ac3parse, eac);

  GST_LOG_OBJECT (parse, "got frame");

  lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
  draining = GST_BASE_PARSE_DRAINING (parse);

  if (g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937) {
    /* We need 6 audio blocks from each substream, so we keep going forwards
     * till we have it */

    g_assert (blocks > 0);
    GST_LOG_OBJECT (ac3parse, "Need %d frames before pushing", 6 / blocks);

    if (sid != 0) {
      /* We need the first substream to be the one with id 0 */
      GST_LOG_OBJECT (ac3parse, "Skipping till we find sid 0");
      *skipsize = off + 2;
      goto cleanup;
    }

    framesize = 0;

    /* Loop till we have 6 blocks per substream */
    for (have_blocks = 0; !more && have_blocks < 6; have_blocks += blocks) {
      /* Loop till we get one frame from each substream */
      do {
        framesize += frmsiz;

        if (!gst_byte_reader_skip (&reader, frmsiz) ||
            map.size < (framesize + 6)) {
          /* ran out of data before collecting all substreams */
          more = TRUE;
          break;
        }

        if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, &frmsiz,
                NULL, NULL, NULL, &sid, &eac)) {
          *skipsize = off + 2;
          goto cleanup;
        }
      } while (sid);
    }

    /* We're now at the next frame, so no need to skip if resyncing */
    frmsiz = 0;
  }

  if (lost_sync && !draining) {
    guint16 word = 0;

    GST_DEBUG_OBJECT (ac3parse, "resyncing; checking next frame syncword");

    if (more || !gst_byte_reader_skip (&reader, frmsiz) ||
        !gst_byte_reader_get_uint16_be (&reader, &word)) {
      GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");
      gst_base_parse_set_min_frame_size (parse, framesize + 8);
      *skipsize = 0;
      goto cleanup;
    } else {
      if (word != 0x0b77) {
        GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);
        *skipsize = off + 2;
        goto cleanup;
      } else {
        /* ok, got sync now, let's assume constant frame size */
        gst_base_parse_set_min_frame_size (parse, framesize);
      }
    }
  }

  /* expect to have found a frame here */
  g_assert (framesize);
  ret = TRUE;

  /* arrange for metadata setup */
  if (G_UNLIKELY (sid)) {
    /* dependent frame, no need to (ac)count for or consider further */
    GST_LOG_OBJECT (parse, "sid: %d", sid);
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    /* TODO maybe also mark as DELTA_UNIT,
     * if that does not surprise baseparse elsewhere */
    /* occupies same time space as previous base frame */
    if (G_LIKELY (GST_BUFFER_TIMESTAMP (buf) >= GST_BUFFER_DURATION (buf)))
      GST_BUFFER_TIMESTAMP (buf) -= GST_BUFFER_DURATION (buf);
    /* only shortcut if we already arranged for caps */
    if (G_LIKELY (ac3parse->sample_rate > 0))
      goto cleanup;
  }

  if (G_UNLIKELY (ac3parse->sample_rate != rate || ac3parse->channels != chans
          || ac3parse->eac != eac)) {
    GstCaps *caps = gst_caps_new_simple (eac ? "audio/x-eac3" : "audio/x-ac3",
        "framed", G_TYPE_BOOLEAN, TRUE, "rate", G_TYPE_INT, rate,
        "channels", G_TYPE_INT, chans, NULL);
    gst_caps_set_simple (caps, "alignment", G_TYPE_STRING,
        g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937 ?
        "iec61937" : "frame", NULL);
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);

    ac3parse->sample_rate = rate;
    ac3parse->channels = chans;
    ac3parse->eac = eac;

    update_rate = TRUE;
  }

  if (G_UNLIKELY (ac3parse->blocks != blocks)) {
    ac3parse->blocks = blocks;

    update_rate = TRUE;
  }

  if (G_UNLIKELY (update_rate))
    gst_base_parse_set_frame_rate (parse, rate, 256 * blocks, 2, 2);

cleanup:
  gst_buffer_unmap (buf, &map);

  /* NOTE(review): map.size is read here after gst_buffer_unmap(); this
   * relies on unmap leaving the GstMapInfo fields intact — confirm */
  if (ret && framesize <= map.size) {
    res = gst_base_parse_finish_frame (parse, frame, framesize);
  }

  return res;
}
/* (GStreamer 0.10 API) Validates one H.264 access unit.
 *
 * Walks byte-stream NALs delimited by start codes, resuming from the
 * positions cached on @h264parse (last_nal_pos / next_sc_pos) across
 * calls, feeding each NAL to gst_h264_parse_process_nal, until
 * gst_h264_parse_collect_nal signals a complete AU (or the buffer is
 * drained). */
static gboolean
gst_h264_parse_check_valid_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
  GstH264Parse *h264parse = GST_H264_PARSE (parse);
  GstBuffer *buffer = frame->buffer;
  gint sc_pos, nal_pos, next_sc_pos, next_nal_pos;
  guint8 *data;
  guint size;
  gboolean drain;

  /* expect at least 3 bytes startcode == sc, and 2 bytes NALU payload */
  if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < 5))
    return FALSE;

  /* need to configure aggregation */
  if (G_UNLIKELY (h264parse->format == GST_H264_PARSE_FORMAT_NONE))
    gst_h264_parse_negotiate (h264parse);

  data = GST_BUFFER_DATA (buffer);
  size = GST_BUFFER_SIZE (buffer);

  GST_LOG_OBJECT (h264parse, "last_nal_pos: %d, last_scan_pos %d",
      h264parse->last_nal_pos, h264parse->next_sc_pos);

  /* resume from the positions cached by a previous partial scan */
  nal_pos = h264parse->last_nal_pos;
  next_sc_pos = h264parse->next_sc_pos;

  if (!next_sc_pos) {
    /* fresh scan: locate the first start code */
    sc_pos = gst_h264_parse_find_sc (buffer, 0);

    if (sc_pos == -1) {
      /* SC not found, need more data */
      sc_pos = GST_BUFFER_SIZE (buffer) - 3;
      goto more;
    }

    nal_pos = sc_pos + 3;
    next_sc_pos = nal_pos;
    /* sc might have 2 or 3 0-bytes */
    if (sc_pos > 0 && data[sc_pos - 1] == 00)
      sc_pos--;
    GST_LOG_OBJECT (h264parse, "found sc at offset %d", sc_pos);
  } else {
    /* previous checks already arrange sc at start */
    sc_pos = 0;
  }

  drain = GST_BASE_PARSE_FRAME_DRAIN (frame);
  while (TRUE) {
    gint prev_sc_pos;

    next_sc_pos = gst_h264_parse_find_sc (buffer, next_sc_pos);
    if (next_sc_pos == -1) {
      GST_LOG_OBJECT (h264parse, "no next sc");
      if (drain) {
        /* FLUSH/EOS, it's okay if we can't find the next frame */
        next_sc_pos = size;
        next_nal_pos = size;
      } else {
        next_sc_pos = size - 3;
        goto more;
      }
    } else {
      next_nal_pos = next_sc_pos + 3;
      if (data[next_sc_pos - 1] == 00)
        next_sc_pos--;
      GST_LOG_OBJECT (h264parse, "found next sc at offset %d", next_sc_pos);
      /* need at least 1 more byte of next NAL */
      if (!drain && (next_nal_pos == size - 1))
        goto more;
    }

    /* determine nal's sc position */
    prev_sc_pos = nal_pos - 3;
    g_assert (prev_sc_pos >= 0);
    if (prev_sc_pos > 0 && data[prev_sc_pos - 1] == 0)
      prev_sc_pos--;

    /* already consume and gather info from NAL */
    gst_h264_parse_process_nal (h264parse, data, prev_sc_pos, nal_pos,
        next_sc_pos - nal_pos);
    if (next_nal_pos >= size - 1 ||
        gst_h264_parse_collect_nal (h264parse, data + nal_pos,
            data + next_nal_pos))
      break;

    /* move along */
    next_sc_pos = nal_pos = next_nal_pos;
  }

  *skipsize = sc_pos;
  *framesize = next_sc_pos - sc_pos;

  return TRUE;

more:
  /* Ask for 1024 bytes more - this is an arbitrary choice */
  gst_base_parse_set_min_frame_size (parse, GST_BUFFER_SIZE (buffer) + 1024);

  /* skip up to initial startcode */
  *skipsize = sc_pos;
  /* resume scanning here next time */
  h264parse->last_nal_pos = nal_pos;
  h264parse->next_sc_pos = next_sc_pos;

  return FALSE;
}
/* GObject property setter for GstRawVideoParse. */
static void
gst_raw_video_parse_set_property (GObject * object, guint prop_id,
    GValue const *value, GParamSpec * pspec)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (object);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (object);
  GstRawVideoParseConfig *props_cfg = &(raw_video_parse->properties_config);

  /* All properties are handled similarly:
   * - if the new value is the same as the current value, nothing is done
   * - the parser lock is held while the new value is set
   * - if the properties config is the current config, the source caps are
   *   invalidated to ensure that the code in handle_frame pushes a new CAPS
   *   event out
   * - properties that affect the video frame size call the function to update
   *   the info and also call gst_base_parse_set_min_frame_size() to ensure
   *   that the minimum frame size can hold 1 frame (= one sample for each
   *   channel); to ensure that the min frame size includes any extra padding,
   *   it is set to the result of gst_raw_video_parse_get_config_frame_size()
   * - property configuration values that require video info updates aren't
   *   written directly into the video info structure, but in the extra
   *   fields instead (gst_raw_video_parse_update_info() then copies the values
   *   from these fields into the video info); see the documentation inside
   *   gst_raw_video_parse_update_info() for the reason why
   */

  switch (prop_id) {
    case PROP_WIDTH:
    {
      gint new_width = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_width != props_cfg->width) {
        props_cfg->width = new_width;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_HEIGHT:
    {
      gint new_height = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_height != props_cfg->height) {
        props_cfg->height = new_height;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FORMAT:
    {
      GstVideoFormat new_format = g_value_get_enum (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_format != props_cfg->format) {
        props_cfg->format = new_format;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PIXEL_ASPECT_RATIO:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* The pixel aspect ratio does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->pixel_aspect_ratio_n =
          GST_VIDEO_INFO_PAR_N (&(props_cfg->info)) =
          gst_value_get_fraction_numerator (value);
      props_cfg->pixel_aspect_ratio_d =
          GST_VIDEO_INFO_PAR_D (&(props_cfg->info)) =
          gst_value_get_fraction_denominator (value);
      GST_DEBUG_OBJECT (raw_video_parse, "setting pixel aspect ratio to %u/%u",
          props_cfg->pixel_aspect_ratio_n, props_cfg->pixel_aspect_ratio_d);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FRAMERATE:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* The framerate does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->framerate_n = GST_VIDEO_INFO_FPS_N (&(props_cfg->info)) =
          gst_value_get_fraction_numerator (value);
      props_cfg->framerate_d = GST_VIDEO_INFO_FPS_D (&(props_cfg->info)) =
          gst_value_get_fraction_denominator (value);
      GST_DEBUG_OBJECT (raw_video_parse, "setting framerate to %u/%u",
          props_cfg->framerate_n, props_cfg->framerate_d);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_INTERLACED:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* Interlacing does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->interlaced = g_value_get_boolean (value);
      GST_VIDEO_INFO_INTERLACE_MODE (&(props_cfg->info)) =
          props_cfg->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
          GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_TOP_FIELD_FIRST:
    {
      /* The top-field-first flag is a detail related to
       * interlacing, so no video info update is needed */

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
      props_cfg->top_field_first = g_value_get_boolean (value);
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PLANE_STRIDES:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      guint n_planes;
      guint i;

      /* If no valarray is given, then disable custom
       * plane strides & offsets and stick to the
       * standard computed ones */
      if (valarray == NULL) {
        GST_DEBUG_OBJECT (raw_video_parse,
            "custom plane strides & offsets disabled");
        props_cfg->custom_plane_strides = FALSE;
        gst_raw_video_parse_update_info (props_cfg);
        break;
      }

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("plane strides property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));

      /* Check that the valarray holds the right number of values */
      if (valarray->n_values != n_planes) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("incorrect number of elements in plane strides property"),
            ("expected: %u, got: %u", n_planes, valarray->n_values));
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      /* Copy the values to the stride array */
      for (i = 0; i < n_planes; ++i) {
        GValue *val = g_value_array_get_nth (valarray, i);
        props_cfg->plane_strides[i] = g_value_get_uint (val);
        GST_DEBUG_OBJECT (raw_video_parse, "plane #%u stride: %d", i,
            props_cfg->plane_strides[i]);
      }

      props_cfg->custom_plane_strides = TRUE;

      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PLANE_OFFSETS:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      guint n_planes;
      guint i;

      /* If no valarray is given, then disable custom
       * plane strides & offsets and stick to the
       * standard computed ones */
      if (valarray == NULL) {
        GST_DEBUG_OBJECT (raw_video_parse,
            "custom plane strides & offsets disabled");
        props_cfg->custom_plane_strides = FALSE;
        gst_raw_video_parse_update_info (props_cfg);
        break;
      }

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("plane offsets property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));

      /* Check that the valarray holds the right number of values */
      if (valarray->n_values != n_planes) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("incorrect number of elements in plane offsets property"),
            ("expected: %u, got: %u", n_planes, valarray->n_values));
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      /* Copy the values to the offset array */
      for (i = 0; i < n_planes; ++i) {
        GValue *val = g_value_array_get_nth (valarray, i);
        props_cfg->plane_offsets[i] = g_value_get_uint (val);
        GST_DEBUG_OBJECT (raw_video_parse, "plane #%u offset: %" G_GSIZE_FORMAT,
            i, props_cfg->plane_offsets[i]);
      }

      /* NOTE(review): a single custom_plane_strides flag gates both
       * custom strides and custom offsets — confirm that is intended */
      props_cfg->custom_plane_strides = TRUE;

      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FRAME_STRIDE:
    {
      /* The frame stride does not affect the video frame size,
       * so it is just set directly without any updates */

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      props_cfg->frame_stride = g_value_get_uint (value);
      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property setter for GstRawBaseParse.
 *
 * Only PROP_USE_SINK_CAPS is handled here; it switches the element between
 * the sink-caps config and the properties config via the subclass vfuncs.
 */
static void
gst_raw_base_parse_set_property (GObject * object, guint prop_id,
    GValue const *value, GParamSpec * pspec)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (object);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
  GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (object);

  /* All three vfuncs are used below (get_config_frame_size is called once
   * the config is ready), so subclasses must provide them. The original
   * code only asserted the first two; assert get_config_frame_size as well,
   * consistent with gst_raw_base_parse_set_sink_caps. */
  g_assert (klass->is_config_ready);
  g_assert (klass->set_current_config);
  g_assert (klass->get_config_frame_size);

  switch (prop_id) {
    case PROP_USE_SINK_CAPS:
    {
      gboolean new_state, cur_state;
      GstRawBaseParseConfig new_config;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* Check to ensure nothing is done if the value stays the same */
      new_state = g_value_get_boolean (value);
      cur_state = gst_raw_base_parse_is_using_sink_caps (raw_base_parse);
      if (new_state == cur_state) {
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      GST_DEBUG_OBJECT (raw_base_parse, "switching to %s config",
          new_state ? "sink caps" : "properties");
      new_config = new_state ? GST_RAW_BASE_PARSE_CONFIG_SINKCAPS :
          GST_RAW_BASE_PARSE_CONFIG_PROPERTIES;

      if (!klass->set_current_config (raw_base_parse, new_config)) {
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        GST_ELEMENT_ERROR (raw_base_parse, STREAM, FAILED,
            ("could not set new current config"),
            ("use-sink-caps property: %d", new_state));
        break;
      }

      /* Update the minimum frame size if the config is ready. This ensures that
       * the next buffer that is passed to handle_frame contains complete frames.
       * If the current config is the properties config, then it will always be
       * ready, and its frame size will be valid. Ensure that the baseparse minimum
       * frame size is set properly then.
       * If the current config is the sink caps config, then it will initially not
       * be ready until the sink caps are set, so the minimum frame size cannot be
       * set right here. However, since the caps always come in *before* the actual
       * data, the config will be readied in the set_sink_caps function, and be ready
       * by the time handle_frame is called. There, the minimum frame size is set as
       * well. */
      if (klass->is_config_ready (raw_base_parse,
              GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
        gsize frame_size = klass->get_config_frame_size (raw_base_parse,
            GST_RAW_BASE_PARSE_CONFIG_CURRENT);
        gst_base_parse_set_min_frame_size (base_parse, frame_size);
      }

      /* Since the current config was switched, the source caps change. Ensure the
       * new caps are pushed downstream by setting src_caps_set to FALSE: This way,
       * the next handle_frame call will take care of that. */
      raw_base_parse->src_caps_set = FALSE;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);

      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GstBaseParse::set_caps vfunc.
 *
 * Converts the new sink caps into the sink-caps config via the subclass,
 * and, if that config is the currently active one, pushes updated src caps
 * downstream and adjusts the baseparse minimum frame size so that input
 * buffers passed to handle_frame always hold complete frames.
 *
 * Returns: TRUE on success, FALSE if the caps could not be converted.
 *
 * FIX: the original error paths jumped to the done label with the config
 * mutex still locked, leaking the lock and deadlocking the element on the
 * next lock attempt; both paths now unlock before bailing out.
 */
static gboolean
gst_raw_base_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps)
{
  gboolean ret = FALSE;
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
  GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);

  g_assert (klass->set_config_from_caps);
  g_assert (klass->get_caps_from_config);
  g_assert (klass->get_config_frame_size);

  GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);

  GST_DEBUG_OBJECT (parse, "getting config from new sink caps");

  /* Convert the new sink caps to sink caps config. This also
   * readies the config. */
  ret = klass->set_config_from_caps (raw_base_parse,
      GST_RAW_BASE_PARSE_CONFIG_SINKCAPS, caps);
  if (!ret) {
    GST_ERROR_OBJECT (raw_base_parse, "could not get config from sink caps");
    /* Unlock before bailing out (the lock above is still held here) */
    GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
    goto done;
  }

  /* If the sink caps config is currently active, push caps downstream,
   * set the minimum frame size (to guarantee that input buffers hold
   * complete frames), and update the src_caps_set flag. If the sink
   * caps config isn't the currently active config, just exit, since in
   * that case, the caps will always be pushed downstream in handle_frame. */
  if (gst_raw_base_parse_is_using_sink_caps (raw_base_parse)) {
    GstCaps *new_src_caps;
    gsize frame_size;

    GST_DEBUG_OBJECT (parse,
        "sink caps config is the current one; trying to push new caps downstream");

    /* Convert back to caps. The caps may have changed, for example
     * audio/x-unaligned-raw may have been replaced with audio/x-raw.
     * (Also, this keeps the behavior in sync with that of the block
     * in handle_frame that pushes caps downstream if not done already.) */
    if (!klass->get_caps_from_config (raw_base_parse,
            GST_RAW_BASE_PARSE_CONFIG_CURRENT, &new_src_caps)) {
      GST_ERROR_OBJECT (raw_base_parse,
          "could not get src caps from current config");
      /* ret is still FALSE here; unlock before bailing out */
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
      goto done;
    }

    GST_DEBUG_OBJECT (raw_base_parse,
        "got new sink caps; updating src caps to %" GST_PTR_FORMAT,
        (gpointer) new_src_caps);

    frame_size = klass->get_config_frame_size (raw_base_parse,
        GST_RAW_BASE_PARSE_CONFIG_CURRENT);
    gst_base_parse_set_min_frame_size (parse, frame_size);

    raw_base_parse->src_caps_set = TRUE;

    GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);

    /* Push caps outside of the lock */
    gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (raw_base_parse),
        gst_event_new_caps (new_src_caps)
        );

    gst_caps_unref (new_src_caps);
  } else {
    GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
  }

  ret = TRUE;

done:
  return ret;
}
/* GstBaseParse::start vfunc for the MPEG audio parse test element.
 *
 * Sets the minimum amount of data baseparse must accumulate before calling
 * handle_frame (6 bytes here — presumably enough to inspect a frame header;
 * TODO(review): confirm against the parser's header-scan code).
 *
 * Returns: always TRUE (startup cannot fail).
 */
static gboolean
test_mpeg_audio_parse_start (GstBaseParse * parse)
{
  const guint min_frame_size = 6;

  gst_base_parse_set_min_frame_size (parse, min_frame_size);

  return TRUE;
}