/* Audio sink-pad chain function.
 *
 * Pushes the incoming audio buffer into self->adapter, resynchronizing the
 * running sample counter on discontinuities, then repeatedly waits (on
 * self->cond, under self->mutex) until at least two video frame timestamps
 * are queued in self->vtimeq.  For each pair of consecutive video
 * timestamps it takes the audio that falls between them out of the adapter,
 * computes its RMS via update_rms_from_buffer() and posts the resulting
 * message on the bus.
 *
 * Returns GST_FLOW_FLUSHING (unreffing inbuf) when flushing or shutting
 * down; otherwise pushes inbuf downstream on self->asrcpad and returns the
 * resulting flow value.
 */
static GstFlowReturn
gst_videoframe_audiolevel_asink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * inbuf)
{
  GstClockTime timestamp, cur_time;
  GstVideoFrameAudioLevel *self = GST_VIDEOFRAME_AUDIOLEVEL (parent);
  GstBuffer *buf;
  gsize inbuf_size;
  guint64 start_offset, end_offset;
  GstClockTime running_time;
  gint rate, bpf;
  gboolean discont = FALSE;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  running_time =
      gst_segment_to_running_time (&self->asegment, GST_FORMAT_TIME,
      timestamp);

  rate = GST_AUDIO_INFO_RATE (&self->ainfo);
  bpf = GST_AUDIO_INFO_BPF (&self->ainfo);
  /* Expected first/last sample index of this buffer, derived from its
   * timestamp and size */
  start_offset = gst_util_uint64_scale (timestamp, rate, GST_SECOND);
  inbuf_size = gst_buffer_get_size (inbuf);
  end_offset = start_offset + inbuf_size / bpf;

  g_mutex_lock (&self->mutex);

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || self->first_time == GST_CLOCK_TIME_NONE) {
    discont = TRUE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= self->next_offset)
      diff = self->next_offset - start_offset;
    else
      diff = start_offset - self->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (self->alignment_threshold, rate,
        GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (self->discont_wait > 0) {
        /* only declare a discont after it has persisted for discont_wait */
        if (self->discont_time == GST_CLOCK_TIME_NONE) {
          self->discont_time = timestamp;
        } else if (timestamp - self->discont_time >= self->discont_wait) {
          discont = TRUE;
          self->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (self->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      self->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (self->next_offset != -1)
      GST_INFO_OBJECT (pad, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT, self->next_offset,
          start_offset);
    self->total_frames = 0;
    self->first_time = running_time;
    self->next_offset = end_offset;
  } else {
    self->next_offset += inbuf_size / bpf;
  }

  /* adapter keeps its own ref; inbuf is still pushed downstream at the end */
  gst_adapter_push (self->adapter, gst_buffer_ref (inbuf));

  GST_DEBUG_OBJECT (self, "Queue length %i",
      g_queue_get_length (&self->vtimeq));

  while (TRUE) {
    GstClockTime *vt0, *vt1;
    GstClockTime vtemp;
    GstMessage *msg;
    gsize bytes, available_bytes;

    vtemp = GST_CLOCK_TIME_NONE;

    /* wait until we have two video timestamps to bracket the audio, or a
     * flush/EOS/shutdown is signalled from the video/streaming side */
    while (!(g_queue_get_length (&self->vtimeq) >= 2
            || self->video_eos_flag || self->audio_flush_flag
            || self->shutdown_flag))
      g_cond_wait (&self->cond, &self->mutex);

    if (self->audio_flush_flag || self->shutdown_flag) {
      g_mutex_unlock (&self->mutex);
      gst_buffer_unref (inbuf);
      return GST_FLOW_FLUSHING;
    } else if (self->video_eos_flag) {
      GST_DEBUG_OBJECT (self, "Video EOS flag alert");

      /* nothing to do here if queue is empty */
      if (g_queue_get_length (&self->vtimeq) == 0)
        break;

      if (g_queue_get_length (&self->vtimeq) < 2) {
        /* use the video segment position as the closing timestamp */
        vtemp = self->vsegment.position;
      } else if (self->vsegment.position == GST_CLOCK_TIME_NONE) {
        /* g_queue_get_length is surely >= 2 at this point
         * so the adapter isn't empty */
        buf =
            gst_adapter_take_buffer (self->adapter,
            gst_adapter_available (self->adapter));
        if (buf != NULL) {
          GstMessage *msg;

          msg = update_rms_from_buffer (self, buf);
          g_mutex_unlock (&self->mutex);
          gst_element_post_message (GST_ELEMENT (self), msg);
          gst_buffer_unref (buf);
          g_mutex_lock (&self->mutex);  /* we unlock again later */
        }
        break;
      }
    } else if (g_queue_get_length (&self->vtimeq) < 2) {
      /* spurious wakeup; re-evaluate the wait condition */
      continue;
    }

    /* vt0/vt1 bracket the stretch of audio to measure */
    vt0 = g_queue_pop_head (&self->vtimeq);
    if (vtemp == GST_CLOCK_TIME_NONE)
      vt1 = g_queue_peek_head (&self->vtimeq);
    else
      vt1 = &vtemp;

    cur_time =
        self->first_time + gst_util_uint64_scale (self->total_frames,
        GST_SECOND, rate);
    GST_DEBUG_OBJECT (self, "Processing: current time is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (cur_time));
    GST_DEBUG_OBJECT (self, "Total frames is %i with a rate of %d",
        self->total_frames, rate);
    GST_DEBUG_OBJECT (self, "Start time is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (self->first_time));
    GST_DEBUG_OBJECT (self, "Time on top is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (*vt0));

    if (cur_time < *vt0) {
      /* audio lags the first video timestamp: discard (flush) the audio up
       * to *vt0 before measuring */
      guint num_frames =
          gst_util_uint64_scale (*vt0 - cur_time, rate, GST_SECOND);
      bytes = num_frames * GST_AUDIO_INFO_BPF (&self->ainfo);
      available_bytes = gst_adapter_available (self->adapter);
      if (available_bytes == 0) {
        g_queue_push_head (&self->vtimeq, vt0);
        break;
      }
      if (bytes == 0) {
        cur_time = *vt0;
      } else {
        GST_DEBUG_OBJECT (self, "Flushed %" G_GSIZE_FORMAT " out of %"
            G_GSIZE_FORMAT " bytes", bytes, available_bytes);
        gst_adapter_flush (self->adapter, MIN (bytes, available_bytes));
        self->total_frames += num_frames;
        if (available_bytes <= bytes) {
          /* not enough audio yet; put vt0 back and wait for more */
          g_queue_push_head (&self->vtimeq, vt0);
          break;
        }
        cur_time =
            self->first_time + gst_util_uint64_scale (self->total_frames,
            GST_SECOND, rate);
      }
    }

    if (*vt1 > cur_time) {
      bytes =
          GST_AUDIO_INFO_BPF (&self->ainfo) * gst_util_uint64_scale (*vt1 -
          cur_time, rate, GST_SECOND);
    } else {
      bytes = 0;                /* We just need to discard vt0 */
    }

    available_bytes = gst_adapter_available (self->adapter);
    GST_DEBUG_OBJECT (self,
        "Adapter contains %" G_GSIZE_FORMAT " out of %" G_GSIZE_FORMAT
        " bytes", available_bytes, bytes);
    if (available_bytes < bytes) {
      /* not enough audio yet; put vt0 back and wait for the next buffer */
      g_queue_push_head (&self->vtimeq, vt0);
      goto done;
    }

    if (bytes > 0) {
      buf = gst_adapter_take_buffer (self->adapter, bytes);
      g_assert (buf != NULL);
    } else {
      /* Just an empty buffer */
      buf = gst_buffer_new ();
    }

    msg = update_rms_from_buffer (self, buf);
    /* drop the lock while posting to avoid deadlocking with bus handlers */
    g_mutex_unlock (&self->mutex);
    gst_element_post_message (GST_ELEMENT (self), msg);
    g_mutex_lock (&self->mutex);

    gst_buffer_unref (buf);
    g_free (vt0);               /* popped timestamps are heap-allocated */

    if (available_bytes == bytes)
      break;
  }

done:
  g_mutex_unlock (&self->mutex);
  return gst_pad_push (self->asrcpad, inbuf);
}
/* Chain function for the ASF parser.
 *
 * Accumulates input in asfparse->adapter and walks an explicit state
 * machine: HEADERS -> DATA -> PACKETS -> INDEXES.  Each state waits until
 * enough bytes are available, consumes the corresponding ASF object (or
 * fixed-size packets) from the adapter and advances the state.
 *
 * Takes ownership of @buffer (handed to the adapter).  Returns GST_FLOW_OK
 * while waiting for more data, or an error/downstream flow result.
 */
static GstFlowReturn
gst_asf_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstAsfParse *asfparse;
  GstFlowReturn ret = GST_FLOW_OK;

  asfparse = GST_ASF_PARSE (parent);
  gst_adapter_push (asfparse->adapter, buffer);

  switch (asfparse->parse_state) {
    case ASF_PARSING_HEADERS:
      if (asfparse->headers_size == 0 &&
          gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {

        /* we can peek at the object size */
        asfparse->headers_size =
            gst_asf_match_and_peek_obj_size (gst_adapter_map
            (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE),
            &(guids[ASF_HEADER_OBJECT_INDEX]));
        gst_adapter_unmap (asfparse->adapter);

        if (asfparse->headers_size == 0) {
          /* something is wrong, this probably ain't an ASF stream */
          GST_ERROR_OBJECT (asfparse, "ASF starting identifier missing");
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
      if (gst_adapter_available (asfparse->adapter) >= asfparse->headers_size) {
        GstBuffer *headers = gst_adapter_take_buffer (asfparse->adapter,
            asfparse->headers_size);
        if (gst_asf_parse_headers (headers, asfparse->asfinfo)) {
          /* headers parsed OK: forward them and move on to the data object */
          ret = gst_asf_parse_push (asfparse, headers);
          asfparse->parse_state = ASF_PARSING_DATA;
        } else {
          ret = GST_FLOW_ERROR;
          GST_ERROR_OBJECT (asfparse, "Failed to parse headers");
        }
      }
      break;
    case ASF_PARSING_DATA:
      if (asfparse->data_size == 0 &&
          gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {

        /* we can peek at the object size */
        asfparse->data_size =
            gst_asf_match_and_peek_obj_size (gst_adapter_map
            (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE),
            &(guids[ASF_DATA_OBJECT_INDEX]));
        gst_adapter_unmap (asfparse->adapter);

        if (asfparse->data_size == 0) {
          /* something is wrong */
          GST_ERROR_OBJECT (asfparse, "Unexpected object after headers, was "
              "expecting a data object");
          ret = GST_FLOW_ERROR;
          goto end;
        }
      }
      /* if we have received the full data object headers */
      if (gst_adapter_available (asfparse->adapter) >= ASF_DATA_OBJECT_SIZE) {
        ret = gst_asf_parse_parse_data_object (asfparse,
            gst_adapter_take_buffer (asfparse->adapter, ASF_DATA_OBJECT_SIZE));
        if (ret != GST_FLOW_OK) {
          goto end;
        }
        asfparse->parse_state = ASF_PARSING_PACKETS;
      }
      break;
    case ASF_PARSING_PACKETS:
      g_assert (asfparse->asfinfo->packet_size);
      /* take out full fixed-size packets; broadcast streams have no known
       * packet count, so keep going as long as there is data */
      while ((asfparse->asfinfo->broadcast ||
              asfparse->parsed_packets < asfparse->asfinfo->packets_count) &&
          gst_adapter_available (asfparse->adapter) >=
          asfparse->asfinfo->packet_size) {
        GstBuffer *packet = gst_adapter_take_buffer (asfparse->adapter,
            asfparse->asfinfo->packet_size);
        asfparse->parsed_packets++;
        ret = gst_asf_parse_parse_packet (asfparse, packet);
        if (ret != GST_FLOW_OK)
          goto end;
      }
      if (!asfparse->asfinfo->broadcast &&
          asfparse->parsed_packets >= asfparse->asfinfo->packets_count) {
        GST_INFO_OBJECT (asfparse, "Finished parsing packets");
        asfparse->parse_state = ASF_PARSING_INDEXES;
      }
      break;
    case ASF_PARSING_INDEXES:
      /* we currently don't care about any of those objects */
      if (gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {
        guint64 obj_size;
        /* we can peek at the object size */
        obj_size =
            gst_asf_match_and_peek_obj_size (gst_adapter_map
            (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE), NULL);
        gst_adapter_unmap (asfparse->adapter);

        if (gst_adapter_available (asfparse->adapter) >= obj_size) {
          /* forward the whole (ignored) object downstream unparsed */
          GST_DEBUG_OBJECT (asfparse, "Skiping object");
          ret = gst_asf_parse_push (asfparse,
              gst_adapter_take_buffer (asfparse->adapter, obj_size));
          if (ret != GST_FLOW_OK) {
            goto end;
          }
        }
      }
      break;
    default:
      break;
  }

end:
  return ret;
}
/* Chain function for the YUV4MPEG2 decoder.
 *
 * Accumulates data in the adapter; on first data it parses the stream
 * header (NUL-terminating at each newline within the first
 * MAX_HEADER_LENGTH bytes), sets caps on the src pad, and optionally pushes
 * a pending TIME segment.  It then loops extracting "FRAME..." headers plus
 * one frame of y4mdec->info.size bytes each, timestamping by frame index,
 * and pushing downstream.
 *
 * Takes ownership of @buffer.  Returns GST_FLOW_OK while starved of data.
 */
static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  y4mdec = GST_Y4M_DEC (parent);

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;

    /* wait until a full MAX_HEADER_LENGTH window is available before
     * attempting to parse the stream header */
    if (n_avail < MAX_HEADER_LENGTH)
      return GST_FLOW_OK;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0,
        MAX_HEADER_LENGTH);

    /* force termination and split at newlines so header is a C string of
     * just the first line */
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      return GST_FLOW_ERROR;
    }

    /* +1 accounts for the newline we replaced with NUL */
    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_info_to_caps (&y4mdec->info);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    /* translate the byte-based upstream segment to TIME */
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.stop);
    GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.time);
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    seg.start = start;
    seg.stop = stop;
    seg.time = time;
    event = gst_event_new_segment (&seg);

    /* gst_pad_push_event takes ownership of the event */
    gst_pad_push_event (y4mdec->srcpad, event);
    //gst_event_unref (event);

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment.time);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);
  }

  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0,
        MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->info.size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT,
          n_avail, y4mdec->info.size + len + 1);
      break;
    }

    /* drop the FRAME header line (plus its newline), then take the raw
     * picture data */
    gst_adapter_flush (y4mdec->adapter, len + 1);
    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size);

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);

    y4mdec->frame_index++;

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  GST_DEBUG ("returning %d", flow_ret);

  return flow_ret;
}
/* Leave typefinding mode and return to normal pass-through operation.
 *
 * If the element is at least PAUSED and caps were found, the events and
 * buffers cached during typefinding are replayed downstream; otherwise the
 * cached data is simply dropped.  Also detects the push-upstream /
 * pull-only-downstream mismatch and turns it into a meaningful element
 * error instead of the generic gstpad.c failure.
 *
 * The adapter is drained under the object lock; the resulting buffer is
 * pushed (or discarded) after unlocking.
 */
static void
stop_typefinding (GstTypeFindElement * typefind)
{
  GstState state;
  gboolean push_cached_buffers;
  gsize avail;
  GstBuffer *buffer;
  GstClockTime pts, dts;

  gst_element_get_state (GST_ELEMENT (typefind), &state, NULL, 0);

  push_cached_buffers = (state >= GST_STATE_PAUSED && typefind->caps);

  GST_DEBUG_OBJECT (typefind, "stopping typefinding%s",
      push_cached_buffers ? " and pushing cached events and buffers" : "");

  typefind->mode = MODE_NORMAL;
  if (push_cached_buffers)
    gst_type_find_element_send_cached_events (typefind);

  GST_OBJECT_LOCK (typefind);
  avail = gst_adapter_available (typefind->adapter);
  if (avail == 0)
    goto no_data;

  /* preserve the timestamps recorded when the data entered the adapter */
  pts = gst_adapter_prev_pts (typefind->adapter, NULL);
  dts = gst_adapter_prev_dts (typefind->adapter, NULL);
  buffer = gst_adapter_take_buffer (typefind->adapter, avail);
  GST_BUFFER_PTS (buffer) = pts;
  GST_BUFFER_DTS (buffer) = dts;
  GST_BUFFER_OFFSET (buffer) = typefind->initial_offset;
  GST_OBJECT_UNLOCK (typefind);

  if (!push_cached_buffers) {
    gst_buffer_unref (buffer);
  } else {
    GstPad *peer = gst_pad_get_peer (typefind->src);

    /* make sure the user gets a meaningful error message in this case,
     * which is not a core bug or bug of any kind (as the default error
     * message emitted by gstpad.c otherwise would make you think) */
    if (peer && GST_PAD_CHAINFUNC (peer) == NULL) {
      GST_DEBUG_OBJECT (typefind, "upstream only supports push mode, while "
          "downstream element only works in pull mode, erroring out");
      GST_ELEMENT_ERROR (typefind, STREAM, FAILED,
          ("%s cannot work in push mode. The operation is not supported "
              "with this source element or protocol.",
              G_OBJECT_TYPE_NAME (GST_PAD_PARENT (peer))),
          ("Downstream pad %s:%s has no chainfunction, and the upstream "
              "element does not support pull mode",
              GST_DEBUG_PAD_NAME (peer)));
      typefind->mode = MODE_ERROR;      /* make the chain function error out */
      gst_buffer_unref (buffer);
    } else {
      gst_pad_push (typefind->src, buffer);
    }
    if (peer)
      gst_object_unref (peer);
  }
  return;

  /* ERRORS */
no_data:
  {
    GST_DEBUG_OBJECT (typefind, "we have no data to typefind");
    GST_OBJECT_UNLOCK (typefind);
    return;
  }
}
/* Process one incoming RTP buffer containing QDM2 audio (GStreamer 0.10
 * GstBaseRTPDepayload API).
 *
 * Walks the payload, which is a sequence of sub-packets: 0x80 markers are
 * skipped, 0xff introduces a header block (used once to configure caps from
 * the embedded codec data), and anything else is a shuffled data packet
 * stored via add_packet().  On a sequence-number gap or a new header the
 * previously collected packets are flushed into the adapter, and whatever
 * the adapter holds is returned as the output buffer (timestamped with the
 * previous RTP timestamp).  Returns NULL when there is nothing to push yet.
 *
 * NOTE(review): pos is advanced by packet/header lengths taken from the
 * payload itself and is only checked against payload_len at the top of the
 * loop, so a malformed packet can step past the payload end within one
 * iteration — bounds here look fragile; verify against upstream fixes.
 */
static GstBuffer *
gst_rtp_qdm2_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpQDM2Depay *rtpqdm2depay;
  GstBuffer *outbuf;
  guint16 seq;

  rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);

  {
    gint payload_len;
    guint8 *payload;
    guint avail;
    guint pos = 0;

    payload_len = gst_rtp_buffer_get_payload_len (buf);
    if (payload_len < 3)
      goto bad_packet;

    payload = gst_rtp_buffer_get_payload (buf);
    seq = gst_rtp_buffer_get_seq (buf);
    if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
      GST_DEBUG ("GAP in sequence number, Resetting data !");
      /* Flush previous data */
      flush_data (rtpqdm2depay);
      /* And store new timestamp */
      rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
      rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
      /* And that previous data will be pushed at the bottom */
    }
    rtpqdm2depay->nextseq = seq + 1;

    GST_DEBUG ("Payload size %d 0x%x sequence:%d",
        payload_len, payload_len, seq);

    GST_MEMDUMP ("Incoming payload", payload, payload_len);

    while (pos < payload_len) {
      switch (payload[pos]) {
        case 0x80:{
          GST_DEBUG ("Unrecognized 0x80 marker, skipping 12 bytes");
          pos += 12;
        }
          break;
        case 0xff:
          /* HEADERS */
          GST_DEBUG ("Headers");
          /* Store the incoming timestamp */
          rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
          rtpqdm2depay->timestamp = GST_BUFFER_TIMESTAMP (buf);
          /* flush the internal data if needed */
          flush_data (rtpqdm2depay);
          if (G_UNLIKELY (!rtpqdm2depay->configured)) {
            guint8 *ourdata;
            GstBuffer *codecdata;
            GstCaps *caps;

            /* First bytes are unknown */
            GST_MEMDUMP ("Header", payload + pos, 32);
            ourdata = payload + pos + 10;
            pos += 10;
            /* stream parameters live at fixed offsets in the header block */
            rtpqdm2depay->channs = GST_READ_UINT32_BE (payload + pos + 4);
            rtpqdm2depay->samplerate = GST_READ_UINT32_BE (payload + pos + 8);
            rtpqdm2depay->bitrate = GST_READ_UINT32_BE (payload + pos + 12);
            rtpqdm2depay->blocksize = GST_READ_UINT32_BE (payload + pos + 16);
            rtpqdm2depay->framesize = GST_READ_UINT32_BE (payload + pos + 20);
            rtpqdm2depay->packetsize = GST_READ_UINT32_BE (payload + pos + 24);
            /* 16 bit empty block (0x02 0x00) */
            pos += 30;
            GST_DEBUG
                ("channs:%d, samplerate:%d, bitrate:%d, blocksize:%d, framesize:%d, packetsize:%d",
                rtpqdm2depay->channs, rtpqdm2depay->samplerate,
                rtpqdm2depay->bitrate, rtpqdm2depay->blocksize,
                rtpqdm2depay->framesize, rtpqdm2depay->packetsize);

            /* Caps: codec_data = 20 bytes of static header + 28 bytes taken
             * from this stream's header */
            codecdata = gst_buffer_new_and_alloc (48);
            memcpy (GST_BUFFER_DATA (codecdata), headheader, 20);
            memcpy (GST_BUFFER_DATA (codecdata) + 20, ourdata, 28);

            caps = gst_caps_new_simple ("audio/x-qdm2",
                "samplesize", G_TYPE_INT, 16,
                "rate", G_TYPE_INT, rtpqdm2depay->samplerate,
                "channels", G_TYPE_INT, rtpqdm2depay->channs,
                "codec_data", GST_TYPE_BUFFER, codecdata, NULL);
            gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload),
                caps);
            gst_caps_unref (caps);
            rtpqdm2depay->configured = TRUE;
          } else {
            GST_DEBUG ("Already configured, skipping headers");
            pos += 40;
          }
          break;
        default:{
          /* Shuffled packet contents */
          guint packetid = payload[pos++];
          guint packettype = payload[pos++];
          guint packlen = payload[pos++];
          guint hsize = 2;

          GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
              packetid, packettype, packlen);

          /* Packets bigger than 0xff bytes have a type with the high bit set */
          if (G_UNLIKELY (packettype & 0x80)) {
            packettype &= 0x7f;
            packlen <<= 8;
            packlen |= payload[pos++];
            hsize = 3;
            GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
                packetid, packettype, packlen);
          }

          if (packettype > 0x7f) {
            GST_ERROR ("HOUSTON WE HAVE A PROBLEM !!!!");
          }
          /* store the sub-packet including its 2/3-byte header */
          add_packet (rtpqdm2depay, packetid, packlen + hsize,
              payload + pos - hsize);
          pos += packlen;
        }
      }
    }

    GST_DEBUG ("final pos %d", pos);

    avail = gst_adapter_available (rtpqdm2depay->adapter);
    if (G_UNLIKELY (avail)) {
      GST_DEBUG ("Pushing out %d bytes of collected data", avail);
      outbuf = gst_adapter_take_buffer (rtpqdm2depay->adapter, avail);
      /* flushed data belongs to the PREVIOUS timestamp */
      GST_BUFFER_TIMESTAMP (outbuf) = rtpqdm2depay->ptimestamp;
      GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
      return outbuf;
    }
  }
  return NULL;

  /* ERRORS */
bad_packet:
  {
    GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
        (NULL), ("Packet was too short"));
    return NULL;
  }
}
/* Chain function for the multipart demuxer (GStreamer 0.10 pad signature).
 *
 * Accumulates data in the adapter, alternately parsing a MIME part header
 * and then scanning for the next boundary.  Each completed part body is
 * pushed on a per-mime-type src pad (created on demand, with an open-ended
 * new-segment and a container tag on first use).  The incoming buffer's
 * timestamp is carried onto the output, except the first buffer of a new
 * pad which gets timestamp 0.
 *
 * Takes ownership of @buf.  size doubles as the parser status: the
 * MULTIPART_DATA_ERROR / MULTIPART_DATA_EOS sentinels are mapped to flow
 * returns at the nodata exit.
 */
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  GstClockTime timestamp;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad));
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      /* parse and consume the part header before looking for data */
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);
      /* take the part body, then drop the boundary bytes that follow it */
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      gst_buffer_set_caps (outbuf, GST_PAD_CAPS (srcpad->pad));
      if (created) {
        GstTagList *tags;

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        tags = gst_tag_list_new_full (GST_TAG_CONTAINER_FORMAT, "Multipart",
            NULL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));

        GST_BUFFER_TIMESTAMP (outbuf) = 0;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      }
      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      GST_DEBUG_OBJECT (multipart, "buffer has caps %" GST_PTR_FORMAT,
          GST_BUFFER_CAPS (outbuf));
      res = gst_pad_push (srcpad->pad, outbuf);
      /* combine per-pad flow so one unlinked pad doesn't stop everything */
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  gst_object_unref (multipart);

  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_UNEXPECTED;

  return res;
}
/* chain function
 * this function does the actual processing
 *
 * IVF stream parser (GStreamer 0.10 pad signature).  In START state it
 * waits for the 32-byte IVF file header, validates the "DKIF" magic /
 * version 0 / 32-byte header / "VP80" fourcc, derives video/x-vp8 caps and
 * a framerate, pushes an open-ended new-segment, and falls through to DATA
 * state.  In DATA state it repeatedly reads 12-byte frame headers
 * (LE size + LE 64-bit pts) and pushes each complete frame with
 * timestamps/durations scaled by rate_den/rate_num.
 *
 * Takes ownership of @buf (handed to the lazily-created adapter).
 */
static GstFlowReturn
gst_ivf_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstIvfParse *ivf = GST_IVF_PARSE (GST_OBJECT_PARENT (pad));
  gboolean res;

  /* lazy creation of the adapter */
  if (G_UNLIKELY (ivf->adapter == NULL)) {
    ivf->adapter = gst_adapter_new ();
  }

  GST_LOG_OBJECT (ivf, "Pushing buffer of size %u to adapter",
      GST_BUFFER_SIZE (buf));

  gst_adapter_push (ivf->adapter, buf);  /* adapter takes ownership of buf */

  res = GST_FLOW_OK;

  switch (ivf->state) {
    case GST_IVF_PARSE_START:
      if (gst_adapter_available (ivf->adapter) >= 32) {
        GstCaps *caps;

        /* fixed-layout 32-byte IVF file header, all fields little-endian */
        const guint8 *data = gst_adapter_peek (ivf->adapter, 32);
        guint32 magic = GST_READ_UINT32_LE (data);
        guint16 version = GST_READ_UINT16_LE (data + 4);
        guint16 header_size = GST_READ_UINT16_LE (data + 6);
        guint32 fourcc = GST_READ_UINT32_LE (data + 8);
        guint16 width = GST_READ_UINT16_LE (data + 12);
        guint16 height = GST_READ_UINT16_LE (data + 14);
        guint32 rate_num = GST_READ_UINT32_LE (data + 16);
        guint32 rate_den = GST_READ_UINT32_LE (data + 20);
#ifndef GST_DISABLE_GST_DEBUG
        guint32 num_frames = GST_READ_UINT32_LE (data + 24);
#endif
        /* last 4 bytes unused */
        gst_adapter_flush (ivf->adapter, 32);

        if (magic != GST_MAKE_FOURCC ('D', 'K', 'I', 'F') ||
            version != 0 || header_size != 32 ||
            fourcc != GST_MAKE_FOURCC ('V', 'P', '8', '0')) {
          GST_ELEMENT_ERROR (ivf, STREAM, WRONG_TYPE, (NULL), (NULL));
          return GST_FLOW_ERROR;
        }

        /* create src pad caps */
        caps = gst_caps_new_simple ("video/x-vp8",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, rate_num, rate_den, NULL);

        GST_INFO_OBJECT (ivf, "Found stream: %" GST_PTR_FORMAT, caps);
        GST_LOG_OBJECT (ivf, "Stream has %d frames", num_frames);

        gst_pad_set_caps (ivf->srcpad, caps);
        gst_caps_unref (caps);

        /* keep framerate in instance for convenience */
        ivf->rate_num = rate_num;
        ivf->rate_den = rate_den;

        gst_pad_push_event (ivf->srcpad,
            gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, -1, 0));

        /* move along */
        ivf->state = GST_IVF_PARSE_DATA;
      } else {
        GST_LOG_OBJECT (ivf, "Header data not yet available.");
        break;
      }
      /* fall through */

    case GST_IVF_PARSE_DATA:
      /* NOTE(review): condition is '> 12' rather than '>= 12'; an exactly
       * 12-byte remainder (header of a zero-sized frame) would never be
       * examined — confirm against upstream whether '>=' was intended */
      while (gst_adapter_available (ivf->adapter) > 12) {
        const guint8 *data = gst_adapter_peek (ivf->adapter, 12);
        guint32 frame_size = GST_READ_UINT32_LE (data);
        guint64 frame_pts = GST_READ_UINT64_LE (data + 4);

        GST_LOG_OBJECT (ivf,
            "Read frame header: size %u, pts %" G_GUINT64_FORMAT,
            frame_size, frame_pts);

        if (gst_adapter_available (ivf->adapter) >= 12 + frame_size) {
          GstBuffer *frame;

          gst_adapter_flush (ivf->adapter, 12);

          frame = gst_adapter_take_buffer (ivf->adapter, frame_size);
          gst_buffer_set_caps (frame, GST_PAD_CAPS (ivf->srcpad));
          /* pts is in framerate units; convert to nanoseconds */
          GST_BUFFER_TIMESTAMP (frame) =
              gst_util_uint64_scale_int (GST_SECOND * frame_pts,
              ivf->rate_den, ivf->rate_num);
          GST_BUFFER_DURATION (frame) =
              gst_util_uint64_scale_int (GST_SECOND, ivf->rate_den,
              ivf->rate_num);

          GST_DEBUG_OBJECT (ivf,
              "Pushing frame of size %u, ts %" GST_TIME_FORMAT ", dur %"
              GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT ", off_end %"
              G_GUINT64_FORMAT, GST_BUFFER_SIZE (frame),
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (frame)),
              GST_TIME_ARGS (GST_BUFFER_DURATION (frame)),
              GST_BUFFER_OFFSET (frame), GST_BUFFER_OFFSET_END (frame));

          res = gst_pad_push (ivf->srcpad, frame);
          if (res != GST_FLOW_OK)
            break;
        } else {
          GST_LOG_OBJECT (ivf, "Frame data not yet available.");
          break;
        }
      }
      break;

    default:
      g_return_val_if_reached (GST_FLOW_ERROR);
  }

  return res;
}
/* Flush the assembled JPEG 2000 frame out of the frame adapter.
 *
 * First flushes any pending tile, then drains f_adapter.  Before pushing,
 * the last two bytes are checked for the EOC (end-of-codestream) marker
 * 0xff 0xd9; if it is absent, one is appended so downstream decoders see a
 * properly terminated codestream.  Finally the per-frame depayloader state
 * (mh_id, fragment offset, sync flag, stored main header) is reset.
 *
 * Returns the flow result of pushing the frame, or GST_FLOW_OK when there
 * was nothing (or only a degenerate <= 2 byte packet) to push.
 */
static GstFlowReturn
gst_rtp_j2k_depay_flush_frame (GstRTPBaseDepayload * depayload)
{
  GstRtpJ2KDepay *rtpj2kdepay;
  guint8 end[2];
  guint avail;

  GstFlowReturn ret = GST_FLOW_OK;

  rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);

  /* flush pending tile */
  gst_rtp_j2k_depay_flush_tile (depayload);

  /* last buffer take all data out of the adapter */
  avail = gst_adapter_available (rtpj2kdepay->f_adapter);
  if (avail == 0)
    goto done;

  if (avail > 2) {
    GstBuffer *outbuf;

    /* take the last bytes of the JPEG 2000 data to see if there is an EOC
     * marker */
    gst_adapter_copy (rtpj2kdepay->f_adapter, end, avail - 2, 2);

    /* the EOC marker is only present when BOTH bytes match 0xff 0xd9, so it
     * is missing when EITHER byte differs (was '&&', which let codestreams
     * ending in e.g. 0xff 0x00 or 0x00 0xd9 through unterminated) */
    if (end[0] != 0xff || end[1] != 0xd9) {
      end[0] = 0xff;
      end[1] = 0xd9;

      GST_DEBUG_OBJECT (rtpj2kdepay, "no EOC marker, adding one");

      /* no EOI marker, add one */
      outbuf = gst_buffer_new_and_alloc (2);
      gst_buffer_fill (outbuf, 0, end, 2);

      gst_adapter_push (rtpj2kdepay->f_adapter, outbuf);
      avail += 2;
    }

    GST_DEBUG_OBJECT (rtpj2kdepay, "pushing buffer of %u bytes", avail);
    outbuf = gst_adapter_take_buffer (rtpj2kdepay->f_adapter, avail);
    gst_rtp_drop_meta (GST_ELEMENT_CAST (depayload), outbuf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    ret = gst_rtp_base_depayload_push (depayload, outbuf);
  } else {
    GST_WARNING_OBJECT (rtpj2kdepay, "empty packet");
    gst_adapter_clear (rtpj2kdepay->f_adapter);
  }

  /* we accept any mh_id now */
  rtpj2kdepay->last_mh_id = -1;

  /* reset state */
  rtpj2kdepay->next_frag = 0;
  rtpj2kdepay->have_sync = FALSE;

done:
  /* we can't keep headers with mh_id of 0 */
  store_mheader (rtpj2kdepay, 0, NULL);
  return ret;
}
static GstFlowReturn gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstFlacTag *tag; GstFlowReturn ret; GstMapInfo map; gsize size; ret = GST_FLOW_OK; tag = GST_FLAC_TAG (parent); gst_adapter_push (tag->adapter, buffer); /* Initial state, we don't even know if we are dealing with a flac file */ if (tag->state == GST_FLAC_TAG_STATE_INIT) { GstBuffer *id_buffer; if (gst_adapter_available (tag->adapter) < sizeof (FLAC_MAGIC)) goto cleanup; id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE); GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier"); if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) { GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer"); ret = gst_pad_push (tag->srcpad, id_buffer); if (ret != GST_FLOW_OK) goto cleanup; tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS; } else { /* FIXME: does that work well with FLAC files containing ID3v2 tags ? */ gst_buffer_unref (id_buffer); GST_ELEMENT_ERROR (tag, STREAM, WRONG_TYPE, (NULL), (NULL)); ret = GST_FLOW_ERROR; } } /* The fLaC magic string has been skipped, try to detect the beginning * of a metadata block */ if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) { guint type; gboolean is_last; const guint8 *block_header; g_assert (tag->metadata_block_size == 0); g_assert (tag->metadata_last_block == FALSE); /* The header of a flac metadata block is 4 bytes long: * 1st bit: indicates whether this is the last metadata info block * 7 next bits: 4 if vorbis comment block * 24 next bits: size of the metadata to follow (big endian) */ if (gst_adapter_available (tag->adapter) < 4) goto cleanup; block_header = gst_adapter_map (tag->adapter, 4); is_last = ((block_header[0] & 0x80) == 0x80); type = block_header[0] & 0x7F; size = (block_header[1] << 16) | (block_header[2] << 8) | block_header[3]; gst_adapter_unmap (tag->adapter); /* The 4 bytes long header isn't included in the metadata size */ tag->metadata_block_size = size + 4; 
tag->metadata_last_block = is_last; GST_DEBUG_OBJECT (tag, "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, " "is vorbiscomment: %d, is last: %d", size, type, (type == 0x04), is_last); /* Metadata blocks of type 4 are vorbis comment blocks */ if (type == 0x04) { tag->state = GST_FLAC_TAG_STATE_VC_METADATA_BLOCK; } else { tag->state = GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK; } } /* Reads a metadata block */ if ((tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) || (tag->state == GST_FLAC_TAG_STATE_VC_METADATA_BLOCK)) { GstBuffer *metadata_buffer; if (gst_adapter_available (tag->adapter) < tag->metadata_block_size) goto cleanup; metadata_buffer = gst_adapter_take_buffer (tag->adapter, tag->metadata_block_size); /* clear the is-last flag, as the last metadata block will * be the vorbis comment block which we will build ourselves. */ gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE); map.data[0] &= (~0x80); gst_buffer_unmap (metadata_buffer, &map); if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) { GST_DEBUG_OBJECT (tag, "pushing metadata block buffer"); ret = gst_pad_push (tag->srcpad, metadata_buffer); if (ret != GST_FLOW_OK) goto cleanup; } else { tag->vorbiscomment = metadata_buffer; } tag->metadata_block_size = 0; tag->state = GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK; } /* This state is mainly used to be able to stop as soon as we read * a vorbiscomment block from the flac file if we are in an only output * tags mode */ if (tag->state == GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK) { /* Check if in the previous iteration we read a vorbis comment metadata * block, and stop now if the user only wants to read tags */ if (tag->vorbiscomment != NULL) { guint8 id_data[4]; /* We found some tags, try to parse them and notify the other elements * that we encountered some tags */ GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags"); gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4); tag->tags = gst_tag_list_from_vorbiscomment_buffer 
(tag->vorbiscomment, id_data, 4, NULL); if (tag->tags != NULL) { gst_pad_push_event (tag->srcpad, gst_event_new_tag (gst_tag_list_copy (tag->tags))); } gst_buffer_unref (tag->vorbiscomment); tag->vorbiscomment = NULL; } /* Skip to next state */ if (tag->metadata_last_block == FALSE) { tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS; } else { tag->state = GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT; } } /* Creates a vorbis comment block from the metadata which was set * on the gstreamer element, and add it to the flac stream */ if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) { GstBuffer *buffer; const GstTagList *user_tags; GstTagList *merged_tags; /* merge the tag lists */ user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tag)); if (user_tags != NULL) { merged_tags = gst_tag_list_merge (user_tags, tag->tags, gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tag))); } else { merged_tags = gst_tag_list_copy (tag->tags); } if (merged_tags == NULL) { /* If we get a NULL list of tags, we must generate a padding block * which is marked as the last metadata block, otherwise we'll * end up with a corrupted flac file. 
*/ GST_WARNING_OBJECT (tag, "No tags found"); buffer = gst_buffer_new_and_alloc (12); if (buffer == NULL) goto no_buffer; gst_buffer_map (buffer, &map, GST_MAP_WRITE); memset (map.data, 0, map.size); map.data[0] = 0x81; /* 0x80 = Last metadata block, * 0x01 = padding block */ gst_buffer_unmap (buffer, &map); } else { guchar header[4]; guint8 fbit[1]; memset (header, 0, sizeof (header)); header[0] = 0x84; /* 0x80 = Last metadata block, * 0x04 = vorbiscomment block */ buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header, sizeof (header), NULL); GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags); gst_tag_list_free (merged_tags); if (buffer == NULL) goto no_comment; size = gst_buffer_get_size (buffer); if ((size < 4) || ((size - 4) > 0xFFFFFF)) goto comment_too_long; fbit[0] = 1; /* Get rid of the framing bit at the end of the vorbiscomment buffer * if it exists since libFLAC seems to lose sync because of this * bit in gstflacdec */ if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) { buffer = gst_buffer_make_writable (buffer); gst_buffer_resize (buffer, 0, size - 1); } } /* The 4 byte metadata block header isn't accounted for in the total * size of the metadata block */ gst_buffer_map (buffer, &map, GST_MAP_WRITE); map.data[1] = (((map.size - 4) & 0xFF0000) >> 16); map.data[2] = (((map.size - 4) & 0x00FF00) >> 8); map.data[3] = ((map.size - 4) & 0x0000FF); gst_buffer_unmap (buffer, &map); GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment " "buffer", map.size); ret = gst_pad_push (tag->srcpad, buffer); if (ret != GST_FLOW_OK) { goto cleanup; } tag->state = GST_FLAC_TAG_STATE_AUDIO_DATA; }
/* Sink-pad chain function: accumulates incoming data in an adapter, resyncs
 * to a Wavpack block header and pushes one buffer downstream per complete
 * Wavpack block. Returns GST_FLOW_OK while waiting for more data. */
static GstFlowReturn
gst_wavpack_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstWavpackParse *wvparse = GST_WAVPACK_PARSE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  WavpackHeader wph;
  const guint8 *tmp_buf;

  /* lazily create the adapter on first use */
  if (!wvparse->adapter) {
    wvparse->adapter = gst_adapter_new ();
  }

  /* on a discontinuity, any partially accumulated block is unusable */
  if (GST_BUFFER_IS_DISCONT (buf)) {
    gst_adapter_clear (wvparse->adapter);
    wvparse->discont = TRUE;
  }

  gst_adapter_push (wvparse->adapter, buf);

  /* need at least one full header before anything can be parsed */
  if (gst_adapter_available (wvparse->adapter) < sizeof (WavpackHeader))
    return ret;

  /* skip to the next sync pattern; wait for more data if none found */
  if (!gst_wavpack_parse_resync_adapter (wvparse->adapter))
    return ret;

  tmp_buf = gst_adapter_peek (wvparse->adapter, sizeof (WavpackHeader));
  gst_wavpack_read_header (&wph, (guint8 *) tmp_buf);

  /* push out every complete block; the `+ 4 * 1 + 4` accounts for bytes
   * not included in ckSize — presumably the leading chunk id/size fields,
   * TODO confirm against the WavpackHeader definition */
  while (gst_adapter_available (wvparse->adapter) >= wph.ckSize + 4 * 1 + 4) {
    GstBuffer *outbuf =
        gst_adapter_take_buffer (wvparse->adapter, wph.ckSize + 4 * 1 + 4);

    if (!outbuf)
      return GST_FLOW_ERROR;

    /* source pad is created lazily from the first complete block */
    if (wvparse->srcpad == NULL) {
      if (!gst_wavpack_parse_create_src_pad (wvparse, outbuf, &wph)) {
        GST_ERROR_OBJECT (wvparse, "Failed to create src pad");
        ret = GST_FLOW_ERROR;
        break;
      }
    }

    ret = gst_wavpack_parse_push_buffer (wvparse, outbuf, &wph);
    if (ret != GST_FLOW_OK)
      break;

    /* peek the next header (if enough data remains) for the next round */
    if (gst_adapter_available (wvparse->adapter) >= sizeof (WavpackHeader)) {
      tmp_buf = gst_adapter_peek (wvparse->adapter, sizeof (WavpackHeader));

      if (!gst_wavpack_parse_resync_adapter (wvparse->adapter))
        break;

      gst_wavpack_read_header (&wph, (guint8 *) tmp_buf);
    }
  }

  return ret;
}
/* One-time setup: hands all adapter data to libgme, extracts track metadata
 * into a tag list, configures fade-out for looping tracks and starts the
 * streaming task on the source pad. Returns FALSE on failure. */
static gboolean
gme_setup (GstGmeDec * gme)
{
  gme_info_t *info;
  gme_err_t gme_err = NULL;
  GstTagList *taglist;
  guint64 total_duration;
  guint64 fade_time;
  GstBuffer *buffer;
  GstSegment seg;
  GstMapInfo map;

  /* need the whole file in the adapter and agreed-on output caps */
  if (!gst_adapter_available (gme->adapter) || !gme_negotiate (gme)) {
    return FALSE;
  }

  buffer =
      gst_adapter_take_buffer (gme->adapter,
      gst_adapter_available (gme->adapter));

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  /* 32000 is the requested output sample rate passed to libgme */
  gme_err = gme_open_data (map.data, map.size, &gme->player, 32000);
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  if (gme_err || !gme->player) {
    if (gme->player) {
      gme_delete (gme->player);
      gme->player = NULL;
    }
    GST_ELEMENT_ERROR (gme, STREAM, DEMUX, (NULL), ("%s", gme_err));
    return FALSE;
  }

  /* NOTE(review): gme_err from gme_track_info is not checked here; if it
   * fails, `info` may be invalid when dereferenced below — verify */
  gme_err = gme_track_info (gme->player, &info, 0);

  taglist = gst_tag_list_new_empty ();

  if (info->song && *info->song)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
        info->song, NULL);
  if (info->author && *info->author)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ARTIST,
        info->author, NULL);
  /* Prefer the name of the official soundtrack over the name of the game
   * (since this is how track numbers are derived) */
  if (info->game && *info->game)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ALBUM,
        info->game, NULL);
  if (info->comment && *info->comment)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COMMENT,
        info->comment, NULL);
  if (info->dumper && *info->dumper)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_CONTACT,
        info->dumper, NULL);
  if (info->copyright && *info->copyright)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COPYRIGHT,
        info->copyright, NULL);
  if (info->system && *info->system)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
        info->system, NULL);

  /* looping tracks get an extra 8 seconds (8000 ms) of playback before the
   * fade-out; durations from libgme are in milliseconds */
  gme->total_duration = total_duration =
      gst_util_uint64_scale_int (info->play_length + (info->loop_length >
          0 ? 8000 : 0), GST_MSECOND, 1);
  /* start fading at the nominal end of the track, but only if it loops */
  fade_time = info->loop_length > 0 ? info->play_length : 0;

  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
      GST_TAG_DURATION, total_duration, NULL);

  gst_pad_push_event (gme->srcpad, gst_event_new_tag (taglist));

  /* NOTE(review): libgme documents gme_free_info() for track info;
   * plain g_free may be insufficient — confirm against the libgme API */
  g_free (info);

#ifdef HAVE_LIBGME_ACCURACY
  /* TODO: Is it worth it to make this optional? */
  gme_enable_accuracy (gme->player, 1);
#endif
  gme_start_track (gme->player, 0);
  if (fade_time)
    gme_set_fade (gme->player, fade_time);

  gst_segment_init (&seg, GST_FORMAT_TIME);
  gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));

  /* decoding runs in its own pad task from here on */
  gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play,
      gme->srcpad, NULL);

  gme->initialized = TRUE;
  gme->seeking = FALSE;
  gme->seekpoint = 0;
  return gme->initialized;
}
/* Depayloads one RTP packet: strips the VP8 payload descriptor, accumulates
 * payload data in an adapter, and on the marker bit assembles and returns a
 * complete VP8 frame (or NULL while a frame is still incomplete). */
static GstBuffer *
gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize;
  guint size;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  size = gst_rtp_buffer_get_payload_len (rtp);

  /* At least one header and one vp8 byte */
  if (G_UNLIKELY (size < 2))
    goto too_small;

  data = gst_rtp_buffer_get_payload (rtp);

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP8 frame, otherwise bail */
    /* S=1 and PartID= 0 */
    if ((data[0] & 0x17) != 0x10)
      goto done;

    self->started = TRUE;
  }

  /* walk the VP8 payload descriptor to find where payload data begins */
  hdrsize = 1;
  /* Check X optional header */
  if ((data[0] & 0x80) != 0) {
    hdrsize++;
    /* Check I optional header */
    if ((data[1] & 0x80) != 0) {
      if (G_UNLIKELY (size < 3))
        goto too_small;
      hdrsize++;
      /* Check for 16 bits PictureID */
      if ((data[2] & 0x80) != 0)
        hdrsize++;
    }
    /* Check L optional header */
    if ((data[1] & 0x40) != 0)
      hdrsize++;
    /* Check T or K optional headers */
    if ((data[1] & 0x20) != 0 || (data[1] & 0x10) != 0)
      hdrsize++;
  }
  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    guint8 header[10];

    /* NOTE(review): assumes at least 10 bytes are in the adapter here —
     * a malformed final fragment could be shorter; verify */
    gst_adapter_copy (self->adapter, &header, 0, 10);
    out =
        gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    /* low bit of the first frame-header byte set -> inter frame */
    if ((header[0] & 0x01)) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (!self->caps_sent) {
        /* cannot start the stream on an inter frame; drop it and ask
         * upstream for a new keyframe */
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      guint profile, width, height;

      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      /* keyframe header carries profile and (14-bit) dimensions */
      profile = (header[0] & 0x0e) >> 1;
      width = GST_READ_UINT16_LE (header + 6) & 0x3fff;
      height = GST_READ_UINT16_LE (header + 8) & 0x3fff;

      /* renegotiate caps whenever the stream geometry/profile changes */
      if (G_UNLIKELY (self->last_width != width ||
              self->last_height != height || self->last_profile != profile)) {
        gchar profile_str[3];
        GstCaps *srccaps;

        snprintf (profile_str, 3, "%u", profile);
        srccaps = gst_caps_new_simple ("video/x-vp8",
            "framerate", GST_TYPE_FRACTION, 0, 1,
            "height", G_TYPE_INT, height,
            "width", G_TYPE_INT, width,
            "profile", G_TYPE_STRING, profile_str, NULL);

        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);

        self->caps_sent = TRUE;
        self->last_width = width;
        self->last_height = height;
        self->last_profile = profile;
      }
    }

    return out;
  }

done:
  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;

  goto done;
}
/* Decodes one MPEG picture accumulated in the adapter through the VDPAU
 * decoder. Non-B frames rotate the forward/backward reference surfaces;
 * B frames are pushed immediately while reference frames are held back
 * until the next non-B frame arrives. */
static GstFlowReturn
gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
    GstClockTime timestamp, gint64 size)
{
  VdpPictureInfoMPEG1Or2 *info;
  GstBuffer *buffer;
  GstBuffer *outbuf;
  VdpVideoSurface surface;
  GstVdpDevice *device;
  VdpBitstreamBuffer vbit[1];
  VdpStatus status;

  info = &mpeg_dec->vdp_info;

  if (info->picture_coding_type != B_FRAME) {
    /* a new reference frame displaces the previous pending one: push the
     * held-back backward reference downstream now */
    if (info->backward_reference != VDP_INVALID_HANDLE) {
      gst_buffer_ref (mpeg_dec->b_buffer);
      gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
          GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
    }

    /* drop the old forward reference; it is no longer needed */
    if (info->forward_reference != VDP_INVALID_HANDLE) {
      gst_buffer_unref (mpeg_dec->f_buffer);
      info->forward_reference = VDP_INVALID_HANDLE;
    }

    /* backward reference becomes the new forward reference */
    info->forward_reference = info->backward_reference;
    mpeg_dec->f_buffer = mpeg_dec->b_buffer;

    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) {
    gst_adapter_clear (mpeg_dec->adapter);
    return GST_FLOW_ERROR;
  }

  device = GST_VDP_VIDEO_BUFFER (outbuf)->device;

  /* P/B frames reference the previous frame */
  if (info->forward_reference != VDP_INVALID_HANDLE &&
      info->picture_coding_type != I_FRAME)
    gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
        GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));

  /* only B frames additionally reference the following frame */
  if (info->backward_reference != VDP_INVALID_HANDLE &&
      info->picture_coding_type == B_FRAME)
    gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
        GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
  GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
  GST_BUFFER_SIZE (outbuf) = size;

  if (info->picture_coding_type == I_FRAME)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  if (info->top_field_first)
    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
  else
    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);

  /* hand the whole accumulated picture bitstream to VDPAU */
  buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
      gst_adapter_available (mpeg_dec->adapter));

  surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = GST_BUFFER_DATA (buffer);
  vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);

  status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
      (VdpPictureInfo *) info, 1, vbit);
  gst_buffer_unref (buffer);
  info->slice_count = 0;

  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
        ("Could not decode"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));

    gst_buffer_unref (GST_BUFFER (outbuf));

    return GST_FLOW_ERROR;
  }

  if (info->picture_coding_type == B_FRAME) {
    /* B frames never become references: push right away */
    gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
        GST_VDP_VIDEO_BUFFER (outbuf));
  } else {
    /* I/P frames are held as the backward reference until the next
     * non-B frame; pushed at the top of a later call */
    info->backward_reference = surface;
    mpeg_dec->b_buffer = GST_BUFFER (outbuf);
  }

  return GST_FLOW_OK;
}
/* Sink-pad chain function: parses the YUV4MPEG stream header on first call,
 * emits a newsegment when pending, then slices fixed-size frames out of the
 * adapter (each preceded by a "FRAME..." line) and pushes them downstream.
 *
 * Fix: gst_pad_get_parent() returns a new reference to the element; the
 * early returns (not enough header data, header parse failure, caps
 * failure) previously leaked that reference. Each early exit now drops it.
 */
static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  /* takes a reference; released on every exit path */
  y4mdec = GST_Y4M_DEC (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;

    /* wait for enough data to parse the stream header */
    if (n_avail < MAX_HEADER_LENGTH) {
      gst_object_unref (y4mdec);
      return GST_FLOW_OK;
    }

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0,
        MAX_HEADER_LENGTH);

    /* NUL-terminate at the first newline so strlen() below gives the
     * real header length */
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      gst_object_unref (y4mdec);
      return GST_FLOW_ERROR;
    }

    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_format_new_caps_interlaced (y4mdec->format,
        y4mdec->width, y4mdec->height,
        y4mdec->fps_n, y4mdec->fps_d,
        y4mdec->par_n, y4mdec->par_d, y4mdec->interlaced);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      gst_object_unref (y4mdec);
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment_start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment_stop);
    GstClockTime position = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment_position);

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
        start, stop, position);

    gst_pad_push_event (y4mdec->srcpad, event);
    //gst_event_unref (event);

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment_position);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);
  }

  /* slice out as many complete frames as the adapter currently holds */
  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0,
        MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->frame_size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %d",
          n_avail, y4mdec->frame_size + len + 1);
      break;
    }

    /* drop the "FRAME..." line, then take the raw frame payload */
    gst_adapter_flush (y4mdec->adapter, len + 1);
    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->frame_size);

    GST_BUFFER_CAPS (buffer) = gst_caps_ref (GST_PAD_CAPS (y4mdec->srcpad));
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);
    if (y4mdec->interlaced && y4mdec->tff) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_TFF);
    }

    y4mdec->frame_index++;

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  gst_object_unref (y4mdec);
  GST_DEBUG ("returning %d", flow_ret);
  return flow_ret;
}
/**
 * gst_rtp_base_audio_payload_flush:
 * @baseaudiopayload: a #GstRTPBasePayload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of the adapter as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * If @payload_len is -1, all pending bytes will be flushed. If @timestamp is
 * -1, the timestamp will be calculated automatically.
 *
 * Returns: a #GstFlowReturn
 */
GstFlowReturn
gst_rtp_base_audio_payload_flush (GstRTPBaseAudioPayload * baseaudiopayload,
    guint payload_len, GstClockTime timestamp)
{
  GstRTPBaseAudioPayloadPrivate *priv = baseaudiopayload->priv;
  GstAdapter *adapter = priv->adapter;
  GstRTPBasePayload *basepayload;
  GstFlowReturn ret;
  guint64 distance;

  basepayload = GST_RTP_BASE_PAYLOAD (baseaudiopayload);

  /* -1 means: flush everything the adapter currently holds */
  if (payload_len == -1)
    payload_len = gst_adapter_available (adapter);

  /* nothing pending, nothing to do */
  if (payload_len == 0)
    return GST_FLOW_OK;

  if (timestamp == -1) {
    /* derive a timestamp: last PTS seen by the adapter plus the duration
     * of the bytes consumed since that PTS entered it */
    timestamp = gst_adapter_prev_pts (adapter, &distance);

    GST_LOG_OBJECT (baseaudiopayload,
        "last timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
        GST_TIME_ARGS (timestamp), distance);

    if (GST_CLOCK_TIME_IS_VALID (timestamp) && distance > 0)
      timestamp += priv->bytes_to_time (baseaudiopayload, distance);
  }

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (!priv->buffer_list
      || gst_adapter_available_fast (adapter) < payload_len) {
    /* slow path: wrap the payload bytes in a freshly allocated RTP header
     * buffer, carrying over relevant metas from the payload buffer */
    GstBuffer *rtpbuf, *paybuf;
    CopyMetaData data;

    rtpbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload,
        0, 0, 0);
    paybuf = gst_adapter_take_buffer_fast (adapter, payload_len);

    data.pay = baseaudiopayload;
    data.outbuf = rtpbuf;
    gst_buffer_foreach_meta (paybuf, foreach_metadata, &data);
    rtpbuf = gst_buffer_append (rtpbuf, paybuf);

    /* set RTP metadata (seqnum, timestamp, ...) and push */
    gst_rtp_base_audio_payload_set_meta (baseaudiopayload, rtpbuf,
        payload_len, timestamp);
    ret = gst_rtp_base_payload_push (basepayload, rtpbuf);
  } else {
    /* fast path: the adapter can hand out the bytes without copying */
    GstBuffer *whole = gst_adapter_take_buffer (adapter, payload_len);

    ret = gst_rtp_base_audio_payload_push_buffer (baseaudiopayload, whole,
        timestamp);
  }

  return ret;
}
/* Depayloads one JPEG 2000 RTP packet: strips the 8-byte payload header,
 * accumulates codestream data in an adapter, and on the marker bit returns
 * a complete codestream (appending an EOC marker if the sender omitted it).
 *
 * Fixes:
 * - The EOC (0xFF 0xD9) presence test used `&&`: it only added a marker
 *   when BOTH trailing bytes differed, so e.g. a stream ending in
 *   0xFF 0x00 was passed through without EOC. "EOC missing" is
 *   end[0] != 0xff || end[1] != 0xd9.
 * - gst_adapter_copy() was called with offset `avail - 2` without checking
 *   avail >= 2; with a 1-byte accumulation the guint offset underflowed.
 */
static GstBuffer *
gst_rtp_j2k_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpJ2KDepay *rtpj2kdepay;
  GstBuffer *outbuf;
  guint8 *payload;
  guint frag_offset;

  rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);

  /* flush everything on discont for now */
  if (GST_BUFFER_IS_DISCONT (buf)) {
    GST_DEBUG_OBJECT (rtpj2kdepay, "DISCONT, flushing data");
    gst_adapter_clear (rtpj2kdepay->adapter);
    rtpj2kdepay->need_header = TRUE;
  }

  if (gst_rtp_buffer_get_payload_len (buf) < 8)
    goto empty_packet;

  payload = gst_rtp_buffer_get_payload (buf);

  /*
   *  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |tp |MHF|mh_id|T|     priority  |           tile number         |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |reserved       |             fragment offset                   |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   */
  frag_offset = (payload[5] << 16) | (payload[6] << 8) | payload[7];

  GST_DEBUG_OBJECT (rtpj2kdepay, "frag %u", frag_offset);

  if (rtpj2kdepay->need_header) {
    /* can only start assembling at the beginning of a codestream */
    if (frag_offset != 0)
      goto waiting_header;

    rtpj2kdepay->need_header = FALSE;
  }

  /* take JPEG 2000 data, push in the adapter */
  outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 8, -1);
  gst_adapter_push (rtpj2kdepay->adapter, outbuf);
  outbuf = NULL;

  if (gst_rtp_buffer_get_marker (buf)) {
    guint avail;
    guint8 end[2];
    guint8 *data;

    /* last buffer take all data out of the adapter */
    avail = gst_adapter_available (rtpj2kdepay->adapter);
    GST_DEBUG_OBJECT (rtpj2kdepay, "marker set, last buffer");

    /* take the last bytes of the JPEG 2000 data to see if there is an EOC
     * marker; if fewer than 2 bytes were accumulated, there cannot be one */
    if (avail >= 2)
      gst_adapter_copy (rtpj2kdepay->adapter, end, avail - 2, 2);
    else
      end[0] = end[1] = 0;

    if (end[0] != 0xff || end[1] != 0xd9) {
      GST_DEBUG_OBJECT (rtpj2kdepay, "no EOC marker, adding one");

      /* no EOI marker, add one */
      outbuf = gst_buffer_new_and_alloc (2);
      data = GST_BUFFER_DATA (outbuf);
      data[0] = 0xff;
      data[1] = 0xd9;

      gst_adapter_push (rtpj2kdepay->adapter, outbuf);
      avail += 2;
    }
    outbuf = gst_adapter_take_buffer (rtpj2kdepay->adapter, avail);

    GST_DEBUG_OBJECT (rtpj2kdepay, "returning %u bytes", avail);
  }
  return outbuf;

  /* ERRORS */
empty_packet:
  {
    GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE,
        ("Empty Payload."), (NULL));
    return NULL;
  }
waiting_header:
  {
    GST_DEBUG_OBJECT (rtpj2kdepay, "we are waiting for a header");
    return NULL;
  }
}
/* Takes @len bytes (one JPEG image) from the adapter, (re)negotiates caps
 * when the image geometry/framerate changed, stamps timestamp/duration and
 * pushes the buffer on the source pad.
 *
 * Fix: on the two error paths (failed adapter take aside, failed caps
 * negotiation) the already-taken @outbuf was leaked; it is now unreffed
 * before returning GST_FLOW_ERROR.
 */
static GstFlowReturn
gst_jpeg_parse_push_buffer (GstJpegParse * parse, guint len)
{
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean header_ok;

  /* reset the offset (only when we flushed) */
  parse->priv->last_offset = 0;
  parse->priv->last_entropy_len = 0;

  outbuf = gst_adapter_take_buffer (parse->priv->adapter, len);
  if (outbuf == NULL) {
    GST_ELEMENT_ERROR (parse, STREAM, DECODE,
        ("Failed to take buffer of size %u", len),
        ("Failed to take buffer of size %u", len));
    return GST_FLOW_ERROR;
  }

  header_ok = gst_jpeg_parse_read_header (parse, outbuf);

  /* renegotiate whenever segment or image parameters changed */
  if (parse->priv->new_segment == TRUE
      || parse->priv->width != parse->priv->caps_width
      || parse->priv->height != parse->priv->caps_height
      || parse->priv->framerate_numerator !=
      parse->priv->caps_framerate_numerator
      || parse->priv->framerate_denominator !=
      parse->priv->caps_framerate_denominator) {
    if (!gst_jpeg_parse_set_new_caps (parse, header_ok)) {
      GST_ELEMENT_ERROR (parse, CORE, NEGOTIATION,
          ("Can't set caps to the src pad"),
          ("Can't set caps to the src pad"));
      /* don't leak the image we already took from the adapter */
      gst_buffer_unref (outbuf);
      return GST_FLOW_ERROR;
    }

    if (parse->priv->tags) {
      GST_DEBUG_OBJECT (parse, "Pushing tags: %" GST_PTR_FORMAT,
          parse->priv->tags);
      /* found_tags_for_pad takes ownership of the tag list */
      gst_element_found_tags_for_pad (GST_ELEMENT_CAST (parse),
          parse->priv->srcpad, parse->priv->tags);
      parse->priv->tags = NULL;
    }

    parse->priv->new_segment = FALSE;
    parse->priv->caps_width = parse->priv->width;
    parse->priv->caps_height = parse->priv->height;
    parse->priv->caps_framerate_numerator = parse->priv->framerate_numerator;
    parse->priv->caps_framerate_denominator =
        parse->priv->framerate_denominator;
  }

  GST_BUFFER_TIMESTAMP (outbuf) = parse->priv->next_ts;

  /* advance next_ts only when a usable framerate/duration is known */
  if (parse->priv->has_fps && GST_CLOCK_TIME_IS_VALID (parse->priv->next_ts)
      && GST_CLOCK_TIME_IS_VALID (parse->priv->duration)) {
    parse->priv->next_ts += parse->priv->duration;
  } else {
    parse->priv->duration = GST_CLOCK_TIME_NONE;
    parse->priv->next_ts = GST_CLOCK_TIME_NONE;
  }

  GST_BUFFER_DURATION (outbuf) = parse->priv->duration;
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (parse->priv->srcpad));

  GST_LOG_OBJECT (parse, "pushing buffer (ts=%" GST_TIME_FORMAT ", len=%u)",
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), len);

  ret = gst_pad_push (parse->priv->srcpad, outbuf);

  return ret;
}
/* Depayloads one SVQ3 RTP packet: handles config packets by setting caps,
 * accumulates frame fragments in an adapter, and returns a complete frame
 * when the RTP marker bit is set (NULL otherwise). The bitstream layout is
 * reverse-engineered — see the inline comments. */
static GstBuffer *
gst_rtp_sv3v_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{
  GstRtpSV3VDepay *rtpsv3vdepay;
  GstBuffer *outbuf;
  guint16 seq;

  rtpsv3vdepay = GST_RTP_SV3V_DEPAY (depayload);

  /* flush on sequence number gaps */
  seq = gst_rtp_buffer_get_seq (buf);
  if (seq != rtpsv3vdepay->nextseq) {
    gst_adapter_clear (rtpsv3vdepay->adapter);
  }
  rtpsv3vdepay->nextseq = seq + 1;

  {
    gint payload_len;
    guint8 *payload;
    gboolean M;
    gboolean C, S, E;

    payload_len = gst_rtp_buffer_get_payload_len (buf);
    if (payload_len < 3)
      goto bad_packet;

    payload = gst_rtp_buffer_get_payload (buf);

    M = gst_rtp_buffer_get_marker (buf);

    /* This is all a guess:
     *                      1 1 1 1 1 1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |0|C|S|E|0|0|0|0|0|0|0|0|0|0|0|0|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *
     * C: config, packet contains config info
     * S: start, packet contains start of frame
     * E: end, packet contains end of frame
     */
    /* this seems to indicate a packet with a config string sent before each
     * keyframe */
    C = (payload[0] & 0x40) == 0x40;

    /* redundant with the RTP marker bit */
    S = (payload[0] & 0x20) == 0x20;
    E = (payload[0] & 0x10) == 0x10;

    if (C) {
      GstCaps *caps;
      GstBuffer *codec_data;
      GValue value = { 0 };

      /* if we already have caps, we don't need to do anything. FIXME, check if
       * something changed. */
      if (GST_PAD_CAPS (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload)))
        return NULL;

      /* No idea... These are the two examples I found.. */
      if (payload[2] == 0x1d) {
        rtpsv3vdepay->width = 160;
        rtpsv3vdepay->height = 128;
      } else if (payload[2] == 0xdd) {
        rtpsv3vdepay->width = 320;
        rtpsv3vdepay->height = 240;
      }

      /* we need a dummy empty codec data */
      g_value_init (&value, GST_TYPE_BUFFER);
      gst_value_deserialize (&value, "");
      codec_data = gst_value_get_buffer (&value);

      caps = gst_caps_new_simple ("video/x-svq",
          "svqversion", G_TYPE_INT, 3,
          "width", G_TYPE_INT, rtpsv3vdepay->width,
          "height", G_TYPE_INT, rtpsv3vdepay->height,
          "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
      gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), caps);
      gst_caps_unref (caps);
      g_value_unset (&value);

      /* config packets carry no frame data */
      return NULL;
    }

    /* store data in adapter, strip off 2 bytes header */
    outbuf = gst_rtp_buffer_get_payload_subbuffer (buf, 2, -1);
    gst_adapter_push (rtpsv3vdepay->adapter, outbuf);

    if (M) {
      /* frame is completed: push contents of adapter */
      guint avail;

      avail = gst_adapter_available (rtpsv3vdepay->adapter);
      outbuf = gst_adapter_take_buffer (rtpsv3vdepay->adapter, avail);

      return outbuf;
    }
  }
  return NULL;

  /* ERRORS */
bad_packet:
  {
    GST_ELEMENT_WARNING (rtpsv3vdepay, STREAM, DECODE,
        (NULL), ("Packet was too short"));
    return NULL;
  }
}
/* Depayloads one VP9 RTP packet: parses the VP9 payload descriptor
 * (including the optional scalability structure), accumulates payload in an
 * adapter, and on the marker bit returns a complete frame (NULL while the
 * frame is incomplete or the packet is dropped). */
static GstBuffer *
gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize = 1;
  guint size;
  gint spatial_layer = 0;
  gboolean i_bit, p_bit, l_bit, f_bit, b_bit, e_bit, v_bit;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  size = gst_rtp_buffer_get_payload_len (rtp);

  /* Mandatory with at least one header and one vp9 byte */
  if (G_UNLIKELY (size < hdrsize + 1))
    goto too_small;

  data = gst_rtp_buffer_get_payload (rtp);

  /* flag bits from the first descriptor byte */
  i_bit = (data[0] & 0x80) != 0;
  p_bit = (data[0] & 0x40) != 0;
  l_bit = (data[0] & 0x20) != 0;
  f_bit = (data[0] & 0x10) != 0;
  b_bit = (data[0] & 0x08) != 0;
  e_bit = (data[0] & 0x04) != 0;
  v_bit = (data[0] & 0x02) != 0;

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP9 layer frame, otherwise bail */
    if (!b_bit)
      goto done;

    self->started = TRUE;
  }

  GST_TRACE_OBJECT (self, "IPLFBEV : %d%d%d%d%d%d%d", i_bit, p_bit, l_bit,
      f_bit, b_bit, e_bit, v_bit);

  /* Check I optional header Picture ID */
  if (i_bit) {
    hdrsize++;
    if (G_UNLIKELY (size < hdrsize + 1))
      goto too_small;
    /* Check M for 15 bits PictureID */
    if ((data[1] & 0x80) != 0) {
      hdrsize++;
      if (G_UNLIKELY (size < hdrsize + 1))
        goto too_small;
    }
  }

  /* flexible-mode not implemented */
  g_assert (!f_bit);

  /* Check L optional header layer indices */
  if (l_bit) {
    hdrsize++;
    /* Check TL0PICIDX temporal layer zero index (non-flexible mode) */
    if (!f_bit)
      hdrsize++;
  }

  /* Check V optional Scalability Structure */
  if (v_bit) {
    guint n_s, y_bit, g_bit;
    guint8 *ss = &data[hdrsize];
    guint sssize = 1;

    if (G_UNLIKELY (size < hdrsize + sssize + 1))
      goto too_small;

    n_s = (ss[0] & 0xe0) >> 5;
    y_bit = (ss[0] & 0x10) != 0;
    g_bit = (ss[0] & 0x08) != 0;

    GST_TRACE_OBJECT (self, "SS header: N_S=%u, Y=%u, G=%u", n_s, y_bit,
        g_bit);

    /* Y set: each of the N_S+1 layers carries a 4-byte WIDTH/HEIGHT pair */
    sssize += y_bit ? (n_s + 1) * 4 : 0;
    if (G_UNLIKELY (size < hdrsize + sssize + 1))
      goto too_small;

    if (y_bit) {
      guint i;
      for (i = 0; i <= n_s; i++) {
        /* For now, simply use the last layer specified for width and height */
        self->ss_width = ss[1 + i * 4] * 256 + ss[2 + i * 4];
        self->ss_height = ss[3 + i * 4] * 256 + ss[4 + i * 4];
        GST_TRACE_OBJECT (self, "N_S[%d]: WIDTH=%u, HEIGHT=%u", i,
            self->ss_width, self->ss_height);
      }
    }

    /* G set: a picture-group description follows; walk it only to advance
     * sssize (contents are just traced, not otherwise used) */
    if (g_bit) {
      guint i, j;
      guint n_g = ss[sssize];
      sssize++;
      if (G_UNLIKELY (size < hdrsize + sssize + 1))
        goto too_small;
      for (i = 0; i < n_g; i++) {
        guint t = (ss[sssize] & 0xe0) >> 5;
        guint u = (ss[sssize] & 0x10) >> 4;
        guint r = (ss[sssize] & 0x0c) >> 2;
        GST_TRACE_OBJECT (self, "N_G[%u]: 0x%02x -> T=%u, U=%u, R=%u", i,
            ss[sssize], t, u, r);
        for (j = 0; j < r; j++)
          GST_TRACE_OBJECT (self, "  R[%u]: P_DIFF=%u", j,
              ss[sssize + 1 + j]);
        sssize += 1 + r;
        if (G_UNLIKELY (size < hdrsize + sssize + 1))
          goto too_small;
      }
    }
    hdrsize += sssize;
  }

  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  {
    GstMapInfo map;
    gst_buffer_map (payload, &map, GST_MAP_READ);
    GST_MEMDUMP_OBJECT (self, "vp9 payload", map.data, 16);
    gst_buffer_unmap (payload, &map);
  }
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    /* NOTE(review): spatial_layer is never updated from the descriptor, so
     * this reduces to !p_bit — confirm whether layer parsing was intended */
    gboolean key_frame_first_layer = !p_bit && spatial_layer == 0;

    if (gst_adapter_available (self->adapter) < 10)
      goto too_small;

    out =
        gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    /* Filter away all metas that are not sensible to copy */
    gst_rtp_drop_meta (GST_ELEMENT_CAST (self), out,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));
    if (!key_frame_first_layer) {
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (!self->caps_sent) {
        /* cannot start on an inter frame: drop and request a keyframe */
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (self->last_width != self->ss_width ||
          self->last_height != self->ss_height) {
        GstCaps *srccaps;

        /* Width and height are optional in the RTP header. Consider to parse
         * the frame header in addition if missing from RTP header */
        if (self->ss_width != 0 && self->ss_height != 0) {
          srccaps = gst_caps_new_simple ("video/x-vp9",
              "framerate", GST_TYPE_FRACTION, 0, 1,
              "width", G_TYPE_INT, self->ss_width,
              "height", G_TYPE_INT, self->ss_height, NULL);
        } else {
          srccaps = gst_caps_new_simple ("video/x-vp9",
              "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
        }

        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);

        self->caps_sent = TRUE;
        self->last_width = self->ss_width;
        self->last_height = self->ss_height;
        self->ss_width = 0;
        self->ss_height = 0;
      }
    }

    return out;
  }

done:
  return NULL;

too_small:
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;

  goto done;
}
/* Sink-pad chain function: accumulates data in an adapter and splits it at
 * multipart boundaries, routing each part to a per-MIME-type source pad
 * (created on demand). `size` doubles as the parser status: negative values
 * are the MULTIPART_DATA_* error/EOS sentinels checked after `nodata`. */
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (parent);
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GSList *l;

    /* mark every source pad discont and drop partial data */
    for (l = multipart->srcpads; l != NULL; l = l->next) {
      GstMultipartPad *srcpad = l->data;

      srcpad->discont = TRUE;
    }
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      /* parse the part header; negative return means error or need data */
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    /* size covers the part data plus the boundary; datalen is data only */
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else if (G_UNLIKELY (!multipart->mime_type)) {
      GST_DEBUG_OBJECT (multipart, "content has no MIME type.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      GstClockTime ts;

      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);

      ts = gst_adapter_prev_pts (adapter, NULL);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      /* drop the boundary bytes that follow the data */
      gst_adapter_flush (adapter, size - datalen);

      if (created) {
        GstTagList *tags;
        GstSegment segment;

        gst_segment_init (&segment, GST_FORMAT_TIME);

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));

        tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
      }

      outbuf = gst_buffer_make_writable (outbuf);
      /* only stamp a timestamp when it differs from the last one pushed on
       * this pad, so repeated parts of one input buffer aren't re-stamped */
      if (srcpad->last_ts == GST_CLOCK_TIME_NONE || srcpad->last_ts != ts) {
        GST_BUFFER_TIMESTAMP (outbuf) = ts;
        srcpad->last_ts = ts;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
      }

      if (srcpad->discont) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
        srcpad->discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
      }

      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      res = gst_pad_push (srcpad->pad, outbuf);
      /* combine per-pad flow returns into an overall result */
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_EOS;

  return res;
}
/* TsMux output callback: receives one finished 188-byte TS packet.
 *
 * In m2ts mode each TS packet is wrapped into a 192-byte M2TS packet
 * (4-byte arrival timestamp prepended).  Packets without a PCR are queued
 * in an adapter; when a packet carrying a PCR arrives, timestamps for the
 * queued packets are interpolated between the previous and current PCR and
 * the queue is drained downstream.  In normal mode the packet is pushed
 * directly.  Returns FALSE on error (last_flow_ret is set). */
static gboolean
new_packet_cb (guint8 * data, guint len, void *user_data, gint64 new_pcr)
{
  /* Called when the TsMux has prepared a packet for output. Return FALSE
   * on error */
  MpegTsMux *mux = (MpegTsMux *) user_data;
  GstBuffer *buf, *out_buf;
  GstFlowReturn ret;
  gfloat current_ts;
  gint64 m2ts_pcr, pcr_bytes, chunk_bytes;
  gint8 *temp_ptr;              /* NOTE(review): assigned below but never read — dead code */
  gint64 ts_rate;

  if (mux->m2ts_mode == TRUE) {
    /* Enters when the m2ts-mode is set true */
    buf = gst_buffer_new_and_alloc (M2TS_PACKET_LENGTH);
    if (G_UNLIKELY (buf == NULL)) {
      mux->last_flow_ret = GST_FLOW_ERROR;
      return FALSE;
    }
    gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad));

    /* copies the ts data of 188 bytes to the m2ts buffer at an offset
       of 4 bytes of timestamp  */
    memcpy (GST_BUFFER_DATA (buf) + 4, data, len);

    if (new_pcr >= 0) {
      /*when there is a pcr value in ts data */
      pcr_bytes = 0;
      if (mux->first_pcr) {
        /*Incase of first pcr */
        /*writing the 4 byte timestamp value */
        GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), new_pcr);

        GST_LOG_OBJECT (mux, "Outputting a packet of length %d",
            M2TS_PACKET_LENGTH);
        ret = gst_pad_push (mux->srcpad, buf);
        if (G_UNLIKELY (ret != GST_FLOW_OK)) {
          mux->last_flow_ret = ret;
          return FALSE;
        }
        mux->first_pcr = FALSE;
        mux->previous_pcr = new_pcr;
        pcr_bytes = M2TS_PACKET_LENGTH;
      }
      /* NOTE(review): in the non-first-PCR case 'buf' (the packet that
       * carries this PCR) is neither pushed downstream, queued in the
       * adapter, nor unreffed on this path — it appears to be leaked and
       * its payload lost.  Verify intended ownership before fixing. */
      chunk_bytes = gst_adapter_available (mux->adapter);

      if (G_UNLIKELY (chunk_bytes)) {
        /* calculate rate based on latest and previous pcr values */
        ts_rate = ((chunk_bytes * STANDARD_TIME_CLOCK) / (new_pcr -
                mux->previous_pcr));
        while (1) {
          /*loop till all the accumulated ts packets are transformed to
             m2ts packets and pushed */
          /* interpolate an arrival timestamp proportional to the bytes
           * already emitted since the previous PCR */
          current_ts = ((gfloat) mux->previous_pcr / STANDARD_TIME_CLOCK) +
              ((gfloat) pcr_bytes / ts_rate);
          /* wrap the 90 kHz part at 2^33 per MPEG-TS, keep the 27 MHz
           * extension in the low part */
          m2ts_pcr = (((gint64) (STANDARD_TIME_CLOCK * current_ts / 300) &
                  TWO_POW_33_MINUS1) * 300) + ((gint64) (STANDARD_TIME_CLOCK *
                  current_ts) % 300);
          temp_ptr = (gint8 *) & m2ts_pcr;

          out_buf = gst_adapter_take_buffer (mux->adapter, M2TS_PACKET_LENGTH);
          if (G_UNLIKELY (!out_buf))
            break;
          gst_buffer_set_caps (out_buf, GST_PAD_CAPS (mux->srcpad));

          /*writing the 4 byte timestamp value */
          GST_WRITE_UINT32_BE (GST_BUFFER_DATA (out_buf), m2ts_pcr);

          GST_LOG_OBJECT (mux, "Outputting a packet of length %d",
              M2TS_PACKET_LENGTH);
          ret = gst_pad_push (mux->srcpad, out_buf);
          if (G_UNLIKELY (ret != GST_FLOW_OK)) {
            mux->last_flow_ret = ret;
            return FALSE;
          }
          pcr_bytes += M2TS_PACKET_LENGTH;
        }
        /* NOTE(review): previous_pcr is updated with the last *interpolated*
         * value rather than new_pcr; presumably these converge, but confirm
         * against the rate computation above. */
        mux->previous_pcr = m2ts_pcr;
      }
    } else
      /* If theres no pcr in current ts packet then push the packet
         to an adapter, which is used to create m2ts packets */
      gst_adapter_push (mux->adapter, buf);
  } else {
    /* In case of Normal Ts packets */
    GST_LOG_OBJECT (mux, "Outputting a packet of length %d", len);
    buf = gst_buffer_new_and_alloc (len);
    if (G_UNLIKELY (buf == NULL)) {
      mux->last_flow_ret = GST_FLOW_ERROR;
      return FALSE;
    }
    gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad));
    memcpy (GST_BUFFER_DATA (buf), data, len);
    GST_BUFFER_TIMESTAMP (buf) = mux->last_ts;
    ret = gst_pad_push (mux->srcpad, buf);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      mux->last_flow_ret = ret;
      return FALSE;
    }
  }
  return TRUE;
}
/* Chain function: accumulates raw VCD/CDXA data and extracts the 2324-byte
 * payload out of every complete 2352-byte sector, pushing it downstream.
 *
 * Sector layout:
 *   +-------------------------------------------------------+
 *   !   sync    !  header  ! subheader !  data ...  !  edc  !
 *   ! 12 bytes  !  4 bytes !  8 bytes  ! 2324 bytes ! 4 bytes!
 *   +-------------------------------------------------------+
 *   sync       : 00 FF FF FF FF FF FF FF FF FF FF 00
 *   header     : hour minute second mode
 *   sub-header : track channel sub_mode coding repeat (4 bytes)
 *   edc        : checksum
 *
 * Ownership of @buf transfers to the adapter.  Returns the last downstream
 * flow result (GST_FLOW_OK while everything is fine). */
static GstFlowReturn
gst_vcd_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstVcdParse *vcd = GST_VCD_PARSE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;

  gst_adapter_push (vcd->adapter, buf);
  buf = NULL;                   /* adapter owns it now */

  while (gst_adapter_available (vcd->adapter) >= GST_CDXA_SECTOR_SIZE) {
    const guint8 *sector;
    guint8 hdr[4 + 8];          /* header + subheader (currently unused) */
    gint sync_pos;
    GstBuffer *outbuf;

    /* find sync (we could peek any size though really) */
    sector = gst_adapter_peek (vcd->adapter, GST_CDXA_SECTOR_SIZE);
    sync_pos = gst_vcd_parse_sync (sector, GST_CDXA_SECTOR_SIZE);
    GST_LOG_OBJECT (vcd, "sync offset = %d", sync_pos);

    if (sync_pos < 0) {
      /* No sync marker in this window; drop all but the last 11 bytes so a
       * sync straddling the window edge is still found next round. */
      gst_adapter_flush (vcd->adapter, GST_CDXA_SECTOR_SIZE - 12);
      continue;                 /* try again */
    }

    /* skip to the sync point and make sure a whole sector follows */
    gst_adapter_flush (vcd->adapter, sync_pos);
    if (gst_adapter_available (vcd->adapter) < GST_CDXA_SECTOR_SIZE) {
      GST_LOG_OBJECT (vcd, "not enough data in adapter, waiting for more");
      break;
    }

    GST_LOG_OBJECT (vcd, "have full sector");

    /* Skip CDXA header and edc footer, only keep data in the middle */
    gst_adapter_copy (vcd->adapter, hdr, 12, sizeof (hdr));
    gst_adapter_flush (vcd->adapter, GST_CDXA_HEADER_SIZE);
    outbuf = gst_adapter_take_buffer (vcd->adapter, GST_CDXA_DATA_SIZE);
    gst_adapter_flush (vcd->adapter, 4);        /* drop edc checksum */

    /* we could probably do something clever to keep track of buffer offsets */
    outbuf = gst_buffer_make_metadata_writable (outbuf);
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (vcd->srcpad));

    ret = gst_pad_push (vcd->srcpad, outbuf);
    outbuf = NULL;

    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      GST_DEBUG_OBJECT (vcd, "flow: %s", gst_flow_get_name (ret));
      break;
    }
  }
  return ret;
}
/* Render vfunc: writes each buffer to the current file, or — when
 * aggregate-gops is enabled — accumulates buffers in an adapter and writes
 * one whole GOP per file, flushing the previous GOP whenever a new keyframe
 * arrives.  Header buffers are parked in potential_next_gop since they may
 * belong to the following GOP.  Returns the write result. */
static GstFlowReturn
gst_multi_file_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (bsink);
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean is_keyframe;
  gboolean is_header;
  GstBuffer *pending_gop = NULL;
  guint gop_size;

  is_header = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER);
  is_keyframe = !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* Simple mode: one write per buffer. */
  if (!sink->aggregate_gops)
    return gst_multi_file_sink_write_buffer (sink, buffer);

  gop_size = gst_adapter_available (sink->gop_adapter);

  GST_LOG_OBJECT (sink, "aggregate GOP: received %s%s unit buffer: "
      "%" GST_PTR_FORMAT,
      (is_keyframe) ? "key" : "delta", (is_header) ? " header" : "", buffer);

  /* If it's a header buffer, it might potentially be for the next GOP */
  if (is_header) {
    GST_LOG_OBJECT (sink, "Accumulating buffer to potential next GOP");
    sink->potential_next_gop =
        g_list_append (sink->potential_next_gop, gst_buffer_ref (buffer));
    return ret;
  }

  /* A keyframe closes the GOP accumulated so far: pull it out of the
   * adapter (with the timestamps of its first bytes) before queueing the
   * new data. */
  if (is_keyframe && gop_size > 0) {
    GstClockTime first_pts, first_dts;

    GST_LOG_OBJECT (sink, "Grabbing pending completed GOP");
    first_pts = gst_adapter_prev_pts_at_offset (sink->gop_adapter, 0, NULL);
    first_dts = gst_adapter_prev_dts_at_offset (sink->gop_adapter, 0, NULL);
    pending_gop = gst_adapter_take_buffer (sink->gop_adapter, gop_size);
    GST_BUFFER_PTS (pending_gop) = first_pts;
    GST_BUFFER_DTS (pending_gop) = first_dts;
  }

  /* Any parked header buffers belong to the GOP that starts now; move them
   * into the (now empty or freshly started) adapter first. */
  if (sink->potential_next_gop) {
    GList *walk;

    GST_LOG_OBJECT (sink, "Carrying over pending next GOP data into adapter");
    for (walk = sink->potential_next_gop; walk; walk = walk->next)
      gst_adapter_push (sink->gop_adapter, (GstBuffer *) walk->data);
    g_list_free (sink->potential_next_gop);
    sink->potential_next_gop = NULL;
  }

  GST_LOG_OBJECT (sink, "storing buffer in adapter");
  gst_adapter_push (sink->gop_adapter, gst_buffer_ref (buffer));

  /* Finally write out the completed GOP, if any. */
  if (pending_gop != NULL) {
    GST_DEBUG_OBJECT (sink, "writing out pending GOP, %u bytes", gop_size);
    GST_DEBUG_OBJECT (sink,
        "gop buffer pts:%" GST_TIME_FORMAT " dts:%" GST_TIME_FORMAT
        " duration:%" GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_PTS (pending_gop)),
        GST_TIME_ARGS (GST_BUFFER_DTS (pending_gop)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (pending_gop)));
    ret = gst_multi_file_sink_write_buffer (sink, pending_gop);
    gst_buffer_unref (pending_gop);
  }

  return ret;
}
/* Depayload one RTP packet carrying VP8 (per the VP8 RTP payload format).
 *
 * Strips the VP8 payload descriptor, accumulates the payload in an adapter,
 * and on the RTP marker bit returns the assembled frame (with
 * GST_BUFFER_FLAG_DELTA_UNIT set for inter frames).  Returns NULL while a
 * frame is still being assembled or when the packet is dropped. */
static GstBuffer *
gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstBuffer * buf)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize;                /* size of the VP8 payload descriptor */
  guint size;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (buf))) {
    /* partial frame in the adapter can never be completed — drop it and
     * wait for the next frame start */
    GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;
  }

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuffer);
  size = gst_rtp_buffer_get_payload_len (&rtpbuffer);

  /* At least one header and one vp8 byte */
  if (G_UNLIKELY (size < 2))
    goto too_small;

  data = gst_rtp_buffer_get_payload (&rtpbuffer);

  if (G_UNLIKELY (!self->started)) {
    /* Check if this is the start of a VP8 frame, otherwise bail */
    /* S=1 and PartID= 0 */
    if ((data[0] & 0x1F) != 0x10)
      goto done;

    self->started = TRUE;
  }

  /* Walk the optional parts of the payload descriptor to find where the
   * actual VP8 data starts. */
  hdrsize = 1;
  /* Check X optional header */
  if ((data[0] & 0x80) != 0) {
    hdrsize++;
    /* Check I optional header */
    if ((data[1] & 0x80) != 0) {
      if (G_UNLIKELY (size < 3))
        goto too_small;
      hdrsize++;
      /* Check for 16 bits PictureID */
      if ((data[2] & 0x80) != 0)
        hdrsize++;
    }
    /* Check L optional header */
    if ((data[1] & 0x40) != 0)
      hdrsize++;
    /* Check T or K optional headers */
    if ((data[1] & 0x20) != 0 || (data[1] & 0x10) != 0)
      hdrsize++;
  }
  GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size);

  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  payload = gst_rtp_buffer_get_payload_subbuffer (&rtpbuffer, hdrsize, -1);
  gst_adapter_push (self->adapter, payload);

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (&rtpbuffer)) {
    GstBuffer *out;
    guint8 flag0;

    /* first payload byte: bit 0 is the VP8 frame header's inverse-keyframe
     * flag (P bit): 1 = inter frame, 0 = key frame */
    gst_adapter_copy (self->adapter, &flag0, 0, 1);

    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;
    gst_rtp_buffer_unmap (&rtpbuffer);

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    if ((flag0 & 0x01))
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);
    else
      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);

    return out;
  }

done:
  gst_rtp_buffer_unmap (&rtpbuffer);
  return NULL;

too_small:
  /* malformed packet: drop it and the partial frame; resync on next start */
  GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;
  goto done;
}
/* Source-pad task loop for the Android MediaCodec audio decoder.
 *
 * Runs with the audio decoder stream lock held (dropped around blocking
 * MediaCodec calls and around drain signalling).  One iteration dequeues one
 * output buffer from the codec, converts it to a GstBuffer (optionally
 * reordering channels), pushes complete codec frames downstream, releases
 * the codec buffer, and handles EOS/drain.  On any error the task pauses
 * itself and downstream_flow_ret is set for the chain function to see. */
static void
gst_amc_audio_dec_loop (GstAmcAudioDec * self)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gboolean is_eos;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  gint idx;
  GError *err = NULL;

  GST_AUDIO_DECODER_STREAM_LOCK (self);

retry:
  /*if (self->input_caps_changed) { idx = INFO_OUTPUT_FORMAT_CHANGED; } else { */
  GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
  GST_AUDIO_DECODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx =
      gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
      &err);
  GST_AUDIO_DECODER_STREAM_LOCK (self);
  /*} */

  if (idx < 0) {
    /* Negative index: either a status code or an error. */
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }

    switch (idx) {
      case INFO_OUTPUT_BUFFERS_CHANGED:
        /* Handled internally */
        g_assert_not_reached ();
        break;
      case INFO_OUTPUT_FORMAT_CHANGED:{
        GstAmcFormat *format;
        gchar *format_string;

        GST_DEBUG_OBJECT (self, "Output format has changed");

        format = gst_amc_codec_get_output_format (self->codec, &err);
        if (!format)
          goto format_error;

        format_string = gst_amc_format_to_string (format, &err);
        if (err) {
          gst_amc_format_free (format);
          goto format_error;
        }
        GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
        g_free (format_string);

        if (!gst_amc_audio_dec_set_src_caps (self, format)) {
          gst_amc_format_free (format);
          goto format_error;
        }
        gst_amc_format_free (format);

        goto retry;
      }
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
        goto retry;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto retry;
  }

  GST_DEBUG_OBJECT (self,
      "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
      " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
      buffer_info.presentation_time_us, buffer_info.flags);

  /* double-negate to normalise the flag bit to TRUE/FALSE */
  is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);

  buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
  if (!buf)
    goto failed_to_get_output_buffer;

  if (buffer_info.size > 0) {
    GstBuffer *outbuf;
    GstMapInfo minfo;

    /* This sometimes happens at EOS or if the input is not properly framed,
     * let's handle it gracefully by allocating a new buffer for the current
     * caps and filling it */
    if (buffer_info.size % self->info.bpf != 0)
      goto invalid_buffer_size;

    outbuf =
        gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (self),
        buffer_info.size);
    if (!outbuf)
      goto failed_allocate;

    gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE);
    if (self->needs_reorder) {
      /* remap MediaCodec channel order to the downstream channel order,
       * sample by sample (16-bit samples assumed by the gint16 access) */
      gint i, n_samples, c, n_channels;
      gint *reorder_map = self->reorder_map;
      gint16 *dest, *source;

      dest = (gint16 *) minfo.data;
      source = (gint16 *) (buf->data + buffer_info.offset);
      n_samples = buffer_info.size / self->info.bpf;
      n_channels = self->info.channels;

      for (i = 0; i < n_samples; i++) {
        for (c = 0; c < n_channels; c++) {
          dest[i * n_channels + reorder_map[c]] = source[i * n_channels + c];
        }
      }
    } else {
      orc_memcpy (minfo.data, buf->data + buffer_info.offset,
          buffer_info.size);
    }
    gst_buffer_unmap (outbuf, &minfo);

    /* spf != -1 means the codec has a fixed samples-per-frame: gather whole
     * codec frames in the adapter; otherwise finish the buffer directly. */
    if (self->spf != -1) {
      gst_adapter_push (self->output_adapter, outbuf);
    } else {
      flow_ret =
          gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf, 1);
    }
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (self->spf != -1) {
    GstBuffer *outbuf;
    guint avail = gst_adapter_available (self->output_adapter);
    guint nframes;

    /* On EOS we take the complete adapter content, no matter
     * if it is a multiple of the codec frame size or not.
     * Otherwise we take a multiple of codec frames and push
     * them downstream */
    avail /= self->info.bpf;
    if (!is_eos) {
      nframes = avail / self->spf;
      avail = nframes * self->spf;
    } else {
      nframes = (avail + self->spf - 1) / self->spf;     /* round up at EOS */
    }
    avail *= self->info.bpf;

    if (avail > 0) {
      outbuf = gst_adapter_take_buffer (self->output_adapter, avail);
      flow_ret =
          gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf,
          nframes);
    }
  }

  if (!gst_amc_codec_release_output_buffer (self->codec, idx, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto failed_release;
  }

  if (is_eos || flow_ret == GST_FLOW_EOS) {
    /* Drop the stream lock before taking drain_lock to keep lock order
     * consistent with the drain path. */
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    if (self->draining) {
      GST_DEBUG_OBJECT (self, "Drained");
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
    } else if (flow_ret == GST_FLOW_OK) {
      GST_DEBUG_OBJECT (self, "Component signalled EOS");
      flow_ret = GST_FLOW_EOS;
    }
    g_mutex_unlock (&self->drain_lock);
    GST_AUDIO_DECODER_STREAM_LOCK (self);
  } else {
    GST_DEBUG_OBJECT (self, "Finished frame: %s",
        gst_flow_get_name (flow_ret));
  }

  self->downstream_flow_ret = flow_ret;

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  GST_AUDIO_DECODER_STREAM_UNLOCK (self);

  return;

  /* Error/exit paths below: each posts EOS downstream where appropriate,
   * pauses this task, records the flow return, and wakes any drain waiter. */
dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

format_error:
  {
    if (err)
      GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    else
      GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
          ("Failed to handle format"));
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
failed_release:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_FLUSHING;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");
      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));
      gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
      gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    }
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_to_get_output_buffer:
  {
    GST_AUDIO_DECODER_ERROR_FROM_ERROR (self, err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

invalid_buffer_size:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("Invalid buffer size %u (bfp %d)", buffer_info.size,
            self->info.bpf));
    gst_amc_codec_release_output_buffer (self->codec, idx, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }

failed_allocate:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
        ("Failed to allocate output buffer"));
    gst_amc_codec_release_output_buffer (self->codec, idx, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self),
        gst_event_new_eos ());
    gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self));
    self->downstream_flow_ret = GST_FLOW_ERROR;
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    g_mutex_lock (&self->drain_lock);
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
    g_mutex_unlock (&self->drain_lock);
    return;
  }
}
/* Feed accumulated input to the libde265 decoder and return the next
 * decoded image, if any.
 *
 * NOTE(review): this definition's head is split by preprocessor
 * conditionals — the GST_CHECK_VERSION(1,0,0) variant of the signature
 * (declaring 'frame' and, presumably, 'adapter') lives above this chunk;
 * the #endif below closes that conditional.  In packetized mode 4-byte NAL
 * length prefixes are translated into de265_push_NAL calls; otherwise the
 * raw bytestream is pushed.  Returns NEED_DATA_RESULT when more input is
 * required, GST_FLOW_ERROR on decode errors. */
static GstFlowReturn
gst_libde265_dec_parse_data (VIDEO_DECODER_BASE * parse, gboolean at_eos)
#endif
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
  const struct de265_image *img;
  de265_error ret = DE265_OK;
  int more = 0;
#if GST_CHECK_VERSION(1,0,0)
  de265_PTS pts = (de265_PTS) frame->pts;
#else
  de265_PTS pts = 0;
#endif
  if (dec->buffer_full) {
    // return any pending images before decoding more data
    if ((img = de265_peek_next_picture(dec->ctx)) != NULL) {
      return _gst_libde265_image_available(parse, img);
    }
    dec->buffer_full = 0;
  }
#if !GST_CHECK_VERSION(1,0,0)
  GstAdapter *adapter = parse->input_adapter;
#endif
  gsize size = gst_adapter_available (adapter);
  if (size == 0) {
    return NEED_DATA_RESULT;
  }

  GstBuffer *buf = gst_adapter_take_buffer(adapter, size);
  uint8_t *frame_data;
  uint8_t *end_data;
#if GST_CHECK_VERSION(1,0,0)
  GstMapInfo info;
  if (!gst_buffer_map(buf, &info, GST_MAP_READWRITE)) {
    /* NOTE(review): 'buf' is not unreffed on this early return — looks like
     * a buffer leak; confirm ownership semantics before changing. */
    return GST_FLOW_ERROR;
  }
  frame_data = info.data;
#else
  frame_data = GST_BUFFER_DATA(buf);
#endif
  end_data = frame_data + size;

  /* size > 0 is always true here (checked above); the else branch flushing
   * the decoder is effectively dead code in this path */
  if (size > 0) {
    if (dec->mode == GST_TYPE_LIBDE265_DEC_PACKETIZED) {
      // replace 4-byte length fields with NAL start codes
      uint8_t *start_data = frame_data;
      while (start_data + 4 <= end_data) {
        int nal_size = READ_BE32(start_data);
        if (start_data + nal_size > end_data) {
          GST_ELEMENT_ERROR (parse, STREAM, DECODE,
              ("Overflow in input data, check data mode"), (NULL));
          goto error;
        }
        ret = de265_push_NAL(dec->ctx, start_data + 4, nal_size, pts, NULL);
        if (ret != DE265_OK) {
          GST_ELEMENT_ERROR (parse, STREAM, DECODE,
              ("Error while pushing data: %s (code=%d)",
                  de265_get_error_text(ret), ret), (NULL));
          goto error;
        }
        start_data += 4 + nal_size;
      }
    } else {
      ret = de265_push_data(dec->ctx, frame_data, size, pts, NULL);
    }
  } else {
    ret = de265_flush_data(dec->ctx);
  }

  // decode as much as possible
  do {
    ret = de265_decode(dec->ctx, &more);
  } while (more && ret == DE265_OK);

#if GST_CHECK_VERSION(1,0,0)
  gst_buffer_unmap(buf, &info);
#endif
  gst_buffer_unref(buf);

  switch (ret) {
    case DE265_OK:
      break;
    case DE265_ERROR_IMAGE_BUFFER_FULL:
      /* decoder output queue is full: drain one image now, push more input
       * only after pending images were returned (see top of function) */
      dec->buffer_full = 1;
      if ((img = de265_peek_next_picture(dec->ctx)) == NULL) {
        return NEED_DATA_RESULT;
      }
      return _gst_libde265_image_available(parse, img);;
    case DE265_ERROR_WAITING_FOR_INPUT_DATA:
      return NEED_DATA_RESULT;
    default:
      GST_ELEMENT_ERROR (parse, STREAM, DECODE,
          ("Error while decoding: %s (code=%d)",
              de265_get_error_text(ret), ret), (NULL));
      return GST_FLOW_ERROR;
  }

  /* drain non-fatal decoder warnings into the GStreamer log */
  while ((ret = de265_get_warning(dec->ctx)) != DE265_OK) {
    GST_ELEMENT_WARNING (parse, STREAM, DECODE,
        ("%s (code=%d)", de265_get_error_text(ret), ret), (NULL));
  }

  if ((img = de265_peek_next_picture(dec->ctx)) == NULL) {
    // need more data
    return NEED_DATA_RESULT;
  }

  return _gst_libde265_image_available(parse, img);

error:
  /* unmap/unref the input buffer before reporting the failure */
#if GST_CHECK_VERSION(1,0,0)
  gst_buffer_unmap(buf, &info);
#endif
  gst_buffer_unref(buf);
  return GST_FLOW_ERROR;
}
static GstFlowReturn gst_base_video_decoder_drain (GstBaseVideoDecoder * dec, gboolean at_eos) { GstBaseVideoDecoderClass *klass; GstBaseVideoDecoderScanResult res; guint size; klass = GST_BASE_VIDEO_DECODER_GET_CLASS (dec); if (gst_adapter_available (dec->input_adapter) == 0) return GST_FLOW_OK; lost_sync: if (!dec->have_sync) { gint n, m; GST_DEBUG ("no sync, scanning"); n = gst_adapter_available (dec->input_adapter); m = klass->scan_for_sync (dec, dec->input_adapter); if (m == -1) { gst_object_unref (dec); return GST_FLOW_OK; } if (m < 0) { g_warning ("subclass returned negative scan %d", m); } if (m >= n) { GST_ERROR ("subclass scanned past end %d >= %d", m, n); } gst_adapter_flush (dec->input_adapter, m); if (m < n) { GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n); /* this is only "maybe" sync */ dec->have_sync = TRUE; } if (!dec->have_sync) { return GST_FLOW_OK; } } res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos); while (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK) { GstBuffer *buf; GstFlowReturn ret; GST_DEBUG ("Packet size: %u", size); if (size > gst_adapter_available (dec->input_adapter)) return GST_FLOW_OK; buf = gst_adapter_take_buffer (dec->input_adapter, size); dec->prev_buf_offset = dec->current_buf_offset; dec->current_buf_offset = dec->input_offset - gst_adapter_available (dec->input_adapter); ret = klass->parse_data (dec, buf, at_eos); if (ret != GST_FLOW_OK) return ret; res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos); } switch (res) { case GST_BASE_VIDEO_DECODER_SCAN_RESULT_LOST_SYNC: dec->have_sync = FALSE; goto lost_sync; case GST_BASE_VIDEO_DECODER_SCAN_RESULT_NEED_DATA: return GST_FLOW_OK; default: GST_ERROR_OBJECT (dec, "Subclass returned invalid scan result"); return GST_FLOW_ERROR; } }
/* One decode iteration: drain the queued input buffers into the input
 * adapter, then parse decode units out of it and decode at most one frame.
 *
 * Parsed units are staged in the output adapter until the parser reports a
 * complete frame; the assembled frame is then decoded and released.
 * Returns END_OF_STREAM once the EOS marker buffer was seen and all data is
 * consumed, ERROR_NO_DATA when more input is required, or the parse/decode
 * status. */
static GstVaapiDecoderStatus
decode_step (GstVaapiDecoder * decoder)
{
  GstVaapiParserState *const ps = &decoder->parser_state;
  GstVaapiDecoderStatus status;
  GstBuffer *buffer;
  gboolean got_frame;
  guint got_unit_size, input_size;

  status = gst_vaapi_decoder_check_status (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  /* Fill adapter with all buffers we have in the queue */
  for (;;) {
    buffer = pop_buffer (decoder);
    if (!buffer)
      break;
    /* the EOS marker buffer itself carries no payload and is not pushed */
    ps->at_eos = GST_BUFFER_IS_EOS (buffer);
    if (!ps->at_eos)
      gst_adapter_push (ps->input_adapter, buffer);
  }

  /* Parse and decode all decode units */
  input_size = gst_adapter_available (ps->input_adapter);
  if (input_size == 0) {
    if (ps->at_eos)
      return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
  }

  do {
    if (!ps->current_frame) {
      /* codec frame is managed manually here (not via a GstVideoDecoder),
       * hence the explicit ref_count initialization */
      ps->current_frame = g_slice_new0 (GstVideoCodecFrame);
      if (!ps->current_frame)
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
      ps->current_frame->ref_count = 1;
      ps->current_frame->system_frame_number = ps->current_frame_number++;
    }

    status = do_parse (decoder, ps->current_frame, ps->input_adapter,
        ps->at_eos, &got_unit_size, &got_frame);
    GST_DEBUG ("parse frame (status = %d)", status);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
      /* running out of data at EOS means the stream is fully consumed */
      if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA && ps->at_eos)
        status = GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
      break;
    }

    if (got_unit_size > 0) {
      /* move one parsed decode unit from input to output adapter */
      buffer = gst_adapter_take_buffer (ps->input_adapter, got_unit_size);
      input_size -= got_unit_size;

      /* the first unit of a frame determines the frame's pts */
      if (gst_adapter_available (ps->output_adapter) == 0) {
        ps->current_frame->pts = gst_adapter_prev_pts (ps->input_adapter,
            NULL);
      }
      gst_adapter_push (ps->output_adapter, buffer);
    }

    if (got_frame) {
      /* assemble all staged units into the frame's input buffer and decode;
       * only one frame is decoded per call */
      ps->current_frame->input_buffer =
          gst_adapter_take_buffer (ps->output_adapter,
          gst_adapter_available (ps->output_adapter));

      status = do_decode (decoder, ps->current_frame);
      GST_DEBUG ("decode frame (status = %d)", status);

      gst_video_codec_frame_unref (ps->current_frame);
      ps->current_frame = NULL;
      break;
    }
  } while (input_size > 0);
  return status;
}
/* Depayloads one RTP buffer carrying QuickTime-packed media.
 *
 * Parses the 4-byte payload header (version, packing mode PCK, and the
 * S/Q/L/D flags), then the optional payload description (Q) and
 * sample-specific info (L) TLV sections, and finally extracts the media
 * data according to PCK:
 *   1 - samples pushed as a single buffer,
 *   2 - multiple length-prefixed samples, each pushed individually,
 *   3 - a fragmented sample, accumulated in the adapter until the RTP
 *       marker bit closes it.
 *
 * Returns a buffer for the base class to push downstream, or NULL
 * (fragment buffered, samples already pushed, or error). */
static GstBuffer *
gst_rtp_xqt_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
  GstRtpXQTDepay *rtpxqtdepay;
  GstBuffer *outbuf = NULL;
  gboolean m;
  GstRTPBuffer rtp = { NULL };

  rtpxqtdepay = GST_RTP_XQT_DEPAY (depayload);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  if (GST_BUFFER_IS_DISCONT (buf)) {
    /* discont, clear adapter and try to find a new packet start */
    gst_adapter_clear (rtpxqtdepay->adapter);
    rtpxqtdepay->need_resync = TRUE;
    GST_DEBUG_OBJECT (rtpxqtdepay, "we need resync");
  }

  m = gst_rtp_buffer_get_marker (&rtp);
  GST_LOG_OBJECT (rtpxqtdepay, "marker: %d", m);

  {
    gint payload_len;
    guint avail;
    guint8 *payload;
    guint8 ver, pck;
    gboolean s, q, l, d;

    payload_len = gst_rtp_buffer_get_payload_len (&rtp);
    payload = gst_rtp_buffer_get_payload (&rtp);

    /*                     1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * | VER |PCK|S|Q|L|      RES    |D|     QuickTime Payload ID      |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    if (payload_len <= 4)
      goto wrong_length;

    ver = (payload[0] & 0xf0) >> 4;
    if (ver > 1)
      goto wrong_version;
    pck = (payload[0] & 0x0c) >> 2;
    if (pck == 0)
      goto pck_reserved;
    s = (payload[0] & 0x02) != 0;       /* contains sync sample */
    q = (payload[0] & 0x01) != 0;       /* has payload description */
    l = (payload[1] & 0x80) != 0;       /* has packet specific information description */
    d = (payload[2] & 0x80) != 0;       /* don't cache info for payload id */
    /* id used for caching info */
    rtpxqtdepay->current_id = ((payload[2] & 0x7f) << 8) | payload[3];

    GST_LOG_OBJECT (rtpxqtdepay,
        "VER: %d, PCK: %d, S: %d, Q: %d, L: %d, D: %d, ID: %d", ver, pck, s,
        q, l, d, rtpxqtdepay->current_id);

    if (rtpxqtdepay->need_resync) {
      /* we need to find the boundary of a new packet after a DISCONT */
      if (pck != 3 || q) {
        /* non-fragmented packet or payload description present, packet starts
         * here. */
        rtpxqtdepay->need_resync = FALSE;
      } else {
        /* fragmented packet without description */
        if (m) {
          /* marker bit set, next packet is start of new one */
          rtpxqtdepay->need_resync = FALSE;
        }
        goto need_resync;
      }
    }

    /* skip the 4-byte payload header parsed above */
    payload += 4;
    payload_len -= 4;

    if (q) {
      gboolean k, f, a, z;
      guint pdlen, pdpadded;
      gint padding;
      /* media_type only used for printing */
      guint32 G_GNUC_UNUSED media_type;
      guint32 timescale;

      /*                     1                   2                   3
       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * |K|F|A|Z|  RES  |  QuickTime Payload Desc Length                |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * .  QuickTime Payload Desc Data ...                              .
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       */
      if (payload_len <= 4)
        goto wrong_length;

      k = (payload[0] & 0x80) != 0;     /* keyframe */
      f = (payload[0] & 0x40) != 0;     /* sparse */
      a = (payload[0] & 0x20) != 0;     /* start of payload */
      z = (payload[0] & 0x10) != 0;     /* end of payload */
      pdlen = (payload[2] << 8) | payload[3];

      if (pdlen < 12)
        goto wrong_length;

      /* calc padding: description is aligned to a 32-bit boundary */
      pdpadded = pdlen + 3;
      pdpadded -= pdpadded % 4;
      if (payload_len < pdpadded)
        goto wrong_length;

      padding = pdpadded - pdlen;
      GST_LOG_OBJECT (rtpxqtdepay,
          "K: %d, F: %d, A: %d, Z: %d, len: %d, padding %d", k, f, a, z,
          pdlen, padding);

      payload += 4;
      payload_len -= 4;

      /*                     1                   2                   3
       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * | QuickTime Media Type                                          |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * | Timescale                                                     |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * .  QuickTime TLVs ...                                           .
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       */
      media_type =
          (payload[0] << 24) | (payload[1] << 16) | (payload[2] << 8) |
          payload[3];
      timescale =
          (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |
          payload[7];

      GST_LOG_OBJECT (rtpxqtdepay, "media_type: %c%c%c%c, timescale %u",
          payload[0], payload[1], payload[2], payload[3], timescale);

      payload += 8;
      payload_len -= 8;
      pdlen -= 12;

      /* parse TLV (type-length-value) triplets */
      while (pdlen > 3) {
        guint16 tlv_len, tlv_type;

        /*                     1                   2                   3
         *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | QuickTime TLV Length          | QuickTime TLV Type            |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * .  QuickTime TLV Value ...                                      .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         */
        tlv_len = (payload[0] << 8) | payload[1];
        tlv_type = (payload[2] << 8) | payload[3];
        pdlen -= 4;
        if (tlv_len > pdlen)
          goto wrong_length;

        GST_LOG_OBJECT (rtpxqtdepay, "TLV '%c%c', len %d", payload[2],
            payload[3], tlv_len);

        payload += 4;
        payload_len -= 4;

        switch (tlv_type) {
          case TLV_sd:
            /* Session description */
            if (!gst_rtp_quicktime_parse_sd (rtpxqtdepay, payload, tlv_len))
              goto unknown_format;
            rtpxqtdepay->have_sd = TRUE;
            break;
          case TLV_qt:
          case TLV_ti:
          case TLV_ly:
          case TLV_vo:
          case TLV_mx:
          case TLV_tr:
          case TLV_tw:
          case TLV_th:
          case TLV_la:
          case TLV_rt:
          case TLV_gm:
          case TLV_oc:
          case TLV_cr:
          case TLV_du:
          case TLV_po:
          default:
            /* all other TLV types are ignored */
            break;
        }

        pdlen -= tlv_len;
        payload += tlv_len;
        payload_len -= tlv_len;
      }
      payload += padding;
      payload_len -= padding;
    }

    if (l) {
      guint ssilen, ssipadded;
      gint padding;

      /*                     1                   2                   3
       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * | RES                   | Sample-Specific Info Length           |
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       * .  QuickTime TLVs ...
       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       */
      if (payload_len <= 4)
        goto wrong_length;

      ssilen = (payload[2] << 8) | payload[3];
      if (ssilen < 4)
        goto wrong_length;

      /* calc padding: info block is aligned to a 32-bit boundary */
      ssipadded = ssilen + 3;
      ssipadded -= ssipadded % 4;
      if (payload_len < ssipadded)
        goto wrong_length;

      padding = ssipadded - ssilen;
      GST_LOG_OBJECT (rtpxqtdepay, "len: %d, padding %d", ssilen, padding);

      payload += 4;
      payload_len -= 4;
      ssilen -= 4;

      /* parse TLV (type-length-value) triplets */
      while (ssilen > 3) {
        guint16 tlv_len, tlv_type;

        /*                     1                   2                   3
         *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | QuickTime TLV Length          | QuickTime TLV Type            |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * .  QuickTime TLV Value ...                                      .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         */
        tlv_len = (payload[0] << 8) | payload[1];
        tlv_type = (payload[2] << 8) | payload[3];
        ssilen -= 4;
        if (tlv_len > ssilen)
          goto wrong_length;

        GST_LOG_OBJECT (rtpxqtdepay, "TLV '%c%c', len %d", payload[2],
            payload[3], tlv_len);

        payload += 4;
        payload_len -= 4;

        switch (tlv_type) {
          case TLV_sd:
          case TLV_qt:
          case TLV_ti:
          case TLV_ly:
          case TLV_vo:
          case TLV_mx:
          case TLV_tr:
          case TLV_tw:
          case TLV_th:
          case TLV_la:
          case TLV_rt:
          case TLV_gm:
          case TLV_oc:
          case TLV_cr:
          case TLV_du:
          case TLV_po:
          default:
            /* sample-specific TLVs are ignored */
            break;
        }

        ssilen -= tlv_len;
        payload += tlv_len;
        payload_len -= tlv_len;
      }
      payload += padding;
      payload_len -= padding;
    }

    rtpxqtdepay->previous_id = rtpxqtdepay->current_id;

    switch (pck) {
      case 1:
      {
        /* multiple samples per packet. */
        outbuf = gst_buffer_new_and_alloc (payload_len);
        gst_buffer_fill (outbuf, 0, payload, payload_len);
        goto done;
      }
      case 2:
      {
        guint slen;

        /* multiple samples per packet.
         *                     1                   2                   3
         *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * |S| Reserved                    | Sample Length                 |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | Sample Timestamp                                              |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * .  Sample Data ...                                              .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * |S| Reserved                    | Sample Length                 |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * | Sample Timestamp                                              |
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * .  Sample Data ...                                              .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         * . ......                                                        .
         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
         */
        while (payload_len > 8) {
          s = (payload[0] & 0x80) != 0; /* contains sync sample */
          slen = (payload[2] << 8) | payload[3];
          /* timestamp =
           *    (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |
           *    payload[7];
           */

          payload += 8;
          payload_len -= 8;

          /* clamp sample length to what is actually left in the packet */
          if (slen > payload_len)
            slen = payload_len;

          outbuf = gst_buffer_new_and_alloc (slen);
          gst_buffer_fill (outbuf, 0, payload, slen);
          if (!s)
            GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

          /* each sample is pushed individually */
          gst_rtp_base_depayload_push (depayload, outbuf);

          /* aligned on 32 bit boundary */
          slen = GST_ROUND_UP_4 (slen);

          payload += slen;
          payload_len -= slen;
        }
        break;
      }
      case 3:
      {
        /* one sample per packet, use adapter to combine based on marker bit. */
        outbuf = gst_buffer_new_and_alloc (payload_len);
        gst_buffer_fill (outbuf, 0, payload, payload_len);

        gst_adapter_push (rtpxqtdepay->adapter, outbuf);

        /* without the marker bit the sample is not yet complete */
        if (!m)
          goto done;

        avail = gst_adapter_available (rtpxqtdepay->adapter);
        outbuf = gst_adapter_take_buffer (rtpxqtdepay->adapter, avail);

        GST_DEBUG_OBJECT (rtpxqtdepay,
            "gst_rtp_xqt_depay_chain: pushing buffer of size %u", avail);

        goto done;
      }
    }
  }

done:
  gst_rtp_buffer_unmap (&rtp);
  return outbuf;

bad_packet:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Packet did not validate."), (NULL));
    goto done;
  }
need_resync:
  {
    GST_DEBUG_OBJECT (rtpxqtdepay, "waiting for marker");
    goto done;
  }
wrong_version:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Unknown payload version."), (NULL));
    goto done;
  }
pck_reserved:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("PCK reserved 0."), (NULL));
    goto done;
  }
wrong_length:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Wrong payload length."), (NULL));
    goto done;
  }
unknown_format:
  {
    GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
        ("Unknown payload format."), (NULL));
    goto done;
  }
}
/* Completes the frame currently being collected: drains the accumulated
 * bytes from the output adapter into one buffer, stamps it with
 * interpolated pts/dts/duration, marks delta units, hands the frame to
 * the subclass' shape_output vmethod, and starts a fresh current_frame.
 *
 * Returns the GstFlowReturn produced by shape_output. */
GstFlowReturn
gst_base_video_parse_finish_frame (GstBaseVideoParse * base_video_parse)
{
  GstBaseVideoParseClass *klass;
  GstVideoFrame *frame = base_video_parse->current_frame;
  GstBuffer *outbuf;
  GstFlowReturn flow_ret;

  GST_DEBUG ("finish_frame");

  klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  /* Collect everything accumulated for this frame into a single buffer. */
  outbuf = gst_adapter_take_buffer (base_video_parse->output_adapter,
      gst_adapter_available (base_video_parse->output_adapter));

  if (frame->is_sync_point) {
    /* Re-anchor timestamp interpolation on every sync point so that
     * get_timestamp() below stays consistent with last_timestamp. */
    base_video_parse->timestamp_offset = base_video_parse->last_timestamp -
        gst_util_uint64_scale (frame->presentation_frame_number,
        base_video_parse->state.fps_d * GST_SECOND,
        base_video_parse->state.fps_n);
    base_video_parse->distance_from_sync = 0;
  }

  /* Record how far this frame is from the last sync point, then advance. */
  frame->distance_from_sync = base_video_parse->distance_from_sync++;

  frame->presentation_timestamp =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->presentation_frame_number);
  frame->presentation_duration =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->presentation_frame_number + 1) - frame->presentation_timestamp;
  frame->decode_timestamp =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->decode_frame_number);

  GST_BUFFER_TIMESTAMP (outbuf) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (outbuf) = frame->presentation_duration;
  /* A negative decode frame number means no dts is known yet. */
  GST_BUFFER_OFFSET (outbuf) =
      (frame->decode_frame_number < 0) ? 0 : frame->decode_timestamp;
  GST_BUFFER_OFFSET_END (outbuf) = GST_CLOCK_TIME_NONE;

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  /* Only sync points are independently decodable. */
  if (frame->is_sync_point)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  frame->src_buffer = outbuf;
  flow_ret = klass->shape_output (base_video_parse, frame);

  gst_base_video_parse_free_frame (base_video_parse->current_frame);

  /* create new frame */
  base_video_parse->current_frame =
      gst_base_video_parse_new_frame (base_video_parse);

  return flow_ret;
}