/* copy data into @dest, skipping @skip bytes from the head buffers.
 * "unchecked": the caller must guarantee the adapter holds at least
 * @skip + @size bytes; no bounds checks are performed here. */
static void
copy_into_unchecked (GstAdapter * adapter, guint8 * dest, gsize skip,
    gsize size)
{
  GSList *g;
  GstBuffer *buf;
  gsize bsize, csize;

  /* first step, do skipping */
  /* we might well be copying where we were scanning; reuse the cached
   * scan position to avoid re-walking the list from the head */
  if (adapter->scan_entry && (adapter->scan_offset <= skip)) {
    g = adapter->scan_entry;
    skip -= adapter->scan_offset;
  } else {
    g = adapter->buflist;
  }
  buf = g->data;
  bsize = gst_buffer_get_size (buf);
  /* walk past whole buffers that fall entirely inside the skip region */
  while (G_UNLIKELY (skip >= bsize)) {
    skip -= bsize;
    g = g_slist_next (g);
    buf = g->data;
    bsize = gst_buffer_get_size (buf);
  }
  /* copy partial buffer: the tail of the buffer the skip ends in */
  csize = MIN (bsize - skip, size);
  GST_DEBUG ("bsize %" G_GSIZE_FORMAT ", skip %" G_GSIZE_FORMAT ", csize %"
      G_GSIZE_FORMAT, bsize, skip, csize);
  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, adapter,
      "extract %" G_GSIZE_FORMAT " bytes", csize);
  gst_buffer_extract (buf, skip, dest, csize);
  size -= csize;
  dest += csize;

  /* second step, copy remainder from the following buffers */
  while (size > 0) {
    g = g_slist_next (g);
    buf = g->data;
    bsize = gst_buffer_get_size (buf);
    if (G_LIKELY (bsize > 0)) {
      csize = MIN (bsize, size);
      GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, adapter,
          "extract %" G_GSIZE_FORMAT " bytes", csize);
      gst_buffer_extract (buf, 0, dest, csize);
      size -= csize;
      dest += csize;
    }
  }
}
/* Test sink chain for the video stream: verifies each buffer's timestamp
 * in buffer, stream and running time, plus the payload marker byte,
 * except when the test deliberately introduces jitter. */
static GstFlowReturn
output_vchain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstClockTime timestamp;
  guint8 b;
  /* any of these test modes makes exact timestamps unpredictable */
  gboolean jitter = video_gaps || video_overlaps || late_video;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (!jitter)
    fail_unless_equals_int64 (timestamp,
        (video_buffer_count % n_vbuffers) * 25 * GST_MSECOND);

  /* FIX: "&current_video_segment" had been mojibake-mangled to
   * "¤t_video_segment" ("&curr" read as the &curren; entity), which
   * cannot compile */
  timestamp =
      gst_segment_to_stream_time (&current_video_segment, GST_FORMAT_TIME,
      timestamp);
  if (!jitter)
    fail_unless_equals_int64 (timestamp,
        (video_buffer_count % n_vbuffers) * 25 * GST_MSECOND);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_running_time (&current_video_segment, GST_FORMAT_TIME,
      timestamp);
  if (!jitter)
    fail_unless_equals_int64 (timestamp, video_buffer_count * 25 * GST_MSECOND);

  /* the first payload byte encodes the buffer index */
  gst_buffer_extract (buffer, 0, &b, 1);
  if (!jitter)
    fail_unless_equals_int (b, video_buffer_count % n_vbuffers);

  video_buffer_count++;
  gst_buffer_unref (buffer);

  return GST_FLOW_OK;
}
/* Minimal MPEG audio parser for the test harness: sync on an MPEG audio
 * frame header, set fixed caps on the first frame, and finish frames of
 * the size hard-coded for the test file. */
static GstFlowReturn
test_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  guint8 data[2];

  gst_buffer_extract (frame->buffer, 0, data, 2);

  /* 11-bit MPEG audio sync word: first 11 bits all set */
  if ((GST_READ_UINT16_BE (data) & 0xffe0) == 0xffe0) {
    /* first frame of the stream: publish the (fixed) caps */
    if (GST_BUFFER_OFFSET (frame->buffer) == 0) {
      GstCaps *caps;

      caps = gst_caps_new_simple ("audio/mpeg",
          "mpegversion", G_TYPE_INT, 1,
          "mpegaudioversion", G_TYPE_INT, 1,
          "layer", G_TYPE_INT, 3,
          "rate", G_TYPE_INT, 44100, "channels", G_TYPE_INT, 2, NULL);
      gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
      gst_caps_unref (caps);
    }
    /* this framesize is hard-coded for ../test.mp3 */
    return gst_base_parse_finish_frame (parse, frame, 1045);
  } else {
    /* no sync at this offset: ask baseparse to skip one byte and retry */
    *skipsize = 1;
    return GST_FLOW_OK;
  }
}
/* Handle one Kate packet: header packets (MSB of first byte set) are
 * collected on the streamheader list; the first non-header packet first
 * triggers pushing of the collected headers, then gets queued itself. */
static GstFlowReturn
gst_kate_parse_parse_packet (GstKateParse * parse, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint8 header[1];
  gsize size;

  g_assert (parse);

  parse->packetno++;

  /* size is 0 for an empty buffer, 1 if the first byte was extracted */
  size = gst_buffer_extract (buf, 0, header, 1);
  GST_LOG_OBJECT (parse, "Got packet %02x, %" G_GSIZE_FORMAT " bytes",
      size ? header[0] : -1, gst_buffer_get_size (buf));

  if (size > 0 && header[0] & 0x80) {
    GST_DEBUG_OBJECT (parse, "Found header %02x", header[0]);
    /* if 0x80 is set, it's streamheader,
     * so put it on the streamheader list and return;
     * ownership of @buf passes to the list */
    parse->streamheader = g_list_append (parse->streamheader, buf);
    ret = GST_FLOW_OK;
  } else {
    if (!parse->streamheader_sent) {
      GST_DEBUG_OBJECT (parse, "Found non header, pushing headers seen so far");
      ret = gst_kate_parse_push_headers (parse);
    }
    if (ret == GST_FLOW_OK) {
      ret = gst_kate_parse_queue_buffer (parse, buf);
    }
  }

  return ret;
}
/**
 * gst_vaapi_profile_from_codec_data_h264:
 * @buffer: a #GstBuffer holding H.264 codec data (avcC record)
 *
 * Tries to parse the VA H.264 profile from the first bytes of the
 * AVCDecoderConfigurationRecord in @buffer.
 *
 * Return value: the #GstVaapiProfile, or 0 if it cannot be determined
 */
static GstVaapiProfile
gst_vaapi_profile_from_codec_data_h264 (GstBuffer * buffer)
{
  /* MPEG-4 Part 15: Advanced Video Coding (AVC) file format */
  guchar buf[3];

  if (gst_buffer_extract (buffer, 0, buf, sizeof (buf)) != sizeof (buf))
    return 0;

  if (buf[0] != 1)              /* configurationVersion = 1 */
    return 0;

  switch (buf[1]) {             /* AVCProfileIndication */
    case 66:
      /* constraint_set1_flag distinguishes constrained baseline */
      return ((buf[2] & 0x40) ?
          GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE :
          GST_VAAPI_PROFILE_H264_BASELINE);
    case 77:
      return GST_VAAPI_PROFILE_H264_MAIN;
    case 100:
      return GST_VAAPI_PROFILE_H264_HIGH;
    case 118:
      return GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH;
    case 128:
      return GST_VAAPI_PROFILE_H264_STEREO_HIGH;
  }
  return 0;
}
/* Flush all queued CELT frames as one RTP packet: a run of length bytes
 * (each 0xff byte adds 255, a final byte < 0xff terminates a length)
 * followed by the concatenated frame payloads. */
static GstFlowReturn
gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
{
  GstFlowReturn ret;
  GstBuffer *buf, *outbuf;
  guint8 *payload, *spayload;
  guint payload_len;
  GstClockTime duration;
  GstRTPBuffer rtp = { NULL, };

  /* accumulated payload bytes + accumulated size-header bytes */
  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
  duration = rtpceltpay->qduration;

  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));

  /* get a big enough packet for the sizes + payloads */
  outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

  GST_BUFFER_DURATION (outbuf) = duration;

  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  /* point to the payload for size headers and data:
   * size headers go first, payload data follows them */
  spayload = gst_rtp_buffer_get_payload (&rtp);
  payload = spayload + rtpceltpay->sbytes;

  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
    guint size;

    /* copy first timestamp to output */
    if (GST_BUFFER_TIMESTAMP (outbuf) == -1)
      GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);

    /* write the size to the header */
    size = gst_buffer_get_size (buf);
    while (size > 0xff) {
      *spayload++ = 0xff;
      size -= 0xff;
    }
    *spayload++ = size;

    /* copy payload */
    size = gst_buffer_get_size (buf);
    gst_buffer_extract (buf, 0, payload, size);
    payload += size;

    gst_buffer_unref (buf);
  }
  gst_rtp_buffer_unmap (&rtp);

  /* we consumed it all */
  rtpceltpay->bytes = 0;
  rtpceltpay->sbytes = 0;
  rtpceltpay->qduration = 0;

  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);

  return ret;
}
/* Render one buffer to the shared-memory pipe. First tries the zero-copy
 * path (send the mapped data directly); if the writer refuses (-1), falls
 * back to allocating an shm block and copying the data into it. Blocks on
 * the condition variable until a client connects / rendering is allowed,
 * returning FLUSHING whenever unlock is requested while waiting. */
static GstFlowReturn
gst_shm_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstShmSink *self = GST_SHM_SINK (bsink);
  int rv;
  GstMapInfo map;

  GST_OBJECT_LOCK (self);
  /* optionally wait until at least one client is connected */
  while (self->wait_for_connection && !self->clients) {
    g_cond_wait (self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock) {
      GST_OBJECT_UNLOCK (self);
      return GST_FLOW_FLUSHING;
    }
  }

  while (!gst_shm_sink_can_render (self, GST_BUFFER_TIMESTAMP (buf))) {
    g_cond_wait (self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock) {
      GST_OBJECT_UNLOCK (self);
      return GST_FLOW_FLUSHING;
    }
  }

  /* fast path: hand the mapped data to the writer directly */
  gst_buffer_map (buf, &map, GST_MAP_READ);
  rv = sp_writer_send_buf (self->pipe, (char *) map.data, map.size,
      GST_BUFFER_TIMESTAMP (buf));
  gst_buffer_unmap (buf, &map);

  if (rv == -1) {
    /* slow path: copy into a freshly allocated shm block */
    ShmBlock *block = NULL;
    gchar *shmbuf = NULL;
    while ((block = sp_writer_alloc_block (self->pipe,
                gst_buffer_get_size (buf))) == NULL) {
      g_cond_wait (self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        GST_OBJECT_UNLOCK (self);
        return GST_FLOW_FLUSHING;
      }
    }
    while (self->wait_for_connection && !self->clients) {
      g_cond_wait (self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        /* must release the block we already allocated before bailing */
        sp_writer_free_block (block);
        GST_OBJECT_UNLOCK (self);
        return GST_FLOW_FLUSHING;
      }
    }
    shmbuf = sp_writer_block_get_buf (block);
    gst_buffer_extract (buf, 0, shmbuf, gst_buffer_get_size (buf));
    sp_writer_send_buf (self->pipe, shmbuf, gst_buffer_get_size (buf),
        GST_BUFFER_TIMESTAMP (buf));
    sp_writer_free_block (block);
  }

  GST_OBJECT_UNLOCK (self);

  return GST_FLOW_OK;
}
/* submit the given buffer to the ogg sync layer.
 * Takes ownership of @buffer (unrefs it in all paths).
 * Returns GST_FLOW_ERROR if libogg cannot provide or accept the data. */
static GstFlowReturn
gst_ogg_parse_submit_buffer (GstOggParse * ogg, GstBuffer * buffer)
{
  gsize size;
  gchar *oggbuffer;
  GstFlowReturn ret = GST_FLOW_OK;

  size = gst_buffer_get_size (buffer);
  GST_DEBUG_OBJECT (ogg, "submitting %" G_GSIZE_FORMAT " bytes", size);
  /* nothing to do for an empty buffer */
  if (G_UNLIKELY (size == 0))
    goto done;

  oggbuffer = ogg_sync_buffer (&ogg->sync, size);
  if (G_UNLIKELY (oggbuffer == NULL)) {
    GST_ELEMENT_ERROR (ogg, STREAM, DECODE, (NULL),
        ("failed to get ogg sync buffer"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  /* extract may copy fewer bytes than requested; report the actual count
   * to ogg_sync_wrote */
  size = gst_buffer_extract (buffer, 0, oggbuffer, size);
  if (G_UNLIKELY (ogg_sync_wrote (&ogg->sync, size) < 0)) {
    GST_ELEMENT_ERROR (ogg, STREAM, DECODE, (NULL),
        ("failed to write %" G_GSIZE_FORMAT " bytes to the sync buffer", size));
    ret = GST_FLOW_ERROR;
  }

done:
  gst_buffer_unref (buffer);

  return ret;
}
/* Tries to parse the VA H.265 profile from the first bytes of the
 * HEVCDecoderConfigurationRecord in @buffer.
 * Returns the #GstVaapiProfile, or 0 if it cannot be determined. */
static GstVaapiProfile
gst_vaapi_profile_from_codec_data_h265 (GstBuffer * buffer)
{
  /* ISO/IEC 14496-15: HEVC file format */
  guchar buf[3];

  if (gst_buffer_extract (buffer, 0, buf, sizeof (buf)) != sizeof (buf))
    return 0;

  if (buf[0] != 1)              /* configurationVersion = 1 */
    return 0;

  if (buf[1] & 0xc0)            /* general_profile_space = 0 */
    return 0;

  switch (buf[1] & 0x1f) {      /* HEVCProfileIndication */
    case 1:
      return GST_VAAPI_PROFILE_H265_MAIN;
    case 2:
      return GST_VAAPI_PROFILE_H265_MAIN10;
    case 3:
      return GST_VAAPI_PROFILE_H265_MAIN_STILL_PICTURE;
    case 4:
      return GST_VAAPI_PROFILE_H265_MAIN_422_10;
    case 5:
      return GST_VAAPI_PROFILE_H265_MAIN_444;
  }
  return 0;
}
/* Decode one frame with an AudioToolbox queue: copy the input into an
 * AudioQueueBuffer, enqueue it, then pull the decoded samples back out
 * with AudioQueueOfflineRender and push them downstream. */
static GstFlowReturn
gst_atdec_handle_frame (GstAudioDecoder * decoder, GstBuffer * buffer)
{
  AudioTimeStamp timestamp = { 0 };
  AudioStreamPacketDescription packet;
  AudioQueueBufferRef input_buffer, output_buffer;
  GstBuffer *out;
  GstMapInfo info;
  GstAudioInfo *audio_info;
  int size, out_frames;
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstATDec *atdec = GST_ATDEC (decoder);

  /* NULL buffer is a drain request; nothing to do here */
  if (buffer == NULL)
    return GST_FLOW_OK;

  audio_info = gst_audio_decoder_get_audio_info (decoder);

  /* copy the input buffer into an AudioQueueBuffer */
  size = gst_buffer_get_size (buffer);
  AudioQueueAllocateBuffer (atdec->queue, size, &input_buffer);
  gst_buffer_extract (buffer, 0, input_buffer->mAudioData, size);
  input_buffer->mAudioDataByteSize = size;

  /* assume framed input */
  packet.mStartOffset = 0;
  packet.mVariableFramesInPacket = 1;
  packet.mDataByteSize = size;

  /* enqueue the buffer. It will get free'd once the gst_atdec_buffer_emptied
   * callback is called */
  AudioQueueEnqueueBuffer (atdec->queue, input_buffer, 1, &packet);

  /* figure out how many frames we need to pull out of the queue */
  out_frames = GST_CLOCK_TIME_TO_FRAMES (GST_BUFFER_DURATION (buffer),
      audio_info->rate);
  size = out_frames * audio_info->bpf;
  AudioQueueAllocateBuffer (atdec->queue, size, &output_buffer);

  /* pull the frames.
   * FIX: "&timestamp" had been mojibake-mangled to "×tamp" ("&times"
   * read as the &times; entity), which cannot compile */
  AudioQueueOfflineRender (atdec->queue, &timestamp, output_buffer,
      out_frames);
  if (output_buffer->mAudioDataByteSize) {
    out =
        gst_audio_decoder_allocate_output_buffer (decoder,
        output_buffer->mAudioDataByteSize);

    gst_buffer_map (out, &info, GST_MAP_WRITE);
    memcpy (info.data, output_buffer->mAudioData,
        output_buffer->mAudioDataByteSize);
    gst_buffer_unmap (out, &info);

    flow_ret = gst_audio_decoder_finish_frame (decoder, out, 1);
  }

  AudioQueueFreeBuffer (atdec->queue, output_buffer);

  return flow_ret;
}
/* QuarkTV effect: for every pixel, copy the pixel from a randomly chosen
 * frame in the plane history table (or from the input if that slot is
 * empty), producing a "dissolving" time-displacement effect. */
static GstFlowReturn
gst_quarktv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
    GstVideoFrame * out_frame)
{
  GstQuarkTV *filter = GST_QUARKTV (vfilter);
  gint area;
  guint32 *src, *dest;
  GstClockTime timestamp;
  GstBuffer **planetable;
  gint planes, current_plane;

  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
  timestamp =
      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
      GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (GST_OBJECT (filter), timestamp);

  /* planetable is freed on shutdown; treat its absence as flushing */
  if (G_UNLIKELY (filter->planetable == NULL))
    return GST_FLOW_FLUSHING;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  GST_OBJECT_LOCK (filter);
  area = filter->area;
  planetable = filter->planetable;
  planes = filter->planes;
  current_plane = filter->current_plane;

  /* store the new frame in the history ring, dropping the old entry */
  if (planetable[current_plane])
    gst_buffer_unref (planetable[current_plane]);
  planetable[current_plane] = gst_buffer_ref (in_frame->buffer);

  /* For each pixel */
  while (--area) {
    GstBuffer *rand;

    /* pick a random buffer */
    rand = planetable[(current_plane + (fastrand () >> 24)) % planes];

    /* Copy the pixel from the random buffer to dest, FIXME, slow */
    if (rand)
      gst_buffer_extract (rand, area * 4, &dest[area], 4);
    else
      dest[area] = src[area];
  }
  /* NOTE(review): pixel 0 is never written (--area stops at 1) --
   * confirm this matches the intended effectv behaviour */

  filter->current_plane--;
  if (filter->current_plane < 0)
    filter->current_plane = planes - 1;
  GST_OBJECT_UNLOCK (filter);

  return GST_FLOW_OK;
}
/* Prepend a 7-byte ADTS header (derived from the 2-byte AudioSpecificConfig
 * in @data->codec_data) to a copy of the raw AAC frame in @buf.
 * Returns a new buffer; @buf itself is not consumed. */
GstBuffer *
mpegpsmux_prepare_aac (GstBuffer * buf, MpegPsPadData * data, MpegPsMux * mux)
{
  GstBuffer *out_buf;
  GstMemory *mem;
  gsize out_size;
  guint8 *adts_header, codec_data[2];
  guint8 rate_idx = 0, channels = 0, obj_type = 0;

  GST_DEBUG_OBJECT (mux, "Preparing AAC buffer for output");

  adts_header = g_malloc0 (7);

  /* We want the same data and metadata, and then prepend some bytes */
  out_buf = gst_buffer_copy (buf);
  /* ADTS frame_length covers the raw payload plus the 7-byte header */
  out_size = gst_buffer_get_size (buf) + 7;

  gst_buffer_extract (data->codec_data, 0, codec_data, 2);

  /* Generate ADTS header fields from the AudioSpecificConfig */
  obj_type = (codec_data[0] & 0xC) >> 2;
  obj_type++;
  rate_idx = (codec_data[0] & 0x3) << 1;
  rate_idx |= (codec_data[1] & 0x80) >> 7;
  channels = (codec_data[1] & 0x78) >> 3;
  GST_DEBUG_OBJECT (mux, "Rate index %u, channels %u, object type %u", rate_idx,
      channels, obj_type);
  /* Sync point over a full byte */
  adts_header[0] = 0xFF;
  /* Sync point continued over first 4 bits + static 4 bits
   * (ID, layer, protection) */
  adts_header[1] = 0xF1;
  /* Object type over first 2 bits */
  adts_header[2] = obj_type << 6;
  /* rate index over next 4 bits */
  adts_header[2] |= (rate_idx << 2);
  /* channels over last 2 bits */
  adts_header[2] |= (channels & 0x4) >> 2;
  /* channels continued over next 2 bits + 4 bits at zero */
  adts_header[3] = (channels & 0x3) << 6;
  /* frame size over last 2 bits.
   * FIX: use out_size here like the following bytes do --
   * gst_buffer_get_size (out_buf) excluded the 7 header bytes (the header
   * is prepended only below), making the frame_length field inconsistent */
  adts_header[3] |= (out_size & 0x1800) >> 11;
  /* frame size continued over full byte */
  adts_header[4] = (out_size & 0x1FF8) >> 3;
  /* frame size continued first 3 bits */
  adts_header[5] = (out_size & 0x7) << 5;
  /* buffer fullness (0x7FF for VBR) over 5 last bits */
  adts_header[5] |= 0x1F;
  /* buffer fullness (0x7FF for VBR) continued over 6 first bits + 2 zeros for
   * number of raw data blocks */
  adts_header[6] = 0xFC;

  /* Prepend ADTS header */
  mem = gst_memory_new_wrapped (0, adts_header, 7, 0, 7, adts_header, g_free);
  gst_buffer_prepend_memory (out_buf, mem);

  return out_buf;
}
/* Read the byte at @offset from @buffer.
 * FIX: zero-initialize @res so that a failed extract (offset past the end
 * of the buffer copies 0 bytes) returns 0 instead of uninitialized stack
 * memory, which was undefined behavior. */
static guint8
buffer_get_byte (GstBuffer * buffer, gint offset)
{
  guint8 res = 0;

  gst_buffer_extract (buffer, offset, &res, 1);

  return res;
}
// Copy the sample data of every buffer in |buffers| back-to-back into the
// audio channel's float storage, advancing the destination by the number
// of floats each buffer contributed.
static void copyGstreamerBuffersToAudioChannel(GstBufferList* buffers, AudioChannel* audioChannel)
{
    float* destination = audioChannel->mutableData();
    unsigned bufferCount = gst_buffer_list_length(buffers);
    for (unsigned i = 0; i < bufferCount; ++i) {
        GstBuffer* buffer = gst_buffer_list_get(buffers, i);
        ASSERT(buffer);
        gsize bufferSize = gst_buffer_get_size(buffer);
        // assumes each buffer holds a whole number of floats and that the
        // channel has room for them all -- TODO confirm with callers
        gst_buffer_extract(buffer, 0, destination, bufferSize);
        destination += bufferSize / sizeof(float);
    }
}
/* Check whether @buf starts a keyframe: the 0x40 bit of the first byte
 * is clear for keyframes. Empty buffers are never keyframes. */
static gboolean
is_keyframe (GstBuffer * buf)
{
  guint8 first_byte;

  if (gst_buffer_get_size (buf) == 0)
    return FALSE;

  gst_buffer_extract (buf, 0, &first_byte, 1);

  return (first_byte & 0x40) == 0;
}
/* Copy the contents of @buf into a newly allocated NUL-terminated string.
 * Returns the string (caller owns, g_free) if it is valid UTF-8,
 * otherwise frees it and returns NULL. */
static gchar *
gst_vqe_buf_to_utf8_sdp (GstBuffer * buf)
{
  gsize len = gst_buffer_get_size (buf);
  gchar *text = g_malloc (len + 1);

  text[len] = '\0';
  gst_buffer_extract (buf, 0, text, len);

  if (g_utf8_validate (text, len, NULL))
    return text;

  g_free (text);
  return NULL;
}
/* Copy aBuffer's payload into a fresh buffer allocated from mAllocator,
 * carry the metadata over, and wrap the result in a PlanarYCbCrImage.
 * On return *aOutBuffer holds the new buffer and |image| the image. */
void GStreamerReader::CopyIntoImageBuffer(GstBuffer* aBuffer,
                                          GstBuffer** aOutBuffer,
                                          nsRefPtr<PlanarYCbCrImage> &image)
{
  *aOutBuffer = gst_buffer_new_allocate(mAllocator,
                                        gst_buffer_get_size(aBuffer), nullptr);
  GstMemory *mem = gst_buffer_peek_memory(*aOutBuffer, 0);
  GstMapInfo map_info;
  gst_memory_map(mem, &map_info, GST_MAP_WRITE);
  gst_buffer_extract(aBuffer, 0, map_info.data, gst_buffer_get_size(aBuffer));
  gst_memory_unmap(mem, &map_info);

  /* create a new gst buffer with the newly created memory and copy the
   * metadata over from the incoming buffer */
  gst_buffer_copy_into(*aOutBuffer, aBuffer,
      (GstBufferCopyFlags)(GST_BUFFER_COPY_METADATA), 0, -1);
  image = GetImageFromBuffer(*aOutBuffer);
}
/* FFmpeg URLProtocol read callback backed by a GStreamer pad pull.
 * Returns the number of bytes copied into @buf, 0 on EOS, -1 when
 * flushing, -2 on any other error. Does not advance info->offset. */
static int
gst_ffmpegdata_peek (URLContext * h, unsigned char *buf, int size)
{
  GstProtocolInfo *info;
  GstBuffer *inbuf = NULL;
  GstFlowReturn ret;
  int total = 0;

  /* peeking only makes sense on a read-only context */
  g_return_val_if_fail (h->flags == URL_RDONLY, AVERROR (EIO));
  info = (GstProtocolInfo *) h->priv_data;

  GST_DEBUG ("Pulling %d bytes at position %" G_GUINT64_FORMAT, size,
      info->offset);

  ret = gst_pad_pull_range (info->pad, info->offset, (guint) size, &inbuf);

  switch (ret) {
    case GST_FLOW_OK:
      /* the pulled buffer may be smaller than requested */
      total = (gint) gst_buffer_get_size (inbuf);
      gst_buffer_extract (inbuf, 0, buf, total);
      gst_buffer_unref (inbuf);
      break;
    case GST_FLOW_EOS:
      total = 0;
      break;
    case GST_FLOW_FLUSHING:
      total = -1;
      break;
    default:
    case GST_FLOW_ERROR:
      total = -2;
      break;
  }
  GST_DEBUG ("Got %d (%s) return result %d", ret, gst_flow_get_name (ret),
      total);

  return total;
}
/* appsink "new-sample" callback: pull the sample, copy its data into the
 * stream's internal image buffer, and mark the image dirty so the texture
 * gets re-uploaded. */
GstFlowReturn GStreamerImageStream::on_new_sample(GstAppSink *appsink, GStreamerImageStream *user_data)
{
    // get the buffer from appsink
    GstSample *sample = gst_app_sink_pull_sample(appsink);
    GstBuffer *buffer = gst_sample_get_buffer(sample);

    // upload data
    GstMapInfo info;
    gst_buffer_map(buffer, &info, GST_MAP_READ);
    // NOTE(review): assumes _internal_buffer holds at least info.size
    // bytes -- confirm against the allocation site
    gst_buffer_extract(buffer, 0, user_data->_internal_buffer, info.size);

    // data has been modified so dirty the image so the texture will updated
    user_data->dirty();

    // clean resources
    gst_buffer_unmap(buffer, &info);
    gst_sample_unref(sample);

    return GST_FLOW_OK;
}
/* Test sink chain for the audio stream: verifies each buffer's timestamp
 * in buffer, stream and running time (unless jitter was introduced by the
 * test), and checks the payload fill value. */
static GstFlowReturn
output_achain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstClockTime timestamp;
  guint8 b;
  /* any of these test modes makes exact timestamps unpredictable */
  gboolean audio_jitter = audio_nondiscont || audio_drift || early_video;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  if (!audio_jitter)
    fail_unless_equals_int64 (timestamp,
        (audio_buffer_count % n_abuffers) * 1 * GST_SECOND);

  /* FIX: "&current_audio_segment" had been mojibake-mangled to
   * "¤t_audio_segment" ("&curr" read as the &curren; entity), which
   * cannot compile */
  timestamp =
      gst_segment_to_stream_time (&current_audio_segment, GST_FORMAT_TIME,
      timestamp);
  if (!audio_jitter)
    fail_unless_equals_int64 (timestamp,
        (audio_buffer_count % n_abuffers) * 1 * GST_SECOND);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_running_time (&current_audio_segment, GST_FORMAT_TIME,
      timestamp);
  if (!audio_jitter)
    fail_unless_equals_int64 (timestamp, audio_buffer_count * 1 * GST_SECOND);

  /* first payload byte carries the expected fill value */
  gst_buffer_extract (buffer, 0, &b, 1);
  if (per_channel) {
    fail_unless_equals_int (b, fill_value_per_channel[0]);
  } else {
    fail_unless_equals_int (b, fill_value);
  }

  audio_buffer_count++;
  gst_buffer_unref (buffer);

  return GST_FLOW_OK;
}
/* Chain function of the Ogg-in-AVI parser: feed the incoming buffer to the
 * libogg sync layer (resetting it on DISCONT), then drain as many pages and
 * packets as possible. The first page seen initializes the ogg stream with
 * its serial number; each complete packet is pushed downstream via
 * gst_ogg_avi_parse_push_packet, and any sync loss flags a discontinuity.
 * Takes ownership of @buffer. */
static GstFlowReturn gst_ogg_avi_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstFlowReturn result = GST_FLOW_OK; GstOggAviParse *ogg; guint size; gchar *oggbuf; gint ret = -1; ogg = GST_OGG_AVI_PARSE (parent); size = gst_buffer_get_size (buffer); GST_LOG_OBJECT (ogg, "Chain function received buffer of size %d", size); if (GST_BUFFER_IS_DISCONT (buffer)) { ogg_sync_reset (&ogg->sync); ogg->discont = TRUE; } /* write data to sync layer */ oggbuf = ogg_sync_buffer (&ogg->sync, size); gst_buffer_extract (buffer, 0, oggbuf, size); ogg_sync_wrote (&ogg->sync, size); gst_buffer_unref (buffer); /* try to get as many packets out of the stream as possible */ do { ogg_page page; /* try to swap out a page */ ret = ogg_sync_pageout (&ogg->sync, &page); if (ret == 0) { GST_DEBUG_OBJECT (ogg, "need more data"); break; } else if (ret == -1) { GST_DEBUG_OBJECT (ogg, "discont in pages"); ogg->discont = TRUE; } else { /* new unknown stream, init the ogg stream with the serial number of the * page. */ if (ogg->serial == -1) { ogg->serial = ogg_page_serialno (&page); ogg_stream_init (&ogg->stream, ogg->serial); } /* submit page */ if (ogg_stream_pagein (&ogg->stream, &page) != 0) { GST_WARNING_OBJECT (ogg, "ogg stream choked on page resetting stream"); ogg_sync_reset (&ogg->sync); ogg->discont = TRUE; continue; } /* try to get as many packets as possible out of the page */ do { ogg_packet packet; ret = ogg_stream_packetout (&ogg->stream, &packet); GST_LOG_OBJECT (ogg, "packetout gave %d", ret); switch (ret) { case 0: break; case -1: /* out of sync, We mark a DISCONT. */ ogg->discont = TRUE; break; case 1: result = gst_ogg_avi_parse_push_packet (ogg, &packet); if (result != GST_FLOW_OK) goto done; break; default: GST_WARNING_OBJECT (ogg, "invalid return value %d for ogg_stream_packetout, resetting stream", ret); break; } } while (ret != 0); } } while (ret != 0); done: return result; }
/* Subpicture pad chain function: accumulate possibly-fragmented SPU data
 * in dvdspu->partial_spu until a complete packet has been collected, then
 * submit it. For VobSub the first two bytes give the expected packet size;
 * for PGS the collected data is walked command-by-command until an END
 * (0x80) command terminates the buffer exactly. Oversized or empty
 * collections are discarded; an unset input type is a fatal error.
 * Timestamps update the subpicture segment position, and a DISCONT drops
 * any partial packet in progress. */
static GstFlowReturn gst_dvd_spu_subpic_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstDVDSpu *dvdspu = (GstDVDSpu *) parent; GstFlowReturn ret = GST_FLOW_OK; gsize size; g_return_val_if_fail (dvdspu != NULL, GST_FLOW_ERROR); GST_INFO_OBJECT (dvdspu, "Have subpicture buffer with timestamp %" GST_TIME_FORMAT " and size %" G_GSIZE_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), gst_buffer_get_size (buf)); DVD_SPU_LOCK (dvdspu); if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { dvdspu->subp_seg.position = GST_BUFFER_TIMESTAMP (buf); } if (GST_BUFFER_IS_DISCONT (buf) && dvdspu->partial_spu) { gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } if (dvdspu->partial_spu != NULL) { if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) GST_WARNING_OBJECT (dvdspu, "Joining subpicture buffer with timestamp to previous"); dvdspu->partial_spu = gst_buffer_append (dvdspu->partial_spu, buf); } else { /* If we don't yet have a buffer, wait for one with a timestamp, * since that will avoid collecting the 2nd half of a partial buf */ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) dvdspu->partial_spu = buf; else gst_buffer_unref (buf); } if (dvdspu->partial_spu == NULL) goto done; size = gst_buffer_get_size (dvdspu->partial_spu); switch (dvdspu->spu_input_type) { case SPU_INPUT_TYPE_VOBSUB: if (size >= 2) { guint8 header[2]; guint16 packet_size; gst_buffer_extract (dvdspu->partial_spu, 0, header, 2); packet_size = GST_READ_UINT16_BE (header); if (packet_size == size) { submit_new_spu_packet (dvdspu, dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else if (packet_size == 0) { GST_LOG_OBJECT (dvdspu, "Discarding empty SPU buffer"); gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else if (packet_size < size) { /* Somehow we collected too much - something is wrong. 
Drop the * packet entirely and wait for a new one */ GST_DEBUG_OBJECT (dvdspu, "Discarding invalid SPU buffer of size %" G_GSIZE_FORMAT, size); gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else { GST_LOG_OBJECT (dvdspu, "SPU buffer claims to be of size %u. Collected %" G_GSIZE_FORMAT " so far.", packet_size, size); } } break; case SPU_INPUT_TYPE_PGS:{ /* Collect until we have a command buffer that ends exactly at the size * we've collected */ guint8 packet_type; guint16 packet_size; GstMapInfo map; guint8 *ptr, *end; gboolean invalid = FALSE; gst_buffer_map (dvdspu->partial_spu, &map, GST_MAP_READ); ptr = map.data; end = ptr + map.size; /* FIXME: There's no need to walk the command set each time. We can set a * marker and resume where we left off next time */ /* FIXME: Move the packet parsing and sanity checking into the format-specific modules */ while (ptr != end) { if (ptr + 3 > end) break; packet_type = *ptr++; packet_size = GST_READ_UINT16_BE (ptr); ptr += 2; if (ptr + packet_size > end) break; ptr += packet_size; /* 0x80 is the END command for PGS packets */ if (packet_type == 0x80 && ptr != end) { /* Extra cruft on the end of the packet -> assume invalid */ invalid = TRUE; break; } } gst_buffer_unmap (dvdspu->partial_spu, &map); if (invalid) { gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else if (ptr == end) { GST_DEBUG_OBJECT (dvdspu, "Have complete PGS packet of size %" G_GSIZE_FORMAT ". Enqueueing.", map.size); submit_new_spu_packet (dvdspu, dvdspu->partial_spu); dvdspu->partial_spu = NULL; } break; } default: GST_ERROR_OBJECT (dvdspu, "Input type not configured before SPU passing"); goto caps_not_set; } done: DVD_SPU_UNLOCK (dvdspu); return ret; /* ERRORS */ caps_not_set: { GST_ELEMENT_ERROR (dvdspu, RESOURCE, NO_SPACE_LEFT, (_("Subpicture format was not configured before data flow")), (NULL)); ret = GST_FLOW_ERROR; goto done; } }
/* (Re)configure the Android MediaCodec audio decoder for new caps.
 * If the codec is running and the caps are a real format change, it is
 * drained, stopped, closed and reopened. A GstAmcFormat is then built from
 * the rate/channels of the caps; codec_data or streamheader buffers are
 * attached as csd-N buffers (for Vorbis only the identification (0x01) and
 * setup (0x05) packets are used -- the first payload byte selects them).
 * Finally the codec is configured and started, samples-per-frame is derived
 * for MPEG audio (layer/version dependent), and the srcpad task restarted.
 * Returns TRUE on success. */
static gboolean gst_amc_audio_dec_set_format (GstAudioDecoder * decoder, GstCaps * caps) { GstAmcAudioDec *self; GstStructure *s; GstAmcFormat *format; const gchar *mime; gboolean is_format_change = FALSE; gboolean needs_disable = FALSE; gchar *format_string; gint rate, channels; GError *err = NULL; self = GST_AMC_AUDIO_DEC (decoder); GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, caps); /* Check if the caps change is a real format change or if only irrelevant * parts of the caps have changed or nothing at all. */ is_format_change |= (!self->input_caps || !gst_caps_is_equal (self->input_caps, caps)); needs_disable = self->started; /* If the component is not started and a real format change happens * we have to restart the component. If no real format change * happened we can just exit here. */ if (needs_disable && !is_format_change) { /* Framerate or something minor changed */ self->input_caps_changed = TRUE; GST_DEBUG_OBJECT (self, "Already running and caps did not change the format"); return TRUE; } if (needs_disable && is_format_change) { gst_amc_audio_dec_drain (self); GST_AUDIO_DECODER_STREAM_UNLOCK (self); gst_amc_audio_dec_stop (GST_AUDIO_DECODER (self)); GST_AUDIO_DECODER_STREAM_LOCK (self); gst_amc_audio_dec_close (GST_AUDIO_DECODER (self)); if (!gst_amc_audio_dec_open (GST_AUDIO_DECODER (self))) { GST_ERROR_OBJECT (self, "Failed to open codec again"); return FALSE; } if (!gst_amc_audio_dec_start (GST_AUDIO_DECODER (self))) { GST_ERROR_OBJECT (self, "Failed to start codec again"); } } /* srcpad task is not running at this point */ mime = caps_to_mime (caps); if (!mime) { GST_ERROR_OBJECT (self, "Failed to convert caps to mime"); return FALSE; } s = gst_caps_get_structure (caps, 0); if (!gst_structure_get_int (s, "rate", &rate) || !gst_structure_get_int (s, "channels", &channels)) { GST_ERROR_OBJECT (self, "Failed to get rate/channels"); return FALSE; } format = gst_amc_format_new_audio (mime, rate, channels, &err); if (!format) { 
GST_ELEMENT_ERROR_FROM_ERROR (self, err); return FALSE; } /* FIXME: These buffers needs to be valid until the codec is stopped again */ g_list_foreach (self->codec_datas, (GFunc) gst_buffer_unref, NULL); g_list_free (self->codec_datas); self->codec_datas = NULL; if (gst_structure_has_field (s, "codec_data")) { const GValue *h = gst_structure_get_value (s, "codec_data"); GstBuffer *codec_data = gst_value_get_buffer (h); GstMapInfo minfo; guint8 *data; gst_buffer_map (codec_data, &minfo, GST_MAP_READ); data = g_memdup (minfo.data, minfo.size); self->codec_datas = g_list_prepend (self->codec_datas, data); gst_amc_format_set_buffer (format, "csd-0", data, minfo.size, &err); if (err) GST_ELEMENT_WARNING_FROM_ERROR (self, err); gst_buffer_unmap (codec_data, &minfo); } else if (gst_structure_has_field (s, "streamheader")) { const GValue *sh = gst_structure_get_value (s, "streamheader"); gint nsheaders = gst_value_array_get_size (sh); GstBuffer *buf; const GValue *h; gint i, j; gchar *fname; GstMapInfo minfo; guint8 *data; for (i = 0, j = 0; i < nsheaders; i++) { h = gst_value_array_get_value (sh, i); buf = gst_value_get_buffer (h); if (strcmp (mime, "audio/vorbis") == 0) { guint8 header_type; gst_buffer_extract (buf, 0, &header_type, 1); /* Only use the identification and setup packets */ if (header_type != 0x01 && header_type != 0x05) continue; } fname = g_strdup_printf ("csd-%d", j); gst_buffer_map (buf, &minfo, GST_MAP_READ); data = g_memdup (minfo.data, minfo.size); self->codec_datas = g_list_prepend (self->codec_datas, data); gst_amc_format_set_buffer (format, fname, data, minfo.size, &err); if (err) GST_ELEMENT_WARNING_FROM_ERROR (self, err); gst_buffer_unmap (buf, &minfo); g_free (fname); j++; } } format_string = gst_amc_format_to_string (format, &err); if (err) GST_ELEMENT_WARNING_FROM_ERROR (self, err); GST_DEBUG_OBJECT (self, "Configuring codec with format: %s", GST_STR_NULL (format_string)); g_free (format_string); if (!gst_amc_codec_configure (self->codec, 
format, 0, &err)) { GST_ERROR_OBJECT (self, "Failed to configure codec"); GST_ELEMENT_ERROR_FROM_ERROR (self, err); return FALSE; } gst_amc_format_free (format); if (!gst_amc_codec_start (self->codec, &err)) { GST_ERROR_OBJECT (self, "Failed to start codec"); GST_ELEMENT_ERROR_FROM_ERROR (self, err); return FALSE; } self->spf = -1; /* TODO: Implement for other codecs too */ if (gst_structure_has_name (s, "audio/mpeg")) { gint mpegversion = -1; gst_structure_get_int (s, "mpegversion", &mpegversion); if (mpegversion == 1) { gint layer = -1, mpegaudioversion = -1; gst_structure_get_int (s, "layer", &layer); gst_structure_get_int (s, "mpegaudioversion", &mpegaudioversion); if (layer == 1) self->spf = 384; else if (layer == 2) self->spf = 1152; else if (layer == 3 && mpegaudioversion != -1) self->spf = (mpegaudioversion == 1 ? 1152 : 576); } } self->started = TRUE; self->input_caps_changed = TRUE; /* Start the srcpad loop again */ self->flushing = FALSE; self->downstream_flow_ret = GST_FLOW_OK; gst_pad_start_task (GST_AUDIO_DECODER_SRC_PAD (self), (GstTaskFunction) gst_amc_audio_dec_loop, decoder, NULL); return TRUE; }
/* Decode one Kate packet through libkate. The first payload byte (MSB set)
 * marks header packets: the ID header (0x80) yields caps and language/
 * category tags for the src pad, the comment header (0x81) is parsed into
 * a tag list. Data packets first flush any pending tags on @tagpad, then
 * are decoded; with a recent enough libkate, event-attached metadata is
 * forwarded as extended-comment tags. *ev receives any decoded event.
 * Returns GST_FLOW_ERROR when libkate fails to decode the packet.
 * NOTE(review): the gst_buffer_map/gst_buffer_unmap calls here use the
 * transitional (pre-GstMapInfo) 0.11 API signatures -- confirm against the
 * GStreamer version this tree builds with. */
GstFlowReturn gst_kate_util_decoder_base_chain_kate_packet (GstKateDecoderBase * decoder, GstElement * element, GstPad * pad, GstBuffer * buf, GstPad * srcpad, GstPad * tagpad, GstCaps ** src_caps, const kate_event ** ev) { kate_packet kp; int ret; GstFlowReturn rflow = GST_FLOW_OK; gboolean is_header; guint8 *data; gsize size; guint8 header[1]; size = gst_buffer_extract (buf, 0, header, 1); GST_DEBUG_OBJECT (element, "got kate packet, %u bytes, type %02x", gst_buffer_get_size (buf), size == 0 ? -1 : header[0]); is_header = size > 0 && (header[0] & 0x80); if (!is_header && decoder->tags) { /* after we've processed headers, send any tags before processing the data packet */ GST_DEBUG_OBJECT (element, "Not a header, sending tags for pad %s:%s", GST_DEBUG_PAD_NAME (tagpad)); gst_element_found_tags_for_pad (element, tagpad, decoder->tags); decoder->tags = NULL; } data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ); kate_packet_wrap (&kp, size, data); ret = kate_high_decode_packetin (&decoder->k, &kp, ev); gst_buffer_unmap (buf, data, size); if (G_UNLIKELY (ret < 0)) { GST_ELEMENT_ERROR (element, STREAM, DECODE, (NULL), ("Failed to decode Kate packet: %s", gst_kate_util_get_error_message (ret))); return GST_FLOW_ERROR; } if (G_UNLIKELY (ret > 0)) { GST_DEBUG_OBJECT (element, "kate_high_decode_packetin has received EOS packet"); } /* headers may be interesting to retrieve information from */ if (G_UNLIKELY (is_header)) { switch (header[0]) { case 0x80: /* ID header */ GST_INFO_OBJECT (element, "Parsed ID header: language %s, category %s", decoder->k.ki->language, decoder->k.ki->category); if (src_caps) { if (*src_caps) { gst_caps_unref (*src_caps); *src_caps = NULL; } if (strcmp (decoder->k.ki->category, "K-SPU") == 0 || strcmp (decoder->k.ki->category, "spu-subtitles") == 0) { *src_caps = gst_caps_new_empty_simple ("video/x-dvd-subpicture"); } else if (decoder->k.ki->text_markup_type == kate_markup_none) { *src_caps = gst_caps_new_empty_simple ("text/plain"); } else 
{ *src_caps = gst_caps_new_empty_simple ("text/x-pango-markup"); } GST_INFO_OBJECT (srcpad, "Setting caps: %" GST_PTR_FORMAT, *src_caps); if (!gst_pad_set_caps (srcpad, *src_caps)) { GST_ERROR_OBJECT (srcpad, "Failed to set caps %" GST_PTR_FORMAT, *src_caps); } } if (decoder->k.ki->language && *decoder->k.ki->language) { GstTagList *old = decoder->tags, *tags = gst_tag_list_new_empty (); if (tags) { gchar *lang_code; /* en_GB -> en */ lang_code = g_ascii_strdown (decoder->k.ki->language, -1); g_strdelimit (lang_code, NULL, '\0'); gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_LANGUAGE_CODE, lang_code, NULL); g_free (lang_code); /* TODO: category - where should it go ? */ decoder->tags = gst_tag_list_merge (decoder->tags, tags, GST_TAG_MERGE_REPLACE); gst_tag_list_free (tags); if (old) gst_tag_list_free (old); } } /* update properties */ if (decoder->language) g_free (decoder->language); decoder->language = g_strdup (decoder->k.ki->language); if (decoder->category) g_free (decoder->category); decoder->category = g_strdup (decoder->k.ki->category); decoder->original_canvas_width = decoder->k.ki->original_canvas_width; decoder->original_canvas_height = decoder->k.ki->original_canvas_height; /* we can now send away any event we've delayed, as the src pad now has caps */ gst_kate_util_decoder_base_drain_event_queue (decoder); break; case 0x81: /* Vorbis comments header */ GST_INFO_OBJECT (element, "Parsed comments header"); { gchar *encoder = NULL; GstTagList *old = decoder->tags, *list = gst_tag_list_from_vorbiscomment_buffer (buf, (const guint8 *) "\201kate\0\0\0\0", 9, &encoder); if (list) { decoder->tags = gst_tag_list_merge (decoder->tags, list, GST_TAG_MERGE_REPLACE); gst_tag_list_free (list); } if (!decoder->tags) { GST_ERROR_OBJECT (element, "failed to decode comment header"); decoder->tags = gst_tag_list_new_empty (); } if (encoder) { gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, encoder, NULL); g_free (encoder); } 
gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE, GST_TAG_SUBTITLE_CODEC, "Kate", NULL); gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER_VERSION, decoder->k.ki->bitstream_version_major, NULL); if (old) gst_tag_list_free (old); if (decoder->initialized) { gst_element_found_tags_for_pad (element, tagpad, decoder->tags); decoder->tags = NULL; } else { /* Only push them as messages for the time being. * * They will be pushed on the pad once the decoder is initialized */ gst_element_post_message (element, gst_message_new_tag (GST_OBJECT (element), gst_tag_list_copy (decoder->tags))); } } break; default: break; } } #if ((KATE_VERSION_MAJOR<<16)|(KATE_VERSION_MINOR<<8)|KATE_VERSION_PATCH) >= 0x000400 else if (*ev && (*ev)->meta) { int count = kate_meta_query_count ((*ev)->meta); if (count > 0) { GstTagList *evtags = gst_tag_list_new_empty (); int idx; GST_DEBUG_OBJECT (decoder, "Kate event has %d attached metadata", count); for (idx = 0; idx < count; ++idx) { const char *tag, *value; size_t len; if (kate_meta_query ((*ev)->meta, idx, &tag, &value, &len) < 0) { GST_WARNING_OBJECT (decoder, "Failed to retrieve metadata %d", idx); } else { if (gst_kate_util_is_utf8_string (value, len)) { gchar *compound = g_strdup_printf ("%s=%s", tag, value); GST_DEBUG_OBJECT (decoder, "Metadata %d: %s=%s (%zu bytes)", idx, tag, value, len); gst_tag_list_add (evtags, GST_TAG_MERGE_APPEND, GST_TAG_EXTENDED_COMMENT, compound, NULL); g_free (compound); } else { GST_INFO_OBJECT (decoder, "Metadata %d, (%s, %zu bytes) is binary, ignored", idx, tag, len); } } } if (gst_tag_list_is_empty (evtags)) gst_tag_list_free (evtags); else gst_element_found_tags_for_pad (element, tagpad, evtags); } } #endif return rflow; }
/* Chain function for the DTS decoder.
 *
 * In DVD mode each incoming buffer carries a 2-byte big-endian "first access"
 * pointer at its head: the offset (1-based, counted after those 2 bytes) of
 * the first frame the buffer's timestamp applies to.  Data before that point
 * is pushed downstream with no timestamp, the remainder with the buffer's
 * timestamp.  Outside DVD mode the buffer is forwarded unchanged.
 *
 * Takes ownership of @buf on every path.  Returns the flow result of the
 * wrapped base chain function, or GST_FLOW_ERROR on malformed input.
 */
static GstFlowReturn
gst_dtsdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstDtsDec *dts = GST_DTSDEC (parent);
  gint first_access;

  if (dts->dvdmode) {
    guint8 data[2];
    gsize size;
    gint offset, len;
    GstBuffer *subbuf;

    size = gst_buffer_get_size (buf);
    /* need at least the 2-byte first_access field */
    if (size < 2)
      goto not_enough_data;

    gst_buffer_extract (buf, 0, data, 2);
    first_access = (data[0] << 8) | data[1];

    /* Skip the first_access header */
    offset = 2;

    if (first_access > 1) {
      /* Length of data before first_access */
      len = first_access - 1;

      /* reject pointers past the end of the buffer (len > 0 here, so the
       * signed/unsigned comparison against @size is safe) */
      if (len <= 0 || offset + len > size)
        goto bad_first_access_parameter;

      /* data preceding first_access: no timestamp applies */
      subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len);
      GST_BUFFER_TIMESTAMP (subbuf) = GST_CLOCK_TIME_NONE;
      ret = dts->base_chain (pad, parent, subbuf);
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buf);
        goto done;
      }

      offset += len;
      len = size - offset;

      /* remainder starts at first_access: the buffer timestamp applies here */
      if (len > 0) {
        subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len);
        GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf);

        ret = dts->base_chain (pad, parent, subbuf);
      }
      gst_buffer_unref (buf);
    } else {
      /* first_access = 0 or 1, so if there's a timestamp it applies to the first byte */
      subbuf =
          gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset,
          size - offset);
      GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf);
      ret = dts->base_chain (pad, parent, subbuf);
      gst_buffer_unref (buf);
    }
  } else {
    ret = dts->base_chain (pad, parent, buf);
  }

done:
  return ret;

/* ERRORS */
not_enough_data:
  {
    /* fixed typo in user-visible message: "first_acess" -> "first_access" */
    GST_ELEMENT_ERROR (GST_ELEMENT (dts), STREAM, DECODE, (NULL),
        ("Insufficient data in buffer. Can't determine first_access"));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
bad_first_access_parameter:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dts), STREAM, DECODE, (NULL),
        ("Bad first_access parameter (%d) in buffer", first_access));
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}
static GstFlowReturn gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstFlacTag *tag; GstFlowReturn ret; GstMapInfo map; gsize size; ret = GST_FLOW_OK; tag = GST_FLAC_TAG (parent); gst_adapter_push (tag->adapter, buffer); /* Initial state, we don't even know if we are dealing with a flac file */ if (tag->state == GST_FLAC_TAG_STATE_INIT) { GstBuffer *id_buffer; if (gst_adapter_available (tag->adapter) < sizeof (FLAC_MAGIC)) goto cleanup; id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE); GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier"); if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) { GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer"); ret = gst_pad_push (tag->srcpad, id_buffer); if (ret != GST_FLOW_OK) goto cleanup; tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS; } else { /* FIXME: does that work well with FLAC files containing ID3v2 tags ? */ gst_buffer_unref (id_buffer); GST_ELEMENT_ERROR (tag, STREAM, WRONG_TYPE, (NULL), (NULL)); ret = GST_FLOW_ERROR; } } /* The fLaC magic string has been skipped, try to detect the beginning * of a metadata block */ if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) { guint type; gboolean is_last; const guint8 *block_header; g_assert (tag->metadata_block_size == 0); g_assert (tag->metadata_last_block == FALSE); /* The header of a flac metadata block is 4 bytes long: * 1st bit: indicates whether this is the last metadata info block * 7 next bits: 4 if vorbis comment block * 24 next bits: size of the metadata to follow (big endian) */ if (gst_adapter_available (tag->adapter) < 4) goto cleanup; block_header = gst_adapter_map (tag->adapter, 4); is_last = ((block_header[0] & 0x80) == 0x80); type = block_header[0] & 0x7F; size = (block_header[1] << 16) | (block_header[2] << 8) | block_header[3]; gst_adapter_unmap (tag->adapter); /* The 4 bytes long header isn't included in the metadata size */ tag->metadata_block_size = size + 4; 
tag->metadata_last_block = is_last; GST_DEBUG_OBJECT (tag, "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, " "is vorbiscomment: %d, is last: %d", size, type, (type == 0x04), is_last); /* Metadata blocks of type 4 are vorbis comment blocks */ if (type == 0x04) { tag->state = GST_FLAC_TAG_STATE_VC_METADATA_BLOCK; } else { tag->state = GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK; } } /* Reads a metadata block */ if ((tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) || (tag->state == GST_FLAC_TAG_STATE_VC_METADATA_BLOCK)) { GstBuffer *metadata_buffer; if (gst_adapter_available (tag->adapter) < tag->metadata_block_size) goto cleanup; metadata_buffer = gst_adapter_take_buffer (tag->adapter, tag->metadata_block_size); /* clear the is-last flag, as the last metadata block will * be the vorbis comment block which we will build ourselves. */ gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE); map.data[0] &= (~0x80); gst_buffer_unmap (metadata_buffer, &map); if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) { GST_DEBUG_OBJECT (tag, "pushing metadata block buffer"); ret = gst_pad_push (tag->srcpad, metadata_buffer); if (ret != GST_FLOW_OK) goto cleanup; } else { tag->vorbiscomment = metadata_buffer; } tag->metadata_block_size = 0; tag->state = GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK; } /* This state is mainly used to be able to stop as soon as we read * a vorbiscomment block from the flac file if we are in an only output * tags mode */ if (tag->state == GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK) { /* Check if in the previous iteration we read a vorbis comment metadata * block, and stop now if the user only wants to read tags */ if (tag->vorbiscomment != NULL) { guint8 id_data[4]; /* We found some tags, try to parse them and notify the other elements * that we encountered some tags */ GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags"); gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4); tag->tags = gst_tag_list_from_vorbiscomment_buffer 
(tag->vorbiscomment, id_data, 4, NULL); if (tag->tags != NULL) { gst_pad_push_event (tag->srcpad, gst_event_new_tag (gst_tag_list_copy (tag->tags))); } gst_buffer_unref (tag->vorbiscomment); tag->vorbiscomment = NULL; } /* Skip to next state */ if (tag->metadata_last_block == FALSE) { tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS; } else { tag->state = GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT; } } /* Creates a vorbis comment block from the metadata which was set * on the gstreamer element, and add it to the flac stream */ if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) { GstBuffer *buffer; const GstTagList *user_tags; GstTagList *merged_tags; /* merge the tag lists */ user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tag)); if (user_tags != NULL) { merged_tags = gst_tag_list_merge (user_tags, tag->tags, gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tag))); } else { merged_tags = gst_tag_list_copy (tag->tags); } if (merged_tags == NULL) { /* If we get a NULL list of tags, we must generate a padding block * which is marked as the last metadata block, otherwise we'll * end up with a corrupted flac file. 
*/ GST_WARNING_OBJECT (tag, "No tags found"); buffer = gst_buffer_new_and_alloc (12); if (buffer == NULL) goto no_buffer; gst_buffer_map (buffer, &map, GST_MAP_WRITE); memset (map.data, 0, map.size); map.data[0] = 0x81; /* 0x80 = Last metadata block, * 0x01 = padding block */ gst_buffer_unmap (buffer, &map); } else { guchar header[4]; guint8 fbit[1]; memset (header, 0, sizeof (header)); header[0] = 0x84; /* 0x80 = Last metadata block, * 0x04 = vorbiscomment block */ buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header, sizeof (header), NULL); GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags); gst_tag_list_free (merged_tags); if (buffer == NULL) goto no_comment; size = gst_buffer_get_size (buffer); if ((size < 4) || ((size - 4) > 0xFFFFFF)) goto comment_too_long; fbit[0] = 1; /* Get rid of the framing bit at the end of the vorbiscomment buffer * if it exists since libFLAC seems to lose sync because of this * bit in gstflacdec */ if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) { buffer = gst_buffer_make_writable (buffer); gst_buffer_resize (buffer, 0, size - 1); } } /* The 4 byte metadata block header isn't accounted for in the total * size of the metadata block */ gst_buffer_map (buffer, &map, GST_MAP_WRITE); map.data[1] = (((map.size - 4) & 0xFF0000) >> 16); map.data[2] = (((map.size - 4) & 0x00FF00) >> 8); map.data[3] = ((map.size - 4) & 0x0000FF); gst_buffer_unmap (buffer, &map); GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment " "buffer", map.size); ret = gst_pad_push (tag->srcpad, buffer); if (ret != GST_FLOW_OK) { goto cleanup; } tag->state = GST_FLAC_TAG_STATE_AUDIO_DATA; }
/* Render one buffer to the shared-memory pipe.
 *
 * If the buffer is a single GstMemory allocated by our own allocator it is
 * sent zero-copy; otherwise its content is copied into a freshly allocated
 * shm memory first.  Blocks (on self->cond, under the object lock) until a
 * client is connected (when wait_for_connection is set), until rendering is
 * allowed, and until shm space is available; each wait bails out with
 * GST_FLOW_FLUSHING when self->unlock is set.
 *
 * Runs almost entirely under GST_OBJECT_LOCK (self); every return path must
 * unlock (the flushing label and the explicit unlock/return pairs do this).
 */
static GstFlowReturn
gst_shm_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstShmSink *self = GST_SHM_SINK (bsink);
  int rv = 0;
  GstMapInfo map;
  gboolean need_new_memory = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;
  GstMemory *memory = NULL;
  GstBuffer *sendbuf = NULL;

  GST_OBJECT_LOCK (self);
  /* wait for a client if configured to do so */
  while (self->wait_for_connection && !self->clients) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock)
      goto flushing;
  }

  /* wait until the pipe can accept this buffer */
  while (!gst_shm_sink_can_render (self, GST_BUFFER_TIMESTAMP (buf))) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock)
      goto flushing;
  }

  /* decide whether we can send the buffer's memory as-is: it must be a
   * single GstMemory that came from our shm allocator */
  if (gst_buffer_n_memory (buf) > 1) {
    GST_LOG_OBJECT (self, "Buffer %p has %d GstMemory, we only support a single"
        " one, need to do a memcpy", buf, gst_buffer_n_memory (buf));
    need_new_memory = TRUE;
  } else {
    memory = gst_buffer_peek_memory (buf, 0);
    if (memory->allocator != GST_ALLOCATOR (self->allocator)) {
      need_new_memory = TRUE;
      GST_LOG_OBJECT (self, "Memory in buffer %p was not allocated by "
          "%" GST_PTR_FORMAT ", will memcpy", buf, memory->allocator);
    }
  }

  if (need_new_memory) {
    if (gst_buffer_get_size (buf) > sp_writer_get_max_buf_size (self->pipe)) {
      gsize area_size = sp_writer_get_max_buf_size (self->pipe);
      GST_OBJECT_UNLOCK (self);
      /* NOTE(review): the two concatenated string literals render as
       * "...smaller thanbuffer of size..." - missing space between them */
      GST_ELEMENT_ERROR (self, RESOURCE, NO_SPACE_LEFT,
          ("Shared memory area is too small"),
          ("Shared memory area of size %" G_GSIZE_FORMAT " is smaller than"
              "buffer of size %" G_GSIZE_FORMAT, area_size,
              gst_buffer_get_size (buf)));
      return GST_FLOW_ERROR;
    }

    /* block until an shm slot large enough is free; memory is NULL while
     * waiting, so the flushing path leaks nothing here */
    while ((memory = gst_shm_sink_allocator_alloc_locked (self->allocator,
                gst_buffer_get_size (buf), &self->params)) == NULL) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock)
        goto flushing;
    }

    /* re-check the connection condition now that we hold shm memory; on
     * unlock the freshly allocated memory must be released first */
    while (self->wait_for_connection && !self->clients) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        gst_memory_unref (memory);
        GST_OBJECT_UNLOCK (self);
        return GST_FLOW_FLUSHING;
      }
    }

    /* copy the buffer content into the shm slot
     * NOTE(review): gst_memory_map return value is not checked here - a
     * failed map would dereference undefined map.data; confirm upstream */
    gst_memory_map (memory, &map, GST_MAP_WRITE);
    gst_buffer_extract (buf, 0, map.data, map.size);
    gst_memory_unmap (memory, &map);

    /* wrap the shm memory in a new buffer, keeping the original metadata */
    sendbuf = gst_buffer_new ();
    gst_buffer_copy_into (sendbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
    gst_buffer_append_memory (sendbuf, memory);
  } else {
    /* zero-copy path: send the caller's buffer directly */
    sendbuf = gst_buffer_ref (buf);
  }

  gst_buffer_map (sendbuf, &map, GST_MAP_READ);
  /* Make the memory readonly as of now as we've sent it to the other side
   * We know it's not mapped for writing anywhere as we just mapped it for
   * reading */
  rv = sp_writer_send_buf (self->pipe, (char *) map.data, map.size, sendbuf);
  gst_buffer_unmap (sendbuf, &map);

  GST_OBJECT_UNLOCK (self);

  if (rv == 0) {
    /* nobody took a reference through the pipe; drop ours */
    GST_DEBUG_OBJECT (self, "No clients connected, unreffing buffer");
    gst_buffer_unref (sendbuf);
  } else if (rv == -1) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Invalid allocated buffer"),
        ("The shmpipe library rejects our buffer, this is a bug"));
    ret = GST_FLOW_ERROR;
  }

  /* NOTE(review): stale comment - no unmap code follows; the memory was
   * already unmapped above.  Left for a maintainer to confirm/remove. */
  /* If we allocated our own memory, then unmap it */

  return ret;

flushing:
  GST_OBJECT_UNLOCK (self);
  return GST_FLOW_FLUSHING;
}
/* Render one buffer to the shared-memory pipe.
 *
 * Single-GstMemory buffers from our own allocator are sent zero-copy; any
 * other buffer is copied into a freshly allocated shm memory.  All waits
 * (for a client, for render permission, for shm space) happen on self->cond
 * under the object lock and cooperate with basesink unlock/preroll via
 * gst_base_sink_wait_preroll().
 *
 * Fixes vs. previous revision:
 *  - missing space in the "smaller than buffer" error message
 *  - @memory leaked when gst_memory_map() failed
 *  - @memory and @sendbuf leaked when gst_buffer_copy_into() failed
 *    (memory was not yet appended to sendbuf at that point)
 *  - @sendbuf leaked when gst_buffer_map() or sp_writer_send_buf() failed
 */
static GstFlowReturn
gst_shm_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstShmSink *self = GST_SHM_SINK (bsink);
  int rv = 0;
  GstMapInfo map;
  gboolean need_new_memory = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;
  GstMemory *memory = NULL;
  GstBuffer *sendbuf = NULL;
  gsize written_bytes;

  GST_OBJECT_LOCK (self);
  /* wait for a client if configured to do so */
  while (self->wait_for_connection && !self->clients) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock) {
      GST_OBJECT_UNLOCK (self);
      ret = gst_base_sink_wait_preroll (bsink);
      if (ret == GST_FLOW_OK)
        GST_OBJECT_LOCK (self);
      else
        return ret;
    }
  }

  /* wait until the pipe can accept this buffer */
  while (!gst_shm_sink_can_render (self, GST_BUFFER_TIMESTAMP (buf))) {
    g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
    if (self->unlock) {
      GST_OBJECT_UNLOCK (self);
      ret = gst_base_sink_wait_preroll (bsink);
      if (ret == GST_FLOW_OK)
        GST_OBJECT_LOCK (self);
      else
        return ret;
    }
  }

  /* zero-copy is only possible for a single GstMemory that came from our
   * own shm allocator */
  if (gst_buffer_n_memory (buf) > 1) {
    GST_LOG_OBJECT (self, "Buffer %p has %d GstMemory, we only support a single"
        " one, need to do a memcpy", buf, gst_buffer_n_memory (buf));
    need_new_memory = TRUE;
  } else {
    memory = gst_buffer_peek_memory (buf, 0);
    if (memory->allocator != GST_ALLOCATOR (self->allocator)) {
      need_new_memory = TRUE;
      GST_LOG_OBJECT (self, "Memory in buffer %p was not allocated by "
          "%" GST_PTR_FORMAT ", will memcpy", buf, memory->allocator);
    }
  }

  if (need_new_memory) {
    if (gst_buffer_get_size (buf) > sp_writer_get_max_buf_size (self->pipe)) {
      gsize area_size = sp_writer_get_max_buf_size (self->pipe);
      /* FIX: added missing space between the concatenated literals */
      GST_ELEMENT_ERROR (self, RESOURCE, NO_SPACE_LEFT, (NULL),
          ("Shared memory area of size %" G_GSIZE_FORMAT " is smaller than "
              "buffer of size %" G_GSIZE_FORMAT, area_size,
              gst_buffer_get_size (buf)));
      goto error;
    }

    /* block until an shm slot large enough is free; memory stays NULL while
     * waiting, so nothing leaks on the early-return path */
    while ((memory = gst_shm_sink_allocator_alloc_locked (self->allocator,
                gst_buffer_get_size (buf), &self->params)) == NULL) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        GST_OBJECT_UNLOCK (self);
        ret = gst_base_sink_wait_preroll (bsink);
        if (ret == GST_FLOW_OK)
          GST_OBJECT_LOCK (self);
        else
          return ret;
      }
    }

    /* re-check the connection condition now that we hold shm memory */
    while (self->wait_for_connection && !self->clients) {
      g_cond_wait (&self->cond, GST_OBJECT_GET_LOCK (self));
      if (self->unlock) {
        GST_OBJECT_UNLOCK (self);
        ret = gst_base_sink_wait_preroll (bsink);
        if (ret == GST_FLOW_OK) {
          GST_OBJECT_LOCK (self);
        } else {
          gst_memory_unref (memory);
          return ret;
        }
      }
    }

    if (!gst_memory_map (memory, &map, GST_MAP_WRITE)) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Failed to map memory"));
      /* FIX: release the freshly allocated shm memory on failure */
      gst_memory_unref (memory);
      goto error;
    }

    GST_DEBUG_OBJECT (self,
        "Copying %" G_GSIZE_FORMAT " bytes into map of size %" G_GSIZE_FORMAT
        " bytes.", gst_buffer_get_size (buf), map.size);
    written_bytes = gst_buffer_extract (buf, 0, map.data, map.size);
    GST_DEBUG_OBJECT (self, "Copied %" G_GSIZE_FORMAT " bytes.",
        written_bytes);
    gst_memory_unmap (memory, &map);

    /* wrap the shm memory in a new buffer, keeping the original metadata */
    sendbuf = gst_buffer_new ();
    if (!gst_buffer_copy_into (sendbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1)) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
          ("Failed to copy data into send buffer"));
      gst_buffer_unref (sendbuf);
      /* FIX: memory was not yet appended to sendbuf, release it separately */
      gst_memory_unref (memory);
      goto error;
    }
    gst_buffer_append_memory (sendbuf, memory);
  } else {
    /* zero-copy path: send the caller's buffer directly */
    sendbuf = gst_buffer_ref (buf);
  }

  if (!gst_buffer_map (sendbuf, &map, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
        ("Failed to map data into send buffer"));
    /* FIX: drop our reference (and the appended memory with it) */
    gst_buffer_unref (sendbuf);
    goto error;
  }

  /* Make the memory readonly as of now as we've sent it to the other side
   * We know it's not mapped for writing anywhere as we just mapped it for
   * reading */
  rv = sp_writer_send_buf (self->pipe, (char *) map.data, map.size, sendbuf);
  if (rv == -1) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED, (NULL),
        ("Failed to send data over SHM"));
    gst_buffer_unmap (sendbuf, &map);
    /* FIX: the pipe took no reference, so ours must be released */
    gst_buffer_unref (sendbuf);
    goto error;
  }
  gst_buffer_unmap (sendbuf, &map);

  GST_OBJECT_UNLOCK (self);

  if (rv == 0) {
    /* nobody took a reference through the pipe; drop ours */
    GST_DEBUG_OBJECT (self, "No clients connected, unreffing buffer");
    gst_buffer_unref (sendbuf);
  }

  return ret;

error:
  GST_OBJECT_UNLOCK (self);
  return GST_FLOW_ERROR;
}
/* Payload one parsed ASF data packet into one or more RTP packets.
 *
 * The packet's useful bytes (packet size minus trailing padding) are copied
 * into the pending RTP output buffer (rtpasfpay->current), each fragment
 * preceded by an 8-byte ASF payload header (flags, length/offset 24-bit BE,
 * relative send time 32-bit BE).  The RTP buffer is pushed either when a
 * fragment completes a previous ASF packet (force_push) or when fewer than
 * 8 bytes of payload room remain.
 *
 * Takes ownership of @buffer on every path.
 */
static GstFlowReturn
gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
{
  GstRTPBasePayload *rtppay;
  GstAsfPacketInfo *packetinfo;
  guint8 flags;
  guint8 *data;
  guint32 packet_util_size;
  guint32 packet_offset;
  guint32 size_left;
  GstFlowReturn ret = GST_FLOW_OK;

  rtppay = GST_RTP_BASE_PAYLOAD (rtpasfpay);
  packetinfo = &rtpasfpay->packetinfo;

  if (!gst_asf_parse_packet (buffer, packetinfo, TRUE,
          rtpasfpay->asfinfo.packet_size)) {
    GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  /* fall back to the stream-wide packet size if the packet carries none */
  if (packetinfo->packet_size == 0)
    packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;

  GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
      ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
      packetinfo->padding);

  /* update padding field to 0 */
  if (packetinfo->padding > 0) {
    GstAsfPacketInfo info;
    /* find padding field offset */
    guint offset = packetinfo->err_cor_len + 2 +
        gst_asf_get_var_size_field_len (packetinfo->packet_field_type) +
        gst_asf_get_var_size_field_len (packetinfo->seq_field_type);

    buffer = gst_buffer_make_writable (buffer);
    /* zero out the padding-length field in place, sized per its field type */
    switch (packetinfo->padd_field_type) {
      case ASF_FIELD_TYPE_DWORD:
        gst_buffer_memset (buffer, offset, 0, 4);
        break;
      case ASF_FIELD_TYPE_WORD:
        gst_buffer_memset (buffer, offset, 0, 2);
        break;
      case ASF_FIELD_TYPE_BYTE:
        gst_buffer_memset (buffer, offset, 0, 1);
        break;
      case ASF_FIELD_TYPE_NONE:
      default:
        break;
    }
    /* NOTE(review): re-parse after rewriting the padding field; the result
     * (&info) and the return value are discarded - presumably only the
     * side effects inside gst_asf_parse_packet matter here, confirm */
    gst_asf_parse_packet (buffer, &info, FALSE, 0);
  }

  /* number of useful (non-padding) bytes to payload */
  if (packetinfo->padding != 0)
    packet_util_size = rtpasfpay->asfinfo.packet_size - packetinfo->padding;
  else
    packet_util_size = packetinfo->packet_size;
  packet_offset = 0;
  while (packet_util_size > 0) {
    /* Even if we don't fill completely an output buffer we
     * push it when we add an fragment. Because it seems that
     * it is not possible to determine where a asf packet
     * fragment ends inside a rtp packet payload.
     * This flag tells us to push the packet.
     */
    gboolean force_push = FALSE;
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

    /* we have no output buffer pending, create one */
    if (rtpasfpay->current == NULL) {
      GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
      rtpasfpay->current =
          gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU (rtpasfpay),
          0, 0);
      rtpasfpay->cur_off = 0;
      rtpasfpay->has_ts = FALSE;
      rtpasfpay->marker = FALSE;
    }
    /* NOTE(review): the rtp mapping is only unmapped on the push path below;
     * if the loop exits without pushing (pending buffer kept with > 8 bytes
     * free), this map appears to stay open - confirm against upstream */
    gst_rtp_buffer_map (rtpasfpay->current, GST_MAP_READWRITE, &rtp);
    data = gst_rtp_buffer_get_payload (&rtp);
    data += rtpasfpay->cur_off;
    size_left = gst_rtp_buffer_get_payload_len (&rtp) - rtpasfpay->cur_off;

    GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
        G_GUINT32_FORMAT "/%" G_GSIZE_FORMAT, packet_offset,
        gst_buffer_get_size (buffer));

    GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
    GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
        rtpasfpay->cur_off);
    GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
    GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
        rtpasfpay->has_ts ? "yes" : "no");
    if (rtpasfpay->has_ts) {
      GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
    }

    /* build the ASF payload-header flags byte */
    flags = 0;
    if (packetinfo->has_keyframe) {
      flags = flags | 0x80;
    }
    flags = flags | 0x20;       /* Relative timestamp is present */

    if (!rtpasfpay->has_ts) {
      /* this is the first asf packet, its send time is the
       * rtp packet timestamp */
      rtpasfpay->has_ts = TRUE;
      rtpasfpay->ts = packetinfo->send_time;
    }

    if (size_left >= packet_util_size + 8) {
      /* enough space for the rest of the packet */
      if (packet_offset == 0) {
        /* complete packet: 0x40 flags length instead of offset */
        flags = flags | 0x40;
        GST_WRITE_UINT24_BE (data + 1, packet_util_size);
      } else {
        GST_WRITE_UINT24_BE (data + 1, packet_offset);
        force_push = TRUE;
      }
      data[0] = flags;
      /* relative send time, signed difference against the first packet */
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      gst_buffer_extract (buffer, packet_offset, data + 8, packet_util_size);

      /* updating status variables */
      rtpasfpay->cur_off += 8 + packet_util_size;
      size_left -= packet_util_size + 8;
      packet_offset += packet_util_size;
      packet_util_size = 0;
      rtpasfpay->marker = TRUE;
    } else {
      /* fragment packet */
      data[0] = flags;
      GST_WRITE_UINT24_BE (data + 1, packet_offset);
      GST_WRITE_UINT32_BE (data + 4,
          (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
      /* fill the remaining payload room (header takes 8 of size_left) */
      gst_buffer_extract (buffer, packet_offset, data + 8, size_left - 8);

      /* updating status variables */
      rtpasfpay->cur_off += size_left;
      packet_offset += size_left - 8;
      packet_util_size -= size_left - 8;
      size_left = 0;
      force_push = TRUE;
    }

    /* there is not enough room for any more buffers */
    if (force_push || size_left <= 8) {
      gst_rtp_buffer_set_ssrc (&rtp, rtppay->current_ssrc);
      gst_rtp_buffer_set_marker (&rtp, rtpasfpay->marker);
      gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (rtppay));
      gst_rtp_buffer_set_seq (&rtp, rtppay->seqnum + 1);
      gst_rtp_buffer_set_timestamp (&rtp, packetinfo->send_time);
      gst_rtp_buffer_unmap (&rtp);

      /* trim remaining bytes not used */
      if (size_left != 0) {
        gst_buffer_set_size (rtpasfpay->current,
            gst_buffer_get_size (rtpasfpay->current) - size_left);
      }

      GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);

      rtppay->seqnum++;
      rtppay->timestamp = packetinfo->send_time;

      GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
      ret = gst_rtp_base_payload_push (rtppay, rtpasfpay->current);
      rtpasfpay->current = NULL;
      if (ret != GST_FLOW_OK) {
        gst_buffer_unref (buffer);
        return ret;
      }
    }
  }
  gst_buffer_unref (buffer);

  return ret;
}
/* Chain function for the DeckLink video sink pad.
 *
 * Copies the incoming buffer into a freshly created DeckLink video frame,
 * queues the frame for scheduled playback (throttling to at most 3 queued
 * frames via the cond/mutex pair) and starts playback on the first frame.
 *
 * Takes ownership of @buffer.  Returns GST_FLOW_FLUSHING when the sink is
 * stopping, GST_FLOW_ERROR when the frame cannot be created.
 */
static GstFlowReturn
gst_decklink_sink_videosink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstDecklinkSink *decklinksink;
  IDeckLinkMutableVideoFrame *frame;
  void *data;
  GstFlowReturn ret;
  const GstDecklinkMode *mode;
  HRESULT res;
  gsize frame_size;

  decklinksink = GST_DECKLINK_SINK (parent);

#if 0
  if (!decklinksink->video_enabled) {
    HRESULT ret;
    ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode,
        bmdVideoOutputFlagDefault);
    if (ret != S_OK) {
      GST_WARNING ("failed to enable video output");
      //return FALSE;
    }
    decklinksink->video_enabled = TRUE;
  }
#endif

  mode = gst_decklink_get_mode (decklinksink->mode);

  /* row stride is mode->width * 2 bytes, i.e. a 2-bytes-per-pixel format
   * (matches decklinksink->pixel_format as configured elsewhere) */
  res = decklinksink->output->CreateVideoFrame (mode->width,
      mode->height, mode->width * 2, decklinksink->pixel_format,
      bmdFrameFlagDefault, &frame);
  /* FIX: previously the result was ignored and a garbage frame pointer
   * would have been dereferenced on failure */
  if (res != S_OK) {
    GST_ELEMENT_ERROR (decklinksink, RESOURCE, FAILED,
        ("Failed to create video frame"), (NULL));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  frame->GetBytes (&data);
  /* FIX: clamp the copy to the frame allocation so an oversized input
   * buffer cannot overflow the frame */
  frame_size = (gsize) (mode->width * 2) * mode->height;
  gst_buffer_extract (buffer, 0, data,
      MIN (gst_buffer_get_size (buffer), frame_size));
  gst_buffer_unref (buffer);

  /* throttle: keep at most 3 frames queued towards the hardware */
  g_mutex_lock (&decklinksink->mutex);
  while (decklinksink->queued_frames > 2 && !decklinksink->stop) {
    g_cond_wait (&decklinksink->cond, &decklinksink->mutex);
  }
  if (!decklinksink->stop) {
    decklinksink->queued_frames++;
  }
  g_mutex_unlock (&decklinksink->mutex);

  if (!decklinksink->stop) {
    decklinksink->output->ScheduleVideoFrame (frame,
        decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n);
    decklinksink->num_frames++;

    if (!decklinksink->sched_started) {
      decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0);
      decklinksink->sched_started = TRUE;
    }

    ret = GST_FLOW_OK;
  } else {
    ret = GST_FLOW_FLUSHING;
  }

  /* drop our reference; ScheduleVideoFrame keeps its own if queued */
  frame->Release ();

  return ret;
}