static GstFlowReturn
gst_wavenc_write_toc (GstWavEnc * wavenc)
{
  GList *list;
  GstToc *toc;
  GstTocEntry *entry, *subentry;
  GstBuffer *buf;
  GstMapInfo map;
  guint8 *data;
  guint32 ncues, size, cues_size, labls_size, notes_size;

  if (!wavenc->toc) {
    GST_DEBUG_OBJECT (wavenc, "have no toc, checking toc_setter");
    wavenc->toc = gst_toc_setter_get_toc (GST_TOC_SETTER (wavenc));
  }
  if (!wavenc->toc) {
    GST_WARNING_OBJECT (wavenc, "have no toc");
    return GST_FLOW_OK;
  }

  toc = gst_toc_ref (wavenc->toc);
  size = 0;
  cues_size = 0;
  labls_size = 0;
  notes_size = 0;

  /* check if the TOC entries are valid */
  list = gst_toc_get_entries (toc);
  entry = list->data;
  if (gst_toc_entry_is_alternative (entry)) {
    list = gst_toc_entry_get_sub_entries (entry);
    while (list) {
      subentry = list->data;
      if (!gst_toc_entry_is_sequence (subentry))
        goto invalid_toc;
      list = g_list_next (list);
    }
    list = gst_toc_entry_get_sub_entries (entry);
  }
  if (gst_toc_entry_is_sequence (entry)) {
    while (list) {
      entry = list->data;
      if (!gst_toc_entry_is_sequence (entry))
        goto invalid_toc;
      list = g_list_next (list);
    }
    list = gst_toc_get_entries (toc);
  }

  ncues = g_list_length (list);
  GST_DEBUG_OBJECT (wavenc, "number of cue entries: %u", ncues);

  while (list) {
    guint32 id = 0;
    gint64 id64;
    const gchar *uid;

    entry = list->data;
    uid = gst_toc_entry_get_uid (entry);
    id64 = g_ascii_strtoll (uid, NULL, 0);
    /* If the UID is a unique id that fits into a guint32, use it;
     * otherwise generate a random unique id */
    if (id64 >= 0 && gst_wavenc_is_cue_id_unique (id64, wavenc->cues)) {
      id = (guint32) id64;
    } else {
      do {
        id = g_random_int ();
      } while (!gst_wavenc_is_cue_id_unique (id, wavenc->cues));
    }
    gst_wavenc_parse_cue (wavenc, id, entry);
    gst_wavenc_parse_labl (wavenc, id, entry);
    gst_wavenc_parse_note (wavenc, id, entry);
    list = g_list_next (list);
  }

  /* count cues size */
  if (wavenc->cues) {
    cues_size = 24 * g_list_length (wavenc->cues);
    size += 12 + cues_size;
  } else {
    GST_WARNING_OBJECT (wavenc, "no cue entries found");
    goto invalid_toc;
  }
  /* count labls size */
  if (wavenc->labls) {
    list = wavenc->labls;
    while (list) {
      GstWavEncLabl *labl;

      labl = list->data;
      labls_size += 8 + GST_ROUND_UP_2 (labl->chunk_data_size);
      list = g_list_next (list);
    }
    size += labls_size;
  }
  /* count notes size */
  if (wavenc->notes) {
    list = wavenc->notes;
    while (list) {
      GstWavEncNote *note;

      note = list->data;
      notes_size += 8 + GST_ROUND_UP_2 (note->chunk_data_size);
      list = g_list_next (list);
    }
    size += notes_size;
  }
  if (wavenc->labls || wavenc->notes) {
    size += 12;
  }

  buf = gst_buffer_new_and_alloc (size);
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;
  memset (data, 0, size);

  /* write Cue Chunk */
  if (wavenc->cues) {
    memcpy (data, (gchar *) "cue ", 4);
    GST_WRITE_UINT32_LE (data + 4, 4 + cues_size);
    GST_WRITE_UINT32_LE (data + 8, ncues);
    data += 12;
    gst_wavenc_write_cues (&data, wavenc->cues);

    /* write Associated Data List Chunk */
    if (wavenc->labls || wavenc->notes) {
      memcpy (data, (gchar *) "LIST", 4);
      GST_WRITE_UINT32_LE (data + 4, 4 + labls_size + notes_size);
      memcpy (data + 8, (gchar *) "adtl", 4);
      data += 12;
      if (wavenc->labls)
        gst_wavenc_write_labls (&data, wavenc->labls);
      if (wavenc->notes)
        gst_wavenc_write_notes (&data, wavenc->notes);
    }
  }

  /* free resources */
  if (toc)
    gst_toc_unref (toc);
  if (wavenc->cues)
    g_list_free_full (wavenc->cues, g_free);
  if (wavenc->labls)
    g_list_free_full (wavenc->labls, g_free);
  if (wavenc->notes)
    g_list_free_full (wavenc->notes, g_free);

  gst_buffer_unmap (buf, &map);
  wavenc->meta_length += gst_buffer_get_size (buf);

  return gst_pad_push (wavenc->srcpad, buf);

invalid_toc:
  /* nothing usable to write; skip the chunk without erroring the flow */
  gst_toc_unref (toc);
  return GST_FLOW_OK;
}
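/* A minimal, self-contained sketch of the "cue " chunk layout written above:
 * a 12-byte header (fourcc, payload size, cue count) followed by one 24-byte
 * record per cue point.  The helper name and the empty records are
 * illustrative, not wavenc API. */
#include <string.h>
#include <gst/gst.h>

static GstBuffer *
example_make_cue_chunk (guint32 ncues)
{
  guint32 cues_size = 24 * ncues;
  GstBuffer *buf = gst_buffer_new_and_alloc (12 + cues_size);
  GstMapInfo map;

  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  memset (map.data, 0, map.size);
  memcpy (map.data, "cue ", 4);
  GST_WRITE_UINT32_LE (map.data + 4, 4 + cues_size);    /* chunk payload size */
  GST_WRITE_UINT32_LE (map.data + 8, ncues);    /* number of cue points */
  /* the 24-byte cue point records would be filled in here */
  gst_buffer_unmap (buf, &map);
  return buf;
}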
static GstFlowReturn
gst_vorbis_enc_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
{
  GstVorbisEnc *vorbisenc;
  GstFlowReturn ret = GST_FLOW_OK;
  GstMapInfo map;
  gfloat *ptr;
  gulong size;
  gulong i, j;
  float **vorbis_buffer;
  GstBuffer *buf1, *buf2, *buf3;

  vorbisenc = GST_VORBISENC (enc);

  if (G_UNLIKELY (!vorbisenc->setup)) {
    if (buffer) {
      GST_DEBUG_OBJECT (vorbisenc, "forcing setup");
      /* should not fail, as setup was done the same way earlier */
      if (!gst_vorbis_enc_setup (vorbisenc))
        return GST_FLOW_ERROR;
    } else {
      /* end draining */
      GST_LOG_OBJECT (vorbisenc, "already drained");
      return GST_FLOW_OK;
    }
  }

  if (!vorbisenc->header_sent) {
    /* Vorbis streams begin with three headers; the initial header (with
       most of the codec setup parameters) which is mandated by the Ogg
       bitstream spec.  The second header holds any comment fields.  The
       third header holds the bitstream codebook.  We merely need to
       make the headers, then pass them to libvorbis one at a time;
       libvorbis handles the additional Ogg bitstream constraints */
    ogg_packet header;
    ogg_packet header_comm;
    ogg_packet header_code;
    GstCaps *caps;
    GList *headers;

    GST_DEBUG_OBJECT (vorbisenc, "creating and sending header packets");
    gst_vorbis_enc_set_metadata (vorbisenc);
    vorbis_analysis_headerout (&vorbisenc->vd, &vorbisenc->vc, &header,
        &header_comm, &header_code);
    vorbis_comment_clear (&vorbisenc->vc);

    /* create header buffers */
    buf1 = gst_vorbis_enc_buffer_from_header_packet (vorbisenc, &header);
    buf2 = gst_vorbis_enc_buffer_from_header_packet (vorbisenc, &header_comm);
    buf3 = gst_vorbis_enc_buffer_from_header_packet (vorbisenc, &header_code);

    /* mark and put on caps */
    caps = gst_caps_new_simple ("audio/x-vorbis",
        "rate", G_TYPE_INT, vorbisenc->frequency,
        "channels", G_TYPE_INT, vorbisenc->channels, NULL);
    caps = _gst_caps_set_buffer_array (caps, "streamheader",
        buf1, buf2, buf3, NULL);

    /* negotiate with these caps */
    GST_DEBUG_OBJECT (vorbisenc, "here are the caps: %" GST_PTR_FORMAT, caps);

    gst_audio_encoder_set_output_format (GST_AUDIO_ENCODER (vorbisenc), caps);
    gst_caps_unref (caps);

    /* store buffers for later pre_push sending */
    headers = NULL;
    GST_DEBUG_OBJECT (vorbisenc, "storing header buffers");
    headers = g_list_prepend (headers, buf3);
    headers = g_list_prepend (headers, buf2);
    headers = g_list_prepend (headers, buf1);
    gst_audio_encoder_set_headers (enc, headers);

    vorbisenc->header_sent = TRUE;
  }

  if (!buffer)
    return gst_vorbis_enc_clear (vorbisenc);

  /* the input buffer is only read, not modified */
  gst_buffer_map (buffer, &map, GST_MAP_READ);

  /* data to encode */
  size = map.size / (vorbisenc->channels * sizeof (float));
  ptr = (gfloat *) map.data;

  /* expose the buffer to submit data */
  vorbis_buffer = vorbis_analysis_buffer (&vorbisenc->vd, size);

  /* deinterleave samples, write the buffer data */
  if (vorbisenc->channels < 2 || vorbisenc->channels > 8) {
    for (i = 0; i < size; i++) {
      for (j = 0; j < vorbisenc->channels; j++) {
        vorbis_buffer[j][i] = *ptr++;
      }
    }
  } else {
    gint i, j;

    /* Reorder */
    for (i = 0; i < size; i++) {
      for (j = 0; j < vorbisenc->channels; j++) {
        vorbis_buffer[gst_vorbis_reorder_map[vorbisenc->channels - 1][j]][i] =
            ptr[j];
      }
      ptr += vorbisenc->channels;
    }
  }

  /* tell the library how much we actually submitted */
  vorbis_analysis_wrote (&vorbisenc->vd, size);
  gst_buffer_unmap (buffer, &map);

  GST_LOG_OBJECT (vorbisenc, "wrote %lu samples to vorbis", size);

  vorbisenc->samples_in += size;

  ret = gst_vorbis_enc_output_buffers (vorbisenc);

  return ret;
}
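/* Sketch of the deinterleave step above, independent of libvorbis: copy
 * interleaved float frames into per-channel planes through a reorder map
 * (the identity map for mono/stereo).  The function name is illustrative. */
#include <glib.h>

static void
example_deinterleave (const gfloat * in, gfloat ** planes, gulong frames,
    gint channels, const gint * reorder)
{
  gulong i;
  gint j;

  for (i = 0; i < frames; i++)
    for (j = 0; j < channels; j++)
      planes[reorder[j]][i] = in[i * channels + j];
}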
static GstFlowReturn
kms_pointer_detector_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  KmsPointerDetector *pointerdetector = KMS_POINTER_DETECTOR (filter);
  GstMapInfo info;
  double min_Bhattacharyya = 1.0, bhattacharyya = 1, bhattacharyya2 = 1,
      bhattacharyya3 = 1;
  int i = 0;

  pointerdetector->frameSize = cvSize (frame->info.width, frame->info.height);
  kms_pointer_detector_initialize_images (pointerdetector, frame);

  /* the filter draws overlays into the frame in place, so the buffer has
   * to be mapped for reading and writing */
  gst_buffer_map (frame->buffer, &info, GST_MAP_READWRITE);
  pointerdetector->cvImage->imageData = (char *) info.data;

  if ((pointerdetector->iteration > FRAMES_TO_RESET)
      && (pointerdetector->state != CAPTURING_SECOND_HIST)) {
    get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect1,
        pointerdetector->trackinRectSize, pointerdetector->histSetUpRef);
    pointerdetector->histRefCapturesCounter = 0;
    pointerdetector->secondHistCapturesCounter = 0;
    pointerdetector->state = CAPTURING_REF_HIST;
    pointerdetector->colorRect1 = WHITE;
    pointerdetector->colorRect2 = WHITE;
    pointerdetector->iteration = 6;
  }
  if (pointerdetector->iteration == 5) {
    get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect1,
        pointerdetector->trackinRectSize, pointerdetector->histSetUpRef);
    pointerdetector->state = CAPTURING_REF_HIST;
    goto end;
  }
  if (pointerdetector->iteration < 6)
    goto end;

  get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect1,
      pointerdetector->trackinRectSize, pointerdetector->histSetUp1);
  bhattacharyya2 = cvCompareHist (pointerdetector->histSetUp1,
      pointerdetector->histSetUpRef, CV_COMP_BHATTACHARYYA);
  if ((bhattacharyya2 >= COMPARE_THRESH_SECOND_HIST)
      && (pointerdetector->state == CAPTURING_REF_HIST)) {
    pointerdetector->histRefCapturesCounter++;
    if (pointerdetector->histRefCapturesCounter > 20) {
      pointerdetector->histRefCapturesCounter = 0;
      pointerdetector->colorRect1 = CV_RGB (0, 255, 0);
      pointerdetector->state = CAPTURING_SECOND_HIST;
    }
  }
  if (pointerdetector->state == CAPTURING_SECOND_HIST) {
    get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect2,
        pointerdetector->trackinRectSize, pointerdetector->histSetUp2);
    bhattacharyya3 = cvCompareHist (pointerdetector->histSetUp1,
        pointerdetector->histSetUp2, CV_COMP_BHATTACHARYYA);
    if (bhattacharyya3 < COMPARE_THRESH_2_RECT) {
      pointerdetector->secondHistCapturesCounter++;
      if (pointerdetector->secondHistCapturesCounter > 15) {
        pointerdetector->secondHistCapturesCounter = 0;
        pointerdetector->state = BOTH_HIST_SIMILAR;
        pointerdetector->colorRect2 = CV_RGB (0, 255, 0);
        cvCopyHist (pointerdetector->histSetUp2, &pointerdetector->histModel);
        pointerdetector->upCornerFinalRect.x = 10;
        pointerdetector->upCornerFinalRect.y = 10;
        pointerdetector->histRefCapturesCounter = 0;
        pointerdetector->secondHistCapturesCounter = 0;
      }
    }
  }

  for (i = 0; i < pointerdetector->numOfRegions; i++) {
    int horizOffset = pointerdetector->upCornerFinalRect.x +
        pointerdetector->windowScale * (rand () %
        pointerdetector->trackinRectSize.width -
        pointerdetector->trackinRectSize.width / 2);
    int vertOffset = pointerdetector->upCornerFinalRect.y +
        pointerdetector->windowScale * (rand () %
        pointerdetector->trackinRectSize.height -
        pointerdetector->trackinRectSize.height / 2);

    pointerdetector->trackingPoint1Aux.x = horizOffset;
    pointerdetector->trackingPoint1Aux.y = vertOffset;
    pointerdetector->trackingPoint2Aux.x =
        horizOffset + pointerdetector->trackinRectSize.width;
    pointerdetector->trackingPoint2Aux.y =
        vertOffset + pointerdetector->trackinRectSize.height;

    if ((horizOffset > 0)
        && (pointerdetector->trackingPoint2Aux.x <
            pointerdetector->cvImage->width)
        && (vertOffset > 0)
        && (pointerdetector->trackingPoint2Aux.y <
            pointerdetector->cvImage->height)) {
      if (pointerdetector->show_debug_info)
        cvRectangle (pointerdetector->cvImage,
            pointerdetector->trackingPoint1Aux,
            pointerdetector->trackingPoint2Aux, CV_RGB (0, 255, 0), 1, 8, 0);

      cvSetImageROI (pointerdetector->cvImage,
          cvRect (pointerdetector->trackingPoint1Aux.x,
              pointerdetector->trackingPoint1Aux.y,
              pointerdetector->trackinRectSize.width,
              pointerdetector->trackinRectSize.height));
      cvCopy (pointerdetector->cvImage, pointerdetector->cvImageAux1, 0);
      cvResetImageROI (pointerdetector->cvImage);

      calc_histogram (pointerdetector->cvImageAux1,
          pointerdetector->histCompare);
      bhattacharyya = cvCompareHist (pointerdetector->histModel,
          pointerdetector->histCompare, CV_COMP_BHATTACHARYYA);

      if ((bhattacharyya < min_Bhattacharyya)
          && (bhattacharyya < COMPARE_THRESH_HIST_REF)) {
        min_Bhattacharyya = bhattacharyya;
        pointerdetector->trackingPoint1 = pointerdetector->trackingPoint1Aux;
        pointerdetector->trackingPoint2 = pointerdetector->trackingPoint2Aux;
      }
    }
  }

  cvRectangle (pointerdetector->cvImage, pointerdetector->upCornerRect1,
      pointerdetector->downCornerRect1, pointerdetector->colorRect1, 1, 8, 0);
  cvRectangle (pointerdetector->cvImage, pointerdetector->upCornerRect2,
      pointerdetector->downCornerRect2, pointerdetector->colorRect2, 1, 8, 0);

  if (min_Bhattacharyya < 0.95) {
    pointerdetector->windowScale = pointerdetector->windowScaleRef;
  } else {
    pointerdetector->windowScale = pointerdetector->cvImage->width / 8;
  }

  CvPoint finalPointerPositionAux;

  finalPointerPositionAux.x = pointerdetector->upCornerFinalRect.x +
      pointerdetector->trackinRectSize.width / 2;
  finalPointerPositionAux.y = pointerdetector->upCornerFinalRect.y +
      pointerdetector->trackinRectSize.height / 2;
  if (abs (pointerdetector->finalPointerPosition.x -
          finalPointerPositionAux.x) < 55 ||
      abs (pointerdetector->finalPointerPosition.y -
          finalPointerPositionAux.y) < 55) {
    finalPointerPositionAux.x = (finalPointerPositionAux.x +
        pointerdetector->finalPointerPosition.x) / 2;
    finalPointerPositionAux.y = (finalPointerPositionAux.y +
        pointerdetector->finalPointerPosition.y) / 2;
  }
  pointerdetector->upCornerFinalRect = pointerdetector->trackingPoint1;
  pointerdetector->downCornerFinalRect = pointerdetector->trackingPoint2;
  pointerdetector->finalPointerPosition.x = finalPointerPositionAux.x;
  pointerdetector->finalPointerPosition.y = finalPointerPositionAux.y;

  cvCircle (pointerdetector->cvImage, pointerdetector->finalPointerPosition,
      10.0, WHITE, -1, 8, 0);

  kms_pointer_detector_check_pointer_position (pointerdetector);

end:
  pointerdetector->iteration++;
  gst_buffer_unmap (frame->buffer, &info);

  return GST_FLOW_OK;
}
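/* Sketch of the tracking core above using the same legacy OpenCV 1.x C API:
 * compare a model histogram against candidate windows and keep the lowest
 * Bhattacharyya distance below a threshold.  EXAMPLE_THRESH and the helper
 * name are illustrative, not constants of the element. */
#include <opencv/cv.h>

#define EXAMPLE_THRESH 0.6

static int
example_best_match (CvHistogram * model, CvHistogram ** candidates, int n,
    double *best_dist)
{
  int i, best = -1;
  double d, min_d = 1.0;

  for (i = 0; i < n; i++) {
    d = cvCompareHist (model, candidates[i], CV_COMP_BHATTACHARYYA);
    if (d < min_d && d < EXAMPLE_THRESH) {
      min_d = d;
      best = i;
    }
  }
  *best_dist = min_d;
  return best;                  /* index of the best window, or -1 */
}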
static GstFlowReturn
gst_png_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);
  GstMapInfo map;
  GstByteReader reader;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 signature;
  guint width = 0, height = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
    goto beach;

  if (signature != PNG_SIGNATURE) {
    for (;;) {
      guint offset;

      offset = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
          0x89504E47, 0, gst_byte_reader_get_remaining (&reader));

      if (offset == (guint) -1) {
        *skipsize = gst_byte_reader_get_remaining (&reader) - 4;
        goto beach;
      }

      gst_byte_reader_skip (&reader, offset);

      if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
        goto beach;

      if (signature == PNG_SIGNATURE) {
        /* We're skipping, go out, we'll be back */
        *skipsize = gst_byte_reader_get_pos (&reader);
        goto beach;
      }

      gst_byte_reader_skip (&reader, 4);
    }
  }

  gst_byte_reader_skip (&reader, 8);

  for (;;) {
    guint32 length;
    guint32 code;

    if (!gst_byte_reader_get_uint32_be (&reader, &length))
      goto beach;
    if (!gst_byte_reader_get_uint32_le (&reader, &code))
      goto beach;

    GST_TRACE_OBJECT (parse, "%" GST_FOURCC_FORMAT " chunk, %u bytes",
        GST_FOURCC_ARGS (code), length);

    if (code == GST_MAKE_FOURCC ('I', 'H', 'D', 'R')) {
      if (!gst_byte_reader_get_uint32_be (&reader, &width))
        goto beach;
      if (!gst_byte_reader_get_uint32_be (&reader, &height))
        goto beach;
      length -= 8;
    } else if (code == GST_MAKE_FOURCC ('I', 'D', 'A', 'T')) {
      gst_base_parse_set_min_frame_size (parse,
          gst_byte_reader_get_pos (&reader) + 4 + length + 12);
    }

    if (!gst_byte_reader_skip (&reader, length + 4))
      goto beach;

    if (code == GST_MAKE_FOURCC ('I', 'E', 'N', 'D')) {
      /* the start code and at least 2 empty chunks (IHDR and IEND) */
      gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);

      if (pngparse->width != width || pngparse->height != height) {
        GstCaps *caps, *sink_caps;

        pngparse->height = height;
        pngparse->width = width;

        caps = gst_caps_new_simple ("image/png",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);

        sink_caps =
            gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (pngparse));
        if (sink_caps) {
          GstStructure *st;
          gint fr_num, fr_denom;

          st = gst_caps_get_structure (sink_caps, 0);
          if (st && gst_structure_get_fraction (st, "framerate",
                  &fr_num, &fr_denom)) {
            gst_caps_set_simple (caps,
                "framerate", GST_TYPE_FRACTION, fr_num, fr_denom, NULL);
          } else {
            GST_WARNING_OBJECT (pngparse, "No framerate set");
          }

          gst_caps_unref (sink_caps);
        }

        if (!gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps))
          ret = GST_FLOW_NOT_NEGOTIATED;

        gst_caps_unref (caps);

        if (ret != GST_FLOW_OK)
          goto beach;
      }

      gst_buffer_unmap (frame->buffer, &map);
      return gst_base_parse_finish_frame (parse, frame,
          gst_byte_reader_get_pos (&reader));
    }
  }

beach:
  gst_buffer_unmap (frame->buffer, &map);

  return ret;
}
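/* Self-contained sketch of the chunk walk above: pull width/height out of a
 * PNG IHDR with GstByteReader.  The chunk layout (big-endian length, fourcc,
 * payload, CRC) is from the PNG spec; the helper name is illustrative. */
#include <gst/base/gstbytereader.h>

static gboolean
example_png_dimensions (const guint8 * data, gsize len,
    guint32 * width, guint32 * height)
{
  GstByteReader r;
  guint32 length, code;

  gst_byte_reader_init (&r, data, len);
  if (!gst_byte_reader_skip (&r, 8))    /* 8-byte signature */
    return FALSE;
  while (gst_byte_reader_get_uint32_be (&r, &length) &&
      gst_byte_reader_get_uint32_le (&r, &code)) {
    if (code == GST_MAKE_FOURCC ('I', 'H', 'D', 'R'))
      return gst_byte_reader_get_uint32_be (&r, width) &&
          gst_byte_reader_get_uint32_be (&r, height);
    if (!gst_byte_reader_skip (&r, length + 4))         /* payload + CRC */
      return FALSE;
  }
  return FALSE;
}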
void GStreamerReader::ReadAndPushData(guint aLength)
{
  MediaResource* resource = mDecoder->GetResource();
  NS_ASSERTION(resource, "Decoder has no media resource");
  int64_t offset1 = resource->Tell();
  unused << offset1;
  nsresult rv = NS_OK;

  GstBuffer* buffer = gst_buffer_new_and_alloc(aLength);
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_WRITE);
  guint8 *data = info.data;
#else
  guint8* data = GST_BUFFER_DATA(buffer);
#endif
  uint32_t size = 0, bytesRead = 0;
  while(bytesRead < aLength) {
    rv = resource->Read(reinterpret_cast<char*>(data + bytesRead),
        aLength - bytesRead, &size);
    if (NS_FAILED(rv) || size == 0)
      break;

    bytesRead += size;
  }

  int64_t offset2 = resource->Tell();
  unused << offset2;

#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
  gst_buffer_set_size(buffer, bytesRead);
#else
  GST_BUFFER_SIZE(buffer) = bytesRead;
#endif

  GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
  if (ret != GST_FLOW_OK) {
    LOG(PR_LOG_ERROR, "ReadAndPushData push ret %s(%d)",
        gst_flow_get_name(ret), ret);
  }

  if (NS_FAILED(rv)) {
    /* Terminate the stream if there is an error in reading */
    LOG(PR_LOG_ERROR, "ReadAndPushData read error, rv=%x", rv);
    gst_app_src_end_of_stream(mSource);
  } else if (bytesRead < aLength) {
    /* If we read less than what we wanted, we reached the end */
    LOG(PR_LOG_WARNING,
        "ReadAndPushData read underflow, bytesRead=%u, aLength=%u, "
        "offset(%lld,%lld)", bytesRead, aLength, offset1, offset2);
    gst_app_src_end_of_stream(mSource);
  }

  gst_buffer_unref(buffer);

  /* Ensure offset change is consistent in this function.
   * If there are other stream operations on another thread at the same time,
   * it will disturb the GStreamer state machine.
   */
  MOZ_ASSERT(offset1 + bytesRead == offset2);
}
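/* Plain-C sketch of the read-and-push pattern above: fill a freshly
 * allocated buffer from a POSIX fd and hand it to appsrc.  read(2) is an
 * illustrative stand-in for MediaResource::Read. */
#include <gst/app/gstappsrc.h>
#include <unistd.h>

static GstFlowReturn
example_read_and_push (GstAppSrc * src, int fd, guint length)
{
  GstBuffer *buf = gst_buffer_new_and_alloc (length);
  GstMapInfo info;
  gsize filled = 0;
  gssize n;

  gst_buffer_map (buf, &info, GST_MAP_WRITE);
  while (filled < length) {
    n = read (fd, info.data + filled, length - filled);
    if (n <= 0)
      break;                    /* EOF or error: push what we have */
    filled += n;
  }
  gst_buffer_unmap (buf, &info);
  gst_buffer_set_size (buf, filled);

  /* gst_app_src_push_buffer() takes ownership of buf */
  return gst_app_src_push_buffer (src, buf);
}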
static gboolean
gst_jpeg_parse_read_header (GstJpegParse * parse, GstBuffer * buffer)
{
  GstByteReader reader;
  guint8 marker = 0;
  gboolean foundSOF = FALSE;
  GstMapInfo map;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  if (!gst_byte_reader_peek_uint8 (&reader, &marker))
    goto error;

  while (marker == 0xff) {
    if (!gst_byte_reader_skip (&reader, 1))
      goto error;

    if (!gst_byte_reader_get_uint8 (&reader, &marker))
      goto error;

    GST_DEBUG_OBJECT (parse, "marker = %x", marker);

    switch (marker) {
      case SOS:                /* start of scan (begins compressed data) */
        goto done;

      case SOI:
        break;

      case DRI:
        if (!gst_byte_reader_skip (&reader, 4))         /* fixed size */
          goto error;
        break;

      case COM:
        if (!gst_jpeg_parse_com (parse, &reader))
          goto error;
        break;

      case APP1:
        if (!gst_jpeg_parse_app1 (parse, &reader))
          goto error;
        break;

      case DHT:
      case DQT:
        /* Ignore these codes */
        if (!gst_jpeg_parse_skip_marker (parse, &reader, marker))
          goto error;
        break;

      case SOF2:
        parse->priv->interlaced = TRUE;
        /* fall through */
      case SOF0:
        /* parse Start Of Frame */
        if (!gst_jpeg_parse_sof (parse, &reader))
          goto error;

        foundSOF = TRUE;
        goto done;

      default:
        if (marker == JPG || (marker >= JPG0 && marker <= JPG13)) {
          /* we'd like to remove them from the buffer */
          if (!gst_jpeg_parse_remove_marker (parse, &reader, marker, buffer))
            goto error;
        } else if (marker >= APP0 && marker <= APP15) {
          if (!gst_jpeg_parse_skip_marker (parse, &reader, marker))
            goto error;
        } else
          goto unhandled;
    }

    if (!gst_byte_reader_peek_uint8 (&reader, &marker))
      goto error;
  }

done:
  gst_buffer_unmap (buffer, &map);

  return foundSOF;

  /* ERRORS */
error:
  {
    GST_WARNING_OBJECT (parse,
        "Error parsing image header (need more than %u bytes available)",
        gst_byte_reader_get_remaining (&reader));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
unhandled:
  {
    GST_WARNING_OBJECT (parse, "unhandled marker %x, leaving", marker);
    /* Not SOF or SOI.  Must not be a JPEG file (or file pointer
     * is placed wrong).  In either case, it's an error. */
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
}
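/* Minimal sketch of the marker loop above: walk 0xff-prefixed JPEG markers
 * with GstByteReader and report the first SOF0/SOF2 (0xc0/0xc2, from the
 * JPEG spec).  Simplifying assumption: every marker other than SOI carries
 * a 2-byte length field, which ignores RST/EOI. */
#include <gst/base/gstbytereader.h>

static gboolean
example_find_sof (const guint8 * data, gsize len, guint8 * sof)
{
  GstByteReader r;
  guint8 marker;
  guint16 seglen;

  gst_byte_reader_init (&r, data, len);
  while (gst_byte_reader_get_uint8 (&r, &marker)) {
    if (marker != 0xff)
      return FALSE;             /* lost sync */
    if (!gst_byte_reader_get_uint8 (&r, &marker))
      return FALSE;
    if (marker == 0xc0 || marker == 0xc2) {
      *sof = marker;
      return TRUE;
    }
    if (marker == 0xd8)         /* SOI has no payload */
      continue;
    if (!gst_byte_reader_get_uint16_be (&r, &seglen) ||
        !gst_byte_reader_skip (&r, seglen - 2))
      return FALSE;
  }
  return FALSE;
}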
static GstFlowReturn
gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstAmcVideoDec *self;
  gint idx;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  guint offset = 0;
  GstClockTime timestamp, duration, timestamp_offset = 0;
  GstMapInfo minfo;
  GError *err = NULL;

  memset (&minfo, 0, sizeof (minfo));

  self = GST_AMC_VIDEO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Handling frame");

  if (!self->started) {
    GST_ERROR_OBJECT (self, "Codec not started yet");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (self->flushing)
    goto flushing;

  if (self->downstream_flow_ret != GST_FLOW_OK)
    goto downstream_error;

  timestamp = frame->pts;
  duration = frame->duration;

  gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ);

  while (offset < minfo.size) {
    /* Make sure to release the base class stream lock, otherwise
     * _loop() can't call _finish_frame() and we might block forever
     * because no input buffers are released */
    GST_VIDEO_DECODER_STREAM_UNLOCK (self);
    /* Wait at most 100ms here, some codecs don't fail dequeueing if
     * the codec is flushing, causing deadlocks during shutdown */
    idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
    GST_VIDEO_DECODER_STREAM_LOCK (self);

    if (idx < 0) {
      if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
        g_clear_error (&err);
        goto flushing;
      }

      switch (idx) {
        case INFO_TRY_AGAIN_LATER:
          GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
          continue;             /* next try */
          break;
        case G_MININT:
          GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
          goto dequeue_error;
        default:
          g_assert_not_reached ();
          break;
      }

      continue;
    }

    if (self->flushing) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
      goto flushing;
    }

    if (self->downstream_flow_ret != GST_FLOW_OK) {
      memset (&buffer_info, 0, sizeof (buffer_info));
      gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
      if (err && !self->flushing)
        GST_ELEMENT_WARNING_FROM_ERROR (self, err);
      g_clear_error (&err);
      goto downstream_error;
    }

    /* Now handle the frame */

    /* Copy the buffer content in chunks of size as requested
     * by the port */
    buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
    if (!buf)
      goto failed_to_get_input_buffer;

    memset (&buffer_info, 0, sizeof (buffer_info));
    buffer_info.offset = 0;
    buffer_info.size = MIN (minfo.size - offset, buf->size);
    gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
        buffer_info.size);

    orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);

    gst_amc_buffer_free (buf);
    buf = NULL;

    /* Interpolate timestamps if we're passing the buffer
     * in multiple chunks */
    if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
      timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
    }

    if (timestamp != GST_CLOCK_TIME_NONE) {
      buffer_info.presentation_time_us =
          gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
      self->last_upstream_ts = timestamp + timestamp_offset;
    }
    if (duration != GST_CLOCK_TIME_NONE)
      self->last_upstream_ts += duration;

    if (offset == 0) {
      BufferIdentification *id =
          buffer_identification_new (timestamp + timestamp_offset);
      if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
        buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
      gst_video_codec_frame_set_user_data (frame, id,
          (GDestroyNotify) buffer_identification_free);
    }

    offset += buffer_info.size;
    GST_DEBUG_OBJECT (self,
        "Queueing buffer %d: size %d time %" G_GINT64_FORMAT
        " flags 0x%08x", idx, buffer_info.size,
        buffer_info.presentation_time_us, buffer_info.flags);
    if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
            &err)) {
      if (self->flushing) {
        g_clear_error (&err);
        goto flushing;
      }
      goto queue_error;
    }
    self->drained = FALSE;
  }

  gst_buffer_unmap (frame->input_buffer, &minfo);
  gst_video_codec_frame_unref (frame);

  return self->downstream_flow_ret;

downstream_error:
  {
    GST_ERROR_OBJECT (self, "Downstream returned %s",
        gst_flow_get_name (self->downstream_flow_ret));
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return self->downstream_flow_ret;
  }
failed_to_get_input_buffer:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
queue_error:
  {
    GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
    if (minfo.data)
      gst_buffer_unmap (frame->input_buffer, &minfo);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_FLUSHING;
  }
}
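/* Sketch of the timestamp interpolation used above when one input frame is
 * split across several codec input buffers: the consumed byte offset is
 * scaled against the frame duration.  The helper name is illustrative. */
#include <gst/gst.h>

static GstClockTime
example_chunk_ts (GstClockTime pts, GstClockTime duration,
    gsize offset, gsize total_size)
{
  if (offset == 0 || duration == GST_CLOCK_TIME_NONE)
    return pts;
  /* offset/total_size of the duration has already been consumed */
  return pts + gst_util_uint64_scale (offset, duration, total_size);
}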
GstFlowReturn ofGstVideoUtils::process_sample(shared_ptr<GstSample> sample){
  GstBuffer * _buffer = gst_sample_get_buffer(sample.get());

#ifdef OF_USE_GST_GL
  if (gst_buffer_map (_buffer, &mapinfo, (GstMapFlags)(GST_MAP_READ | GST_MAP_GL))){
    if (gst_is_gl_memory (mapinfo.memory)) {
      bufferQueue.push(sample);
      gst_buffer_unmap(_buffer, &mapinfo);
      bool newTexture=false;
      std::unique_lock<std::mutex> lock(mutex);
      while(bufferQueue.size()>2){
        backBuffer = bufferQueue.front();
        bufferQueue.pop();
        newTexture = true;
      }
      if(newTexture){
        GstBuffer * _buffer = gst_sample_get_buffer(backBuffer.get());
        gst_buffer_map (_buffer, &mapinfo, (GstMapFlags)(GST_MAP_READ | GST_MAP_GL));
        auto texId = *(guint*)mapinfo.data;
        backTexture.setUseExternalTextureID(texId);
        ofTextureData & texData = backTexture.getTextureData();
        texData.bAllocated = true;
        texData.bFlipTexture = false;
        texData.glInternalFormat = GL_RGBA;
        texData.height = getHeight();
        texData.width = getWidth();
        texData.magFilter = GL_LINEAR;
        texData.minFilter = GL_LINEAR;
        texData.tex_h = getHeight();
        texData.tex_w = getWidth();
        texData.tex_u = 1;
        texData.tex_t = 1;
        texData.textureID = texId;
        texData.textureTarget = GL_TEXTURE_2D;
        texData.wrapModeHorizontal = GL_CLAMP_TO_EDGE;
        texData.wrapModeVertical = GL_CLAMP_TO_EDGE;
        bBackPixelsChanged=true;
        gst_buffer_unmap(_buffer,&mapinfo);
      }
      return GST_FLOW_OK;
    }
  }
#endif

  // video frame has normal texture
  gst_buffer_map (_buffer, &mapinfo, GST_MAP_READ);
  guint size = mapinfo.size;
  int stride = 0;
  if(pixels.isAllocated() && pixels.getTotalBytes()!=(int)size){
    GstVideoInfo v_info = getVideoInfo(sample.get());
    stride = v_info.stride[0];
    if(stride == (pixels.getWidth() * pixels.getBytesPerPixel())) {
      ofLogError("ofGstVideoUtils") << "buffer_cb(): error on new buffer, buffer size: " << size
          << " != init size: " << pixels.getTotalBytes();
      gst_buffer_unmap(_buffer, &mapinfo);
      return GST_FLOW_ERROR;
    }
  }
  mutex.lock();
  if(!copyPixels){
    backBuffer = sample;
  }

  if(pixels.isAllocated()){
    if(stride > 0) {
      if(pixels.getPixelFormat() == OF_PIXELS_I420){
        GstVideoInfo v_info = getVideoInfo(sample.get());
        std::vector<int> strides{v_info.stride[0],v_info.stride[1],v_info.stride[2]};
        backPixels.setFromAlignedPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getPixelFormat(),strides);
      } else {
        backPixels.setFromAlignedPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getPixelFormat(),stride);
      }
    } else if(!copyPixels){
      backPixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getPixelFormat());
      eventPixels.setFromExternalPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getPixelFormat());
    }else{
      backPixels.setFromPixels(mapinfo.data,pixels.getWidth(),pixels.getHeight(),pixels.getPixelFormat());
    }

    bBackPixelsChanged=true;
    mutex.unlock();
    if(stride == 0) {
      ofNotifyEvent(prerollEvent,eventPixels);
    }
  }else{
    mutex.unlock();
    if(appsink){
      appsink->on_stream_prepared();
    }else{
      GstVideoInfo v_info = getVideoInfo(sample.get());
      allocate(v_info.width,v_info.height,getOFFormat(v_info.finfo->format));
    }
  }
  gst_buffer_unmap(_buffer, &mapinfo);
  return GST_FLOW_OK;
}
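/* Plain-C sketch of the stride handling above: derive per-plane strides for
 * a sample's video format from its caps via GstVideoInfo.  The helper name
 * is illustrative. */
#include <gst/video/video.h>

static gboolean
example_get_strides (GstSample * sample, gint strides[GST_VIDEO_MAX_PLANES])
{
  GstVideoInfo info;
  GstCaps *caps = gst_sample_get_caps (sample);
  guint i;

  if (!caps || !gst_video_info_from_caps (&info, caps))
    return FALSE;
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&info); i++)
    strides[i] = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
  return TRUE;
}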
static GstFlowReturn
gst_avdtp_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
    GstBuffer ** outbuf)
{
  GstAvdtpSrc *avdtpsrc = GST_AVDTP_SRC (bsrc);
  GstBuffer *buf = NULL;
  GstMapInfo info;
  int ret;

  if (g_atomic_int_get (&avdtpsrc->unlocked))
    return GST_FLOW_FLUSHING;

  /* We don't operate in GST_FORMAT_BYTES, so offset is ignored */

  while ((ret = gst_poll_wait (avdtpsrc->poll, GST_CLOCK_TIME_NONE))) {
    if (g_atomic_int_get (&avdtpsrc->unlocked))
      /* We're unlocked, time to gtfo */
      return GST_FLOW_FLUSHING;

    if (ret < 0)
      /* Something went wrong */
      goto read_error;

    if (ret > 0)
      /* Got some data */
      break;
  }

  ret = GST_BASE_SRC_CLASS (parent_class)->alloc (bsrc, offset, length,
      outbuf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto alloc_failed;

  buf = *outbuf;

  gst_buffer_map (buf, &info, GST_MAP_WRITE);

  ret = read (avdtpsrc->pfd.fd, info.data, length);

  if (ret < 0)
    goto read_error;
  else if (ret == 0) {
    GST_INFO_OBJECT (avdtpsrc, "Got EOF on the transport fd");
    goto eof;
  }

  GST_LOG_OBJECT (avdtpsrc, "Read %d bytes", ret);

  gst_buffer_unmap (buf, &info);

  if ((guint) ret < length)
    gst_buffer_set_size (buf, ret);

  *outbuf = buf;

  return GST_FLOW_OK;

alloc_failed:
  {
    GST_DEBUG_OBJECT (bsrc, "alloc failed: %s", gst_flow_get_name (ret));
    return ret;
  }

read_error:
  GST_ERROR_OBJECT (avdtpsrc, "Error while reading audio data: %s",
      strerror (errno));
  /* buf is NULL if we failed before allocating; if the read itself failed
   * the buffer is still mapped */
  if (buf) {
    gst_buffer_unmap (buf, &info);
    gst_buffer_unref (buf);
  }
  return GST_FLOW_ERROR;

eof:
  gst_buffer_unmap (buf, &info);
  gst_buffer_unref (buf);
  return GST_FLOW_EOS;
}
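/* Sketch of the cancellable wait used above: block on a GstPoll until the
 * fd is readable, bailing out when another thread flags "unlocked" (set
 * from the basesrc unlock vfunc).  Structure only; the helper name is
 * illustrative. */
#include <gst/gst.h>

static gboolean
example_wait_readable (GstPoll * poll, gint * unlocked)
{
  gint ret;

  while ((ret = gst_poll_wait (poll, GST_CLOCK_TIME_NONE))) {
    if (g_atomic_int_get (unlocked))
      return FALSE;             /* flushing */
    if (ret < 0)
      return FALSE;             /* poll error */
    if (ret > 0)
      return TRUE;              /* data ready */
  }
  return FALSE;
}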
static GstFlowReturn
gst_spectrum_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstSpectrum *spectrum = GST_SPECTRUM (trans);
  guint rate = GST_AUDIO_FILTER_RATE (spectrum);
  guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);
  guint bps = GST_AUDIO_FILTER_BPS (spectrum);
  guint bpf = GST_AUDIO_FILTER_BPF (spectrum);
  guint output_channels = spectrum->multi_channel ? channels : 1;
  guint c;
  gfloat max_value = (1UL << ((bps << 3) - 1)) - 1;
  guint bands = spectrum->bands;
  guint nfft = 2 * bands - 2;
  guint input_pos;
  gfloat *input;
  GstMapInfo map;
  const guint8 *data;
  gsize size;
  guint fft_todo, msg_todo, block_size;
  gboolean have_full_interval;
  GstSpectrumChannel *cd;
  GstSpectrumInputData input_data;

  g_mutex_lock (&spectrum->lock);
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  GST_LOG_OBJECT (spectrum, "input size: %" G_GSIZE_FORMAT " bytes", size);

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (spectrum, "Discontinuity detected -- flushing");
    gst_spectrum_flush (spectrum);
  }

  /* If we don't have a FFT context yet (or it was reset due to parameter
   * changes) get one and allocate memory for everything */
  if (spectrum->channel_data == NULL) {
    GST_DEBUG_OBJECT (spectrum, "allocating for bands %u", bands);

    gst_spectrum_alloc_channel_data (spectrum);

    /* number of sample frames we process before posting a message
     * interval is in ns */
    spectrum->frames_per_interval =
        gst_util_uint64_scale (spectrum->interval, rate, GST_SECOND);
    spectrum->frames_todo = spectrum->frames_per_interval;
    /* rounding error for frames_per_interval in ns,
     * accumulated in accumulated_error */
    spectrum->error_per_interval = (spectrum->interval * rate) % GST_SECOND;
    if (spectrum->frames_per_interval == 0)
      spectrum->frames_per_interval = 1;

    GST_INFO_OBJECT (spectrum, "interval %" GST_TIME_FORMAT ", fpi %"
        G_GUINT64_FORMAT ", error %" GST_TIME_FORMAT,
        GST_TIME_ARGS (spectrum->interval), spectrum->frames_per_interval,
        GST_TIME_ARGS (spectrum->error_per_interval));

    spectrum->input_pos = 0;

    gst_spectrum_flush (spectrum);
  }

  if (spectrum->num_frames == 0)
    spectrum->message_ts = GST_BUFFER_TIMESTAMP (buffer);

  input_pos = spectrum->input_pos;
  input_data = spectrum->input_data;

  while (size >= bpf) {
    /* run input_data for a chunk of data */
    fft_todo = nfft - (spectrum->num_frames % nfft);
    msg_todo = spectrum->frames_todo - spectrum->num_frames;
    GST_LOG_OBJECT (spectrum,
        "message frames todo: %u, fft frames todo: %u, input frames %"
        G_GSIZE_FORMAT, msg_todo, fft_todo, (size / bpf));
    block_size = msg_todo;
    if (block_size > (size / bpf))
      block_size = (size / bpf);
    if (block_size > fft_todo)
      block_size = fft_todo;

    for (c = 0; c < output_channels; c++) {
      cd = &spectrum->channel_data[c];
      input = cd->input;
      /* Move the current frames into our ringbuffers */
      input_data (data + c * bps, input, block_size, channels, max_value,
          input_pos, nfft);
    }

    /* advance the input data pointer */
    data += block_size * bpf;
    size -= block_size * bpf;
    input_pos = (input_pos + block_size) % nfft;
    spectrum->num_frames += block_size;

    have_full_interval = (spectrum->num_frames == spectrum->frames_todo);

    GST_LOG_OBJECT (spectrum,
        "size: %" G_GSIZE_FORMAT ", do-fft = %d, do-message = %d", size,
        (spectrum->num_frames % nfft == 0), have_full_interval);

    /* If we have enough frames for an FFT or we have all frames required for
     * the interval and we haven't run a FFT, then run an FFT */
    if ((spectrum->num_frames % nfft == 0) ||
        (have_full_interval && !spectrum->num_fft)) {
      for (c = 0; c < output_channels; c++) {
        cd = &spectrum->channel_data[c];
        gst_spectrum_run_fft (spectrum, cd, input_pos);
      }
      spectrum->num_fft++;
    }

    /* Do we have the FFTs for one interval? */
    if (have_full_interval) {
      GST_DEBUG_OBJECT (spectrum, "nfft: %u frames: %" G_GUINT64_FORMAT
          " fpi: %" G_GUINT64_FORMAT " error: %" GST_TIME_FORMAT,
          nfft, spectrum->num_frames, spectrum->frames_per_interval,
          GST_TIME_ARGS (spectrum->accumulated_error));

      spectrum->frames_todo = spectrum->frames_per_interval;
      if (spectrum->accumulated_error >= GST_SECOND) {
        spectrum->accumulated_error -= GST_SECOND;
        spectrum->frames_todo++;
      }
      spectrum->accumulated_error += spectrum->error_per_interval;

      if (spectrum->post_messages) {
        GstMessage *m;

        for (c = 0; c < output_channels; c++) {
          cd = &spectrum->channel_data[c];
          gst_spectrum_prepare_message_data (spectrum, cd);
        }

        m = gst_spectrum_message_new (spectrum, spectrum->message_ts,
            spectrum->interval);

        gst_element_post_message (GST_ELEMENT (spectrum), m);
      }

      if (GST_CLOCK_TIME_IS_VALID (spectrum->message_ts))
        spectrum->message_ts +=
            gst_util_uint64_scale (spectrum->num_frames, GST_SECOND, rate);

      for (c = 0; c < output_channels; c++) {
        cd = &spectrum->channel_data[c];
        gst_spectrum_reset_message_data (spectrum, cd);
      }
      spectrum->num_frames = 0;
      spectrum->num_fft = 0;
    }
  }

  spectrum->input_pos = input_pos;

  gst_buffer_unmap (buffer, &map);
  g_mutex_unlock (&spectrum->lock);

  g_assert (size == 0);

  return GST_FLOW_OK;
}
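/* Sketch of the message-interval bookkeeping above: a nanosecond interval
 * becomes a whole number of frames plus a per-interval remainder that the
 * element accumulates and pays back one extra frame at a time. */
#include <gst/gst.h>

static void
example_interval_to_frames (GstClockTime interval, guint rate,
    guint64 * frames, guint64 * error)
{
  *frames = gst_util_uint64_scale (interval, rate, GST_SECOND);
  *error = (interval * rate) % GST_SECOND;      /* leftover, in ns units */
  if (*frames == 0)
    *frames = 1;
}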
static GstFlowReturn
gst_flxdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstByteReader reader;
  GstBuffer *input;
  GstMapInfo map_info;
  GstCaps *caps;
  guint available;
  GstFlowReturn res = GST_FLOW_OK;

  GstFlxDec *flxdec;
  FlxHeader *flxh;

  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
  flxdec = (GstFlxDec *) parent;
  g_return_val_if_fail (flxdec != NULL, GST_FLOW_ERROR);

  gst_adapter_push (flxdec->adapter, buf);
  available = gst_adapter_available (flxdec->adapter);
  input = gst_adapter_get_buffer (flxdec->adapter, available);
  if (!gst_buffer_map (input, &map_info, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
        ("%s", "Failed to map buffer"), (NULL));
    goto error;
  }
  gst_byte_reader_init (&reader, map_info.data, map_info.size);

  if (flxdec->state == GST_FLXDEC_READ_HEADER) {
    if (available >= FlxHeaderSize) {
      GstByteReader header;
      GstCaps *templ;

      if (!gst_byte_reader_get_sub_reader (&reader, &header, FlxHeaderSize)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Could not read header"), (NULL));
        goto unmap_input_error;
      }
      gst_adapter_flush (flxdec->adapter, FlxHeaderSize);
      available -= FlxHeaderSize;

      if (!_read_flx_header (flxdec, &header, &flxdec->hdr)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Failed to parse header"), (NULL));
        goto unmap_input_error;
      }

      flxh = &flxdec->hdr;

      /* check header */
      if (flxh->type != FLX_MAGICHDR_FLI &&
          flxh->type != FLX_MAGICHDR_FLC && flxh->type != FLX_MAGICHDR_FLX) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE, (NULL),
            ("not a flx file (type %x)", flxh->type));
        goto unmap_input_error;
      }

      GST_INFO_OBJECT (flxdec, "size : %d", flxh->size);
      GST_INFO_OBJECT (flxdec, "frames : %d", flxh->frames);
      GST_INFO_OBJECT (flxdec, "width : %d", flxh->width);
      GST_INFO_OBJECT (flxdec, "height : %d", flxh->height);
      GST_INFO_OBJECT (flxdec, "depth : %d", flxh->depth);
      GST_INFO_OBJECT (flxdec, "speed : %d", flxh->speed);

      flxdec->next_time = 0;

      if (flxh->type == FLX_MAGICHDR_FLI) {
        flxdec->frame_time = JIFFIE * flxh->speed;
      } else if (flxh->speed == 0) {
        flxdec->frame_time = GST_SECOND / 70;
      } else {
        flxdec->frame_time = flxh->speed * GST_MSECOND;
      }

      flxdec->duration = flxh->frames * flxdec->frame_time;
      GST_LOG ("duration : %" GST_TIME_FORMAT,
          GST_TIME_ARGS (flxdec->duration));

      templ = gst_pad_get_pad_template_caps (flxdec->srcpad);
      caps = gst_caps_copy (templ);
      gst_caps_unref (templ);
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, flxh->width,
          "height", G_TYPE_INT, flxh->height,
          "framerate", GST_TYPE_FRACTION, (gint) GST_MSECOND,
          (gint) flxdec->frame_time / 1000, NULL);

      gst_pad_set_caps (flxdec->srcpad, caps);
      gst_caps_unref (caps);

      if (flxdec->need_segment) {
        gst_pad_push_event (flxdec->srcpad,
            gst_event_new_segment (&flxdec->segment));
        flxdec->need_segment = FALSE;
      }

      /* zero means 8 */
      if (flxh->depth == 0)
        flxh->depth = 8;

      if (flxh->depth != 8) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE,
            ("%s", "Don't know how to decode non 8 bit depth streams"),
            (NULL));
        goto unmap_input_error;
      }

      flxdec->converter =
          flx_colorspace_converter_new (flxh->width, flxh->height);

      if (flxh->type == FLX_MAGICHDR_FLC || flxh->type == FLX_MAGICHDR_FLX) {
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dx : %d", flxh->aspect_dx);
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dy : %d", flxh->aspect_dy);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe1 : 0x%08x", flxh->oframe1);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe2 : 0x%08x", flxh->oframe2);
      }

      flxdec->size = ((guint) flxh->width * (guint) flxh->height);
      if (flxdec->size >= G_MAXSIZE / 4) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Cannot allocate required memory"), (NULL));
        goto unmap_input_error;
      }

      /* create delta and output frame */
      flxdec->frame_data = g_malloc0 (flxdec->size);
      flxdec->delta_data = g_malloc0 (flxdec->size);

      flxdec->state = GST_FLXDEC_PLAYING;
    }
  } else if (flxdec->state == GST_FLXDEC_PLAYING) {
    GstBuffer *out;

    /* while we have enough data in the adapter */
    while (available >= FlxFrameChunkSize && res == GST_FLOW_OK) {
      guint32 size;
      guint16 type;

      if (!gst_byte_reader_get_uint32_le (&reader, &size))
        goto parse_error;
      /* a chunk must at least hold its own header; guard against the
       * unsigned underflow of size - FlxFrameChunkSize below */
      if (size < FlxFrameChunkSize)
        goto parse_error;
      if (available < size)
        goto need_more_data;

      available -= size;
      gst_adapter_flush (flxdec->adapter, size);

      if (!gst_byte_reader_get_uint16_le (&reader, &type))
        goto parse_error;

      switch (type) {
        case FLX_FRAME_TYPE:{
          GstByteReader chunks;
          GstByteWriter writer;
          guint16 n_chunks;
          GstMapInfo map;

          GST_LOG_OBJECT (flxdec, "Have frame type 0x%02x of size %d",
              type, size);

          if (!gst_byte_reader_get_sub_reader (&reader, &chunks,
                  size - FlxFrameChunkSize))
            goto parse_error;

          if (!gst_byte_reader_get_uint16_le (&chunks, &n_chunks))
            goto parse_error;
          GST_LOG_OBJECT (flxdec, "Have %d chunks", n_chunks);

          if (n_chunks == 0)
            break;

          if (!gst_byte_reader_skip (&chunks, 8))       /* reserved */
            goto parse_error;

          gst_byte_writer_init_with_data (&writer, flxdec->frame_data,
              flxdec->size, TRUE);

          /* decode chunks */
          if (!flx_decode_chunks (flxdec, n_chunks, &chunks, &writer)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not decode chunk"), (NULL));
            goto unmap_input_error;
          }
          gst_byte_writer_reset (&writer);

          /* save copy of the current frame for possible delta. */
          memcpy (flxdec->delta_data, flxdec->frame_data, flxdec->size);

          out = gst_buffer_new_and_alloc (flxdec->size * 4);
          if (!gst_buffer_map (out, &map, GST_MAP_WRITE)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not map output buffer"), (NULL));
            gst_buffer_unref (out);
            goto unmap_input_error;
          }

          /* convert current frame. */
          flx_colorspace_convert (flxdec->converter, flxdec->frame_data,
              map.data);
          gst_buffer_unmap (out, &map);

          GST_BUFFER_TIMESTAMP (out) = flxdec->next_time;
          flxdec->next_time += flxdec->frame_time;

          res = gst_pad_push (flxdec->srcpad, out);
          break;
        }
        default:
          GST_DEBUG_OBJECT (flxdec, "Unknown frame type 0x%02x, skipping %d",
              type, size);
          if (!gst_byte_reader_skip (&reader, size - FlxFrameChunkSize))
            goto parse_error;
          break;
      }
    }
  }

need_more_data:
  gst_buffer_unmap (input, &map_info);
  gst_buffer_unref (input);
  return res;

  /* ERRORS */
parse_error:
  GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
      ("%s", "Failed to parse stream"), (NULL));
unmap_input_error:
  gst_buffer_unmap (input, &map_info);
error:
  gst_buffer_unref (input);
  return GST_FLOW_ERROR;
}
static GstFlowReturn
gst_tcp_server_src_create (GstPushSrc * psrc, GstBuffer ** outbuf)
{
  GstTCPServerSrc *src;
  GstFlowReturn ret = GST_FLOW_OK;
  gssize rret, avail;
  gsize read;
  GError *err = NULL;
  GstMapInfo map;

  src = GST_TCP_SERVER_SRC (psrc);

  if (!GST_OBJECT_FLAG_IS_SET (src, GST_TCP_SERVER_SRC_OPEN))
    goto wrong_state;

  if (!src->client_socket) {
    /* wait on server socket for connections */
    src->client_socket =
        g_socket_accept (src->server_socket, src->cancellable, &err);
    if (!src->client_socket)
      goto accept_error;
    /* now read from the socket. */
  }

  /* if we have a client, wait for read */
  GST_LOG_OBJECT (src, "asked for a buffer");

  /* read the buffer header */
  avail = g_socket_get_available_bytes (src->client_socket);
  if (avail < 0) {
    goto get_available_error;
  } else if (avail == 0) {
    GIOCondition condition;

    if (!g_socket_condition_wait (src->client_socket,
            G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP, src->cancellable, &err))
      goto select_error;

    condition = g_socket_condition_check (src->client_socket,
        G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP);

    if ((condition & G_IO_ERR)) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Socket in error state"));
      *outbuf = NULL;
      ret = GST_FLOW_ERROR;
      goto done;
    } else if ((condition & G_IO_HUP)) {
      GST_DEBUG_OBJECT (src, "Connection closed");
      *outbuf = NULL;
      ret = GST_FLOW_EOS;
      goto done;
    }

    avail = g_socket_get_available_bytes (src->client_socket);
    if (avail < 0)
      goto get_available_error;
  }

  if (avail > 0) {
    read = MIN (avail, MAX_READ_SIZE);
    *outbuf = gst_buffer_new_and_alloc (read);
    gst_buffer_map (*outbuf, &map, GST_MAP_READWRITE);
    rret = g_socket_receive (src->client_socket, (gchar *) map.data,
        read, src->cancellable, &err);
  } else {
    /* Connection closed */
    rret = 0;
    *outbuf = NULL;
    read = 0;
  }

  if (rret == 0) {
    GST_DEBUG_OBJECT (src, "Connection closed");
    ret = GST_FLOW_EOS;
    if (*outbuf) {
      gst_buffer_unmap (*outbuf, &map);
      gst_buffer_unref (*outbuf);
    }
    *outbuf = NULL;
  } else if (rret < 0) {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      ret = GST_FLOW_FLUSHING;
      GST_DEBUG_OBJECT (src, "Cancelled reading from socket");
    } else {
      ret = GST_FLOW_ERROR;
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Failed to read from socket: %s", err->message));
    }
    gst_buffer_unmap (*outbuf, &map);
    gst_buffer_unref (*outbuf);
    *outbuf = NULL;
  } else {
    ret = GST_FLOW_OK;
    gst_buffer_unmap (*outbuf, &map);
    gst_buffer_resize (*outbuf, 0, rret);

    GST_LOG_OBJECT (src,
        "Returning buffer from _get of size %" G_GSIZE_FORMAT ", ts %"
        GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT
        ", offset %" G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT,
        gst_buffer_get_size (*outbuf),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*outbuf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (*outbuf)),
        GST_BUFFER_OFFSET (*outbuf), GST_BUFFER_OFFSET_END (*outbuf));
  }
  g_clear_error (&err);

done:
  return ret;

wrong_state:
  {
    GST_DEBUG_OBJECT (src, "connection closed, cannot read data");
    return GST_FLOW_FLUSHING;
  }
accept_error:
  {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      GST_DEBUG_OBJECT (src, "Cancelled accepting of client");
    } else {
      GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
          ("Failed to accept client: %s", err->message));
    }
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
select_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("Select failed: %s", err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
get_available_error:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
        ("Failed to get available bytes from socket"));
    return GST_FLOW_ERROR;
  }
}
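/* Sketch of the wait-then-receive pattern above with GIO: wait until the
 * socket is readable (or hung up), then receive at most max_read bytes.
 * Returns bytes read, 0 on EOF/HUP, -1 on error; the helper name is
 * illustrative. */
#include <gio/gio.h>

static gssize
example_socket_read (GSocket * sock, gchar * dest, gsize max_read,
    GCancellable * cancellable, GError ** err)
{
  GIOCondition cond;

  if (!g_socket_condition_wait (sock, G_IO_IN | G_IO_ERR | G_IO_HUP,
          cancellable, err))
    return -1;
  cond = g_socket_condition_check (sock, G_IO_IN | G_IO_ERR | G_IO_HUP);
  if ((cond & G_IO_HUP) && !(cond & G_IO_IN))
    return 0;                   /* peer closed, nothing left to read */
  return g_socket_receive (sock, dest, max_read, cancellable, err);
}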
static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  GstMapInfo inmap, outmap;
  guint input_samples;
  guint output_samples;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->lock);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && ABS (GST_CLOCK_DIFF (timestamp,
                  expected_timestamp)) > 5 * GST_MSECOND)) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  input_samples = (inmap.size / bps) / channels;
  output_samples = (outmap.size / bps) / channels;

  self->nsamples_in += input_samples;

  generated_samples =
      self->process (self, inmap.data, outmap.data, input_samples);

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    goto no_samples;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0)
    goto no_samples;

  if (diff < generated_samples) {
    gint64 tmp = diff;
    diff = generated_samples - diff;
    generated_samples = tmp;
  } else {
    diff = 0;
  }

  gst_buffer_resize (outbuf, diff * bps * channels,
      generated_samples * bps * channels);

  output_offset = self->nsamples_out - self->latency - generated_samples;
  GST_BUFFER_TIMESTAMP (outbuf) =
      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
      rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (generated_samples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
    GST_BUFFER_OFFSET_END (outbuf) =
        GST_BUFFER_OFFSET (outbuf) + generated_samples;
  } else {
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
  }
  g_mutex_unlock (&self->lock);

  GST_DEBUG_OBJECT (self,
      "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT
      ", nsamples_out: %d", gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf),
      generated_samples);

  return GST_FLOW_OK;

no_samples:
  {
    g_mutex_unlock (&self->lock);
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
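/* Sketch of the latency-trimming arithmetic above: given the running total
 * of emitted samples and the filter latency, decide how many of the newly
 * generated samples are still warm-up zeros to drop from the front.  The
 * helper name is illustrative. */
#include <glib.h>

static guint
example_trim_latency (guint64 nsamples_out, guint64 latency,
    guint generated, guint * drop)
{
  gint64 usable = (gint64) nsamples_out - (gint64) latency;

  if (usable <= 0) {            /* still entirely inside the latency */
    *drop = generated;
    return 0;
  }
  if ((guint64) usable < generated) {
    *drop = generated - (guint) usable;
    return (guint) usable;
  }
  *drop = 0;
  return generated;             /* everything generated is usable */
}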
void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  gint outsize, outsamples;
  GstMapInfo map;
  guint8 *in, *out;

  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * bps;

  if (!self->fft || self->low_latency) {
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * bps;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    outbuf = gst_buffer_new_and_alloc (outsize);

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
    self->nsamples_out += self->process (self, in, map.data, outsamples);
    gst_buffer_unmap (outbuf, &map);

    g_free (in);
  } else {
    guint gensamples = 0;

    outbuf = gst_buffer_new_and_alloc (outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);

    while (gensamples < outsamples) {
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
      guint8 *out = g_new (guint8, self->block_length * channels * bps);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      /* the copy offset and length are in frames, so scale by the frame
       * size (channels * bps), not just bps */
      memcpy (map.data + gensamples * channels * bps, out,
          MIN (step_gensamples, outsamples - gensamples) * channels * bps);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;

    gst_buffer_unmap (outbuf, &map);
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }

  GST_DEBUG_OBJECT (self,
      "Pushing residue buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT
      ", nsamples_out: %d", gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf), outsamples);

  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);

  if (G_UNLIKELY (res != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push residue");
  }

  self->buffer_fill = 0;
}
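/* Sketch of the drain step above: feed blocks of zero input into a
 * processing callback until the outstanding tail has been produced.  The
 * ExampleProcess typedef stands in for the element's process vfunc (frame
 * counts in and out) and is an assumption for illustration. */
#include <string.h>
#include <glib.h>

typedef guint (*ExampleProcess) (gpointer filter, const guint8 * in,
    guint8 * out, guint frames);

static guint
example_drain (gpointer filter, ExampleProcess process, guint8 * dest,
    guint tail_frames, guint frame_bytes, guint block_frames)
{
  guint done = 0;

  while (done < tail_frames) {
    guint8 *zeroes = g_new0 (guint8, block_frames * frame_bytes);
    guint8 *out = g_new (guint8, block_frames * frame_bytes);
    guint got = process (filter, zeroes, out, block_frames);
    guint take = MIN (got, tail_frames - done);

    memcpy (dest + done * frame_bytes, out, take * frame_bytes);
    done += take;
    g_free (zeroes);
    g_free (out);
    if (got == 0)
      break;                    /* defensive: avoid spinning forever */
  }
  return done;
}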
static gboolean
gst_amc_audio_dec_set_format (GstAudioDecoder * decoder, GstCaps * caps)
{
  GstAmcAudioDec *self;
  GstStructure *s;
  GstAmcFormat *format;
  const gchar *mime;
  gboolean is_format_change = FALSE;
  gboolean needs_disable = FALSE;
  gchar *format_string;
  gint rate, channels;

  self = GST_AMC_AUDIO_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, caps);

  /* Check if the caps change is a real format change or if only irrelevant
   * parts of the caps have changed or nothing at all. */
  is_format_change |= (!self->input_caps
      || !gst_caps_is_equal (self->input_caps, caps));

  needs_disable = self->started;

  /* If the component is not started and a real format change happens
   * we have to restart the component. If no real format change
   * happened we can just exit here. */
  if (needs_disable && !is_format_change) {
    /* Framerate or something minor changed */
    self->input_caps_changed = TRUE;
    GST_DEBUG_OBJECT (self,
        "Already running and caps did not change the format");
    return TRUE;
  }

  if (needs_disable && is_format_change) {
    gst_amc_audio_dec_drain (self);
    GST_AUDIO_DECODER_STREAM_UNLOCK (self);
    gst_amc_audio_dec_stop (GST_AUDIO_DECODER (self));
    GST_AUDIO_DECODER_STREAM_LOCK (self);
    gst_amc_audio_dec_close (GST_AUDIO_DECODER (self));
    if (!gst_amc_audio_dec_open (GST_AUDIO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to open codec again");
      return FALSE;
    }

    if (!gst_amc_audio_dec_start (GST_AUDIO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to start codec again");
      return FALSE;
    }
  }
  /* srcpad task is not running at this point */

  mime = caps_to_mime (caps);
  if (!mime) {
    GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
    return FALSE;
  }

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (s, "rate", &rate) ||
      !gst_structure_get_int (s, "channels", &channels)) {
    GST_ERROR_OBJECT (self, "Failed to get rate/channels");
    return FALSE;
  }

  format = gst_amc_format_new_audio (mime, rate, channels);
  if (!format) {
    GST_ERROR_OBJECT (self, "Failed to create audio format");
    return FALSE;
  }

  /* FIXME: These buffers need to be valid until the codec is stopped again.
   * They are raw g_memdup'd blobs, so free them with g_free, not
   * gst_buffer_unref */
  g_list_free_full (self->codec_datas, (GDestroyNotify) g_free);
  self->codec_datas = NULL;
  if (gst_structure_has_field (s, "codec_data")) {
    const GValue *h = gst_structure_get_value (s, "codec_data");
    GstBuffer *codec_data = gst_value_get_buffer (h);
    GstMapInfo minfo;
    guint8 *data;

    gst_buffer_map (codec_data, &minfo, GST_MAP_READ);
    data = g_memdup (minfo.data, minfo.size);
    self->codec_datas = g_list_prepend (self->codec_datas, data);
    gst_amc_format_set_buffer (format, "csd-0", data, minfo.size);
    gst_buffer_unmap (codec_data, &minfo);
  } else if (gst_structure_has_field (s, "streamheader")) {
    const GValue *sh = gst_structure_get_value (s, "streamheader");
    gint nsheaders = gst_value_array_get_size (sh);
    GstBuffer *buf;
    const GValue *h;
    gint i, j;
    gchar *fname;
    GstMapInfo minfo;
    guint8 *data;

    for (i = 0, j = 0; i < nsheaders; i++) {
      h = gst_value_array_get_value (sh, i);
      buf = gst_value_get_buffer (h);

      if (strcmp (mime, "audio/vorbis") == 0) {
        guint8 header_type;

        gst_buffer_extract (buf, 0, &header_type, 1);

        /* Only use the identification and setup packets */
        if (header_type != 0x01 && header_type != 0x05)
          continue;
      }

      fname = g_strdup_printf ("csd-%d", j);
      gst_buffer_map (buf, &minfo, GST_MAP_READ);
      data = g_memdup (minfo.data, minfo.size);
      self->codec_datas = g_list_prepend (self->codec_datas, data);
      gst_amc_format_set_buffer (format, fname, data, minfo.size);
      gst_buffer_unmap (buf, &minfo);
      g_free (fname);
      j++;
    }
  }

  format_string = gst_amc_format_to_string (format);
  GST_DEBUG_OBJECT (self, "Configuring codec with format: %s", format_string);
  g_free (format_string);

  self->n_buffers = 0;
  if (!gst_amc_codec_configure (self->codec, format, NULL, 0)) {
    GST_ERROR_OBJECT (self, "Failed to configure codec");
    gst_amc_format_free (format);
    return FALSE;
  }

  gst_amc_format_free (format);

  if (!gst_amc_codec_start (self->codec)) {
    GST_ERROR_OBJECT (self, "Failed to start codec");
    return FALSE;
  }

  if (self->input_buffers)
    gst_amc_codec_free_buffers (self->input_buffers, self->n_input_buffers);
  self->input_buffers =
      gst_amc_codec_get_input_buffers (self->codec, &self->n_input_buffers);
  if (!self->input_buffers) {
    GST_ERROR_OBJECT (self, "Failed to get input buffers");
    return FALSE;
  }

  self->started = TRUE;
  self->input_caps_changed = TRUE;

  /* Start the srcpad loop again */
  self->flushing = FALSE;
  self->downstream_flow_ret = GST_FLOW_OK;
  gst_pad_start_task (GST_AUDIO_DECODER_SRC_PAD (self),
      (GstTaskFunction) gst_amc_audio_dec_loop, decoder, NULL);

  return TRUE;
}
static void
send_message_locked (GstSoupHttpClientSink * souphttpsink)
{
  GList *g;
  guint64 n;

  if (souphttpsink->queued_buffers == NULL || souphttpsink->message) {
    return;
  }

  /* If the URI went away, drop all these buffers */
  if (souphttpsink->location == NULL) {
    GST_DEBUG_OBJECT (souphttpsink, "URI went away, dropping queued buffers");
    g_list_free_full (souphttpsink->queued_buffers,
        (GDestroyNotify) gst_buffer_unref);
    souphttpsink->queued_buffers = NULL;
    return;
  }

  souphttpsink->message = soup_message_new ("PUT", souphttpsink->location);
  if (souphttpsink->message == NULL) {
    GST_WARNING_OBJECT (souphttpsink,
        "URI could not be parsed while creating message.");
    g_list_free_full (souphttpsink->queued_buffers,
        (GDestroyNotify) gst_buffer_unref);
    souphttpsink->queued_buffers = NULL;
    return;
  }

  soup_message_set_flags (souphttpsink->message,
      (souphttpsink->automatic_redirect ? 0 : SOUP_MESSAGE_NO_REDIRECT));

  if (souphttpsink->cookies) {
    gchar **cookie;

    for (cookie = souphttpsink->cookies; *cookie != NULL; cookie++) {
      soup_message_headers_append (souphttpsink->message->request_headers,
          "Cookie", *cookie);
    }
  }

  n = 0;
  if (souphttpsink->offset == 0) {
    for (g = souphttpsink->streamheader_buffers; g; g = g_list_next (g)) {
      GstBuffer *buffer = g->data;
      GstMapInfo map;

      GST_DEBUG_OBJECT (souphttpsink, "queueing stream headers");
      gst_buffer_map (buffer, &map, GST_MAP_READ);
      /* Stream headers are updated whenever ::set_caps is called, so there's
       * no guarantees about their lifetime and we ask libsoup to copy them
       * into the message body with SOUP_MEMORY_COPY. */
      soup_message_body_append (souphttpsink->message->request_body,
          SOUP_MEMORY_COPY, map.data, map.size);
      n += map.size;
      gst_buffer_unmap (buffer, &map);
    }
  }

  for (g = souphttpsink->queued_buffers; g; g = g_list_next (g)) {
    GstBuffer *buffer = g->data;

    if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
      GstMapInfo map;

      gst_buffer_map (buffer, &map, GST_MAP_READ);
      /* Queued buffers are only freed in the next iteration of the mainloop
       * after the message body has been written out, so we don't need libsoup
       * to copy those while appending to the body. However, if the buffer is
       * used elsewhere, it should be copied. Hence, SOUP_MEMORY_TEMPORARY. */
      soup_message_body_append (souphttpsink->message->request_body,
          SOUP_MEMORY_TEMPORARY, map.data, map.size);
      n += map.size;
      gst_buffer_unmap (buffer, &map);
    }
  }

  if (souphttpsink->offset != 0) {
    char *s;

    s = g_strdup_printf ("bytes %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT "/*",
        souphttpsink->offset, souphttpsink->offset + n - 1);
    soup_message_headers_append (souphttpsink->message->request_headers,
        "Content-Range", s);
    g_free (s);
  }

  if (n == 0) {
    GST_DEBUG_OBJECT (souphttpsink,
        "total size of buffers queued is 0, freeing everything");
    g_list_free_full (souphttpsink->queued_buffers,
        (GDestroyNotify) gst_buffer_unref);
    souphttpsink->queued_buffers = NULL;
    g_object_unref (souphttpsink->message);
    souphttpsink->message = NULL;
    return;
  }

  souphttpsink->sent_buffers = souphttpsink->queued_buffers;
  souphttpsink->queued_buffers = NULL;

  GST_DEBUG_OBJECT (souphttpsink,
      "queue message %" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT,
      souphttpsink->offset, n);
  soup_session_queue_message (souphttpsink->session, souphttpsink->message,
      callback, souphttpsink);

  souphttpsink->offset += n;
}
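/* Sketch of the body-building pattern above for libsoup 2.x: append mapped
 * GstBuffer contents to a request body, either copying immediately
 * (SOUP_MEMORY_COPY) or marking the data temporary (SOUP_MEMORY_TEMPORARY),
 * as the sink above does for buffers it keeps alive until the message is
 * written.  The helper name is illustrative. */
#include <libsoup/soup.h>
#include <gst/gst.h>

static guint64
example_append_buffers (SoupMessage * msg, GList * buffers, gboolean copy)
{
  GList *g;
  guint64 n = 0;

  for (g = buffers; g; g = g_list_next (g)) {
    GstBuffer *buffer = g->data;
    GstMapInfo map;

    gst_buffer_map (buffer, &map, GST_MAP_READ);
    soup_message_body_append (msg->request_body,
        copy ? SOUP_MEMORY_COPY : SOUP_MEMORY_TEMPORARY, map.data, map.size);
    n += map.size;
    gst_buffer_unmap (buffer, &map);
  }
  return n;
}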
int main (int argc, char **argv) { GstElement *source, *filter, *encoder, *conv, *resampler, *sink, *oggmux; GstCaps *caps; GstBus *bus; guint bus_watch_id; struct AudioMessage audio_message; int abort_send = 0; typedef void (*SignalHandlerPointer) (int); SignalHandlerPointer inthandler, termhandler; inthandler = signal (SIGINT, signalhandler); termhandler = signal (SIGTERM, signalhandler); #ifdef DEBUG_RECORD_PURE_OGG dump_pure_ogg = getenv ("GNUNET_RECORD_PURE_OGG") ? 1 : 0; #endif #ifdef WINDOWS setmode (1, _O_BINARY); #endif /* Initialisation */ gst_init (&argc, &argv); GNUNET_assert (GNUNET_OK == GNUNET_log_setup ("gnunet-helper-audio-record", "WARNING", NULL)); GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Audio source starts\n"); audio_message.header.type = htons (GNUNET_MESSAGE_TYPE_CONVERSATION_AUDIO); /* Create gstreamer elements */ pipeline = gst_pipeline_new ("audio-recorder"); source = gst_element_factory_make ("autoaudiosrc", "audiosource"); filter = gst_element_factory_make ("capsfilter", "filter"); conv = gst_element_factory_make ("audioconvert", "converter"); resampler = gst_element_factory_make ("audioresample", "resampler"); encoder = gst_element_factory_make ("opusenc", "opus-encoder"); oggmux = gst_element_factory_make ("oggmux", "ogg-muxer"); sink = gst_element_factory_make ("appsink", "audio-output"); if (!pipeline || !filter || !source || !conv || !resampler || !encoder || !oggmux || !sink) { GNUNET_log (GNUNET_ERROR_TYPE_ERROR, "One element could not be created. Exiting.\n"); return -1; } g_signal_connect (source, "child-added", G_CALLBACK (source_child_added), NULL); /* Set up the pipeline */ caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "S16LE", /* "rate", G_TYPE_INT, SAMPLING_RATE,*/ "channels", G_TYPE_INT, OPUS_CHANNELS, /* "layout", G_TYPE_STRING, "interleaved",*/ NULL); g_object_set (G_OBJECT (filter), "caps", caps, NULL); gst_caps_unref (caps); g_object_set (G_OBJECT (encoder), /* "bitrate", 64000, */ /* "bandwidth", OPUS_BANDWIDTH_FULLBAND, */ "inband-fec", INBAND_FEC_MODE, "packet-loss-percentage", PACKET_LOSS_PERCENTAGE, "max-payload-size", MAX_PAYLOAD_SIZE, "audio", FALSE, /* VoIP, not audio */ "frame-size", OPUS_FRAME_SIZE, NULL); g_object_set (G_OBJECT (oggmux), "max-delay", OGG_MAX_DELAY, "max-page-delay", OGG_MAX_PAGE_DELAY, NULL); /* we add a message handler */ bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); bus_watch_id = gst_bus_add_watch (bus, bus_call, pipeline); gst_object_unref (bus); /* we add all elements into the pipeline */ /* audiosource | filter | converter | resampler | opus-encoder | ogg-muxer | audio-output */ gst_bin_add_many (GST_BIN (pipeline), source, filter, conv, resampler, encoder, oggmux, sink, NULL); /* we link the elements together */ gst_element_link_many (source, filter, conv, resampler, encoder, oggmux, sink, NULL); /* Set the pipeline to "playing" state */ GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Now playing\n"); gst_element_set_state (pipeline, GST_STATE_PLAYING); GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Running...\n"); /* Iterate */ while (!abort_send) { GstSample *s; GstBuffer *b; GstMapInfo m; size_t len, msg_size; const char *ptr; int phase; GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "pulling...\n"); s = gst_app_sink_pull_sample (GST_APP_SINK (sink)); if (NULL == s) { GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "pulled NULL\n"); break; } GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "...pulled!\n"); { const GstStructure *si; char *si_str; GstCaps *s_caps; char *caps_str; si = gst_sample_get_info (s); if (si) { si_str = gst_structure_to_string (si);
if (si_str) { GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample %s\n", si_str); g_free (si_str); } } else GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample with no info\n"); s_caps = gst_sample_get_caps (s); if (s_caps) { caps_str = gst_caps_to_string (s_caps); if (caps_str) { GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample with caps %s\n", caps_str); g_free (caps_str); } } else GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Got sample with no caps\n"); } b = gst_sample_get_buffer (s); if (NULL == b || !gst_buffer_map (b, &m, GST_MAP_READ)) { GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "got NULL buffer %p or failed to map the buffer\n", b); gst_sample_unref (s); continue; } len = m.size; if (len > UINT16_MAX - sizeof (struct AudioMessage)) { GNUNET_break (0); len = UINT16_MAX - sizeof (struct AudioMessage); } msg_size = sizeof (struct AudioMessage) + len; audio_message.header.size = htons ((uint16_t) msg_size); GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Sending %u bytes of audio data\n", (unsigned int) msg_size); for (phase = 0; phase < 2; phase++) { size_t offset; size_t to_send; ssize_t ret; if (0 == phase) { #ifdef DEBUG_RECORD_PURE_OGG if (dump_pure_ogg) continue; #endif ptr = (const char *) &audio_message; to_send = sizeof (audio_message); } else { ptr = (const char *) m.data; to_send = len; } GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Sending %u bytes on phase %d\n", (unsigned int) to_send, phase); for (offset = 0; offset < to_send; offset += ret) { ret = write (1, &ptr[offset], to_send - offset); if (0 >= ret) { if (-1 == ret) GNUNET_log (GNUNET_ERROR_TYPE_DEBUG, "Failed to write %u bytes at offset %u (total %u) in phase %d: %s\n", (unsigned int) (to_send - offset), (unsigned int) offset, (unsigned int) to_send, phase, strerror (errno)); abort_send = 1; break; } } if (abort_send) break; } gst_buffer_unmap (b, &m); gst_sample_unref (s); } signal (SIGINT, inthandler); signal (SIGTERM, termhandler); GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Returned, stopping playback\n"); quit (); GNUNET_log (GNUNET_ERROR_TYPE_INFO, "Deleting pipeline\n"); gst_object_unref (GST_OBJECT (pipeline)); pipeline = NULL; g_source_remove (bus_watch_id); return 0; }
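A sketch of the partial-write loop main() uses for each phase: write(2) may accept fewer bytes than requested, so it is called repeatedly until the whole span is out or it fails. The helper name write_all is hypothetical.

#include <unistd.h>
#include <stddef.h>

/* Returns 0 when all bytes were written, -1 on error or EOF. */
static int
write_all (int fd, const char *ptr, size_t to_send)
{
  size_t offset;
  ssize_t ret;

  for (offset = 0; offset < to_send; offset += ret) {
    ret = write (fd, &ptr[offset], to_send - offset);
    if (ret <= 0)
      return -1;                /* caller stops sending, as with abort_send */
  }
  return 0;
}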
static GstFlowReturn webkitMediaPlayReadyDecryptTransformInPlace(GstBaseTransform* base, GstBuffer* buffer) { WebKitMediaPlayReadyDecrypt* self = WEBKIT_MEDIA_PLAYREADY_DECRYPT(base); GstFlowReturn result = GST_FLOW_OK; GstMapInfo map; const GValue* value; guint sampleIndex = 0; int errorCode; uint32_t trackID = 0; GstPad* pad; GstCaps* caps; GstMapInfo boxMap; GstBuffer* box = nullptr; GstProtectionMeta* protectionMeta = 0; gboolean boxMapped = FALSE; gboolean bufferMapped = FALSE; GST_TRACE_OBJECT(self, "Processing buffer"); g_mutex_lock(&self->mutex); GST_TRACE_OBJECT(self, "Mutex acquired, stream received: %s", self->streamReceived ? "yes":"no"); // The key might not have been received yet. Wait for it. if (!self->streamReceived) g_cond_wait(&self->condition, &self->mutex); if (!self->streamReceived) { GST_DEBUG_OBJECT(self, "Condition signaled from state change transition. Aborting."); result = GST_FLOW_NOT_SUPPORTED; goto beach; } GST_TRACE_OBJECT(self, "Proceeding with decryption"); protectionMeta = reinterpret_cast<GstProtectionMeta*>(gst_buffer_get_protection_meta(buffer)); if (!protectionMeta || !buffer) { if (!protectionMeta) GST_ERROR_OBJECT(self, "Failed to get GstProtection metadata from buffer %p", buffer); if (!buffer) GST_ERROR_OBJECT(self, "Failed to get writable buffer"); result = GST_FLOW_NOT_SUPPORTED; goto beach; } bufferMapped = gst_buffer_map(buffer, &map, static_cast<GstMapFlags>(GST_MAP_READWRITE)); if (!bufferMapped) { GST_ERROR_OBJECT(self, "Failed to map buffer"); result = GST_FLOW_NOT_SUPPORTED; goto beach; } pad = gst_element_get_static_pad(GST_ELEMENT(self), "src"); caps = gst_pad_get_current_caps(pad); if (g_str_has_prefix(gst_structure_get_name(gst_caps_get_structure(caps, 0)), "video/")) trackID = 1; else trackID = 2; gst_caps_unref(caps); gst_object_unref(pad); if (!gst_structure_get_uint(protectionMeta->info, "sample-index", &sampleIndex)) { GST_ERROR_OBJECT(self, "failed to get sample-index"); result = GST_FLOW_NOT_SUPPORTED; goto beach; } value = gst_structure_get_value(protectionMeta->info, "box"); if (!value) { GST_ERROR_OBJECT(self, "Failed to get encryption box for sample"); result = GST_FLOW_NOT_SUPPORTED; goto beach; } box = gst_value_get_buffer(value); boxMapped = gst_buffer_map(box, &boxMap, GST_MAP_READ); if (!boxMapped) { GST_ERROR_OBJECT(self, "Failed to map encryption box"); result = GST_FLOW_NOT_SUPPORTED; goto beach; } GST_TRACE_OBJECT(self, "decrypt sample %u", sampleIndex); if ((errorCode = self->sessionMetaData->decrypt(static_cast<void*>(map.data), static_cast<uint32_t>(map.size), static_cast<void*>(boxMap.data), static_cast<uint32_t>(boxMap.size), static_cast<uint32_t>(sampleIndex), trackID))) { GST_WARNING_OBJECT(self, "ERROR - packet decryption failed [%d]", errorCode); GST_MEMDUMP_OBJECT(self, "box", boxMap.data, boxMap.size); result = GST_FLOW_ERROR; goto beach; } beach: if (boxMapped) gst_buffer_unmap(box, &boxMap); if (bufferMapped) gst_buffer_unmap(buffer, &map); if (protectionMeta) gst_buffer_remove_meta(buffer, reinterpret_cast<GstMeta*>(protectionMeta)); GST_TRACE_OBJECT(self, "Unlocking mutex"); g_mutex_unlock(&self->mutex); return result; }
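A sketch of the track selection above, under the assumption that PlayReady expects track ID 1 for video and 2 for everything else; the helper name track_id_from_caps is hypothetical.

#include <gst/gst.h>

static guint32
track_id_from_caps (GstCaps *caps)
{
  /* Inspect the media type of the first caps structure. */
  const gchar *name = gst_structure_get_name (gst_caps_get_structure (caps, 0));

  return g_str_has_prefix (name, "video/") ? 1 : 2;
}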
static GstBuffer * gst_dtmf_src_generate_tone (GstRtpDTMFDepay * rtpdtmfdepay, GstRTPDTMFPayload payload) { GstBuffer *buf; GstMapInfo map; gint16 *p; gint tone_size; double i = 0; double amplitude, f1, f2; double volume_factor; DTMF_KEY key = DTMF_KEYS[payload.event]; guint32 clock_rate = 8000 /* default */ ; GstRTPBaseDepayload *depayload = GST_RTP_BASE_DEPAYLOAD (rtpdtmfdepay); gint volume; static GstAllocationParams params = { 0, 0, 0, 1, }; clock_rate = depayload->clock_rate; /* Create a buffer for the tone */ tone_size = (payload.duration * SAMPLE_SIZE * CHANNELS) / 8; buf = gst_buffer_new_allocate (NULL, tone_size, &params); GST_BUFFER_DURATION (buf) = payload.duration * GST_SECOND / clock_rate; volume = payload.volume; gst_buffer_map (buf, &map, GST_MAP_WRITE); p = (gint16 *) map.data; /* divide in floating point: integer division would truncate the exponent */ volume_factor = pow (10, (-volume) / 20.0); /* * For each sample point we calculate 'x' as the * amplitude value. */ for (i = 0; i < (tone_size / (SAMPLE_SIZE / 8)); i++) { /* * We add the fundamental frequencies together. */ f1 = sin (2 * M_PI * key.low_frequency * ((double) rtpdtmfdepay->sample / clock_rate)); f2 = sin (2 * M_PI * key.high_frequency * ((double) rtpdtmfdepay->sample / clock_rate)); amplitude = (f1 + f2) / 2; /* Adjust the volume */ amplitude *= volume_factor; /* Make the [-1:1] interval into a [-32767:32767] interval */ amplitude *= 32767; /* Store it in the data buffer */ *(p++) = (gint16) amplitude; (rtpdtmfdepay->sample)++; } gst_buffer_unmap (buf, &map); return buf; }
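A self-contained sketch of the dual-tone synthesis above with the floating-point math made explicit: two sinusoids are averaged, attenuated by the volume in dB, and scaled to the S16 range. The function name and parameters are illustrative, not part of the element.

#include <math.h>
#include <stdint.h>

/* Render n S16 samples of a DTMF tone; *sample carries phase between calls. */
static void
synth_dtmf (int16_t *out, unsigned n, double low_hz, double high_hz,
    int volume_db, unsigned clock_rate, uint64_t *sample)
{
  /* (f1 + f2) / 2, attenuated by volume_db, scaled to [-32767, 32767] */
  double scale = pow (10, -volume_db / 20.0) * 32767.0 / 2.0;
  unsigned i;

  for (i = 0; i < n; i++, (*sample)++) {
    double t = (double) *sample / clock_rate;
    out[i] = (int16_t) (scale * (sin (2 * M_PI * low_hz * t)
            + sin (2 * M_PI * high_hz * t)));
  }
}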
static void gst_droidadec_data_available (void *data, DroidMediaCodecData * encoded) { GstFlowReturn flow_ret; GstDroidADec *dec = (GstDroidADec *) data; GstAudioDecoder *decoder = GST_AUDIO_DECODER (dec); GstBuffer *out; GstMapInfo info; GST_DEBUG_OBJECT (dec, "data available of size %" G_GSIZE_FORMAT, (gsize) encoded->data.size); GST_AUDIO_DECODER_STREAM_LOCK (decoder); if (G_UNLIKELY (dec->downstream_flow_ret != GST_FLOW_OK)) { GST_DEBUG_OBJECT (dec, "not handling data in error state: %s", gst_flow_get_name (dec->downstream_flow_ret)); flow_ret = dec->downstream_flow_ret; gst_audio_decoder_finish_frame (decoder, NULL, 1); goto out; } if (G_UNLIKELY (gst_audio_decoder_get_audio_info (GST_AUDIO_DECODER (dec))->finfo->format == GST_AUDIO_FORMAT_UNKNOWN)) { DroidMediaCodecMetaData md; DroidMediaRect crop; /* TODO: get rid of that */ GstAudioInfo audio_info; /* renamed from 'info' to avoid shadowing the GstMapInfo above */ memset (&md, 0x0, sizeof (md)); droid_media_codec_get_output_info (dec->codec, &md, &crop); GST_INFO_OBJECT (dec, "output rate=%d, output channels=%d", md.sample_rate, md.channels); gst_audio_info_init (&audio_info); gst_audio_info_set_format (&audio_info, GST_AUDIO_FORMAT_S16, md.sample_rate, md.channels, NULL); if (!gst_audio_decoder_set_output_format (decoder, &audio_info)) { flow_ret = GST_FLOW_ERROR; goto out; } dec->info = gst_audio_decoder_get_audio_info (GST_AUDIO_DECODER (dec)); } out = gst_audio_decoder_allocate_output_buffer (decoder, encoded->data.size); gst_buffer_map (out, &info, GST_MAP_WRITE); orc_memcpy (info.data, encoded->data.data, encoded->data.size); gst_buffer_unmap (out, &info); // GST_WARNING_OBJECT (dec, "bpf %d, bps %d", dec->info->bpf, GST_AUDIO_INFO_BPS(dec->info)); if (dec->spf == -1 || (encoded->data.size == dec->spf * dec->info->bpf && gst_adapter_available (dec->adapter) == 0)) { /* fast path. no need for anything */ goto push; } gst_adapter_push (dec->adapter, out); if (gst_adapter_available (dec->adapter) >= dec->spf * dec->info->bpf) { out = gst_adapter_take_buffer (dec->adapter, dec->spf * dec->info->bpf); } else { flow_ret = GST_FLOW_OK; goto out; } push: GST_DEBUG_OBJECT (dec, "pushing %" G_GSIZE_FORMAT " bytes out", gst_buffer_get_size (out)); flow_ret = gst_audio_decoder_finish_frame (decoder, out, 1); if (flow_ret == GST_FLOW_OK || flow_ret == GST_FLOW_FLUSHING) { goto out; } else if (flow_ret == GST_FLOW_EOS) { GST_INFO_OBJECT (dec, "eos"); } else if (flow_ret < GST_FLOW_OK) { GST_ELEMENT_ERROR (dec, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (flow_ret))); } out: dec->downstream_flow_ret = flow_ret; GST_AUDIO_DECODER_STREAM_UNLOCK (decoder); }
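A sketch of the adapter logic above: decoded bytes accumulate in a GstAdapter and a buffer is taken only once a full frame (spf samples times bpf bytes per frame) is available. Helper name is hypothetical.

#include <gst/base/gstadapter.h>

static GstBuffer *
take_whole_frame (GstAdapter *adapter, gint spf, gint bpf)
{
  gsize frame_bytes = (gsize) spf * bpf;

  if (gst_adapter_available (adapter) < frame_bytes)
    return NULL;                /* not enough queued yet; wait for more */
  return gst_adapter_take_buffer (adapter, frame_bytes);
}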
static gboolean gst_amc_video_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state) { GstAmcVideoDec *self; GstAmcFormat *format; const gchar *mime; gboolean is_format_change = FALSE; gboolean needs_disable = FALSE; gchar *format_string; guint8 *codec_data = NULL; gsize codec_data_size = 0; GError *err = NULL; self = GST_AMC_VIDEO_DEC (decoder); GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps); /* Check if the caps change is a real format change or if only irrelevant * parts of the caps have changed or nothing at all. */ is_format_change |= self->color_format_info.width != state->info.width; is_format_change |= self->color_format_info.height != state->info.height; if (state->codec_data) { GstMapInfo cminfo; gst_buffer_map (state->codec_data, &cminfo, GST_MAP_READ); codec_data = g_memdup (cminfo.data, cminfo.size); codec_data_size = cminfo.size; is_format_change |= (!self->codec_data || self->codec_data_size != codec_data_size || memcmp (self->codec_data, codec_data, codec_data_size) != 0); gst_buffer_unmap (state->codec_data, &cminfo); } else if (self->codec_data) { is_format_change |= TRUE; } needs_disable = self->started; /* If the component is already started and a real format change happens * we have to restart the component. If no real format change * happened we can just exit here. */ if (needs_disable && !is_format_change) { g_free (codec_data); codec_data = NULL; codec_data_size = 0; /* Framerate or something minor changed */ self->input_state_changed = TRUE; if (self->input_state) gst_video_codec_state_unref (self->input_state); self->input_state = gst_video_codec_state_ref (state); GST_DEBUG_OBJECT (self, "Already running and caps did not change the format"); return TRUE; } if (needs_disable && is_format_change) { gst_amc_video_dec_drain (self); GST_VIDEO_DECODER_STREAM_UNLOCK (self); gst_amc_video_dec_stop (GST_VIDEO_DECODER (self)); GST_VIDEO_DECODER_STREAM_LOCK (self); gst_amc_video_dec_close (GST_VIDEO_DECODER (self)); if (!gst_amc_video_dec_open (GST_VIDEO_DECODER (self))) { GST_ERROR_OBJECT (self, "Failed to open codec again"); return FALSE; } if (!gst_amc_video_dec_start (GST_VIDEO_DECODER (self))) { GST_ERROR_OBJECT (self, "Failed to start codec again"); return FALSE; } } /* srcpad task is not running at this point */ if (self->input_state) gst_video_codec_state_unref (self->input_state); self->input_state = NULL; g_free (self->codec_data); self->codec_data = codec_data; self->codec_data_size = codec_data_size; mime = caps_to_mime (state->caps); if (!mime) { GST_ERROR_OBJECT (self, "Failed to convert caps to mime"); return FALSE; } format = gst_amc_format_new_video (mime, state->info.width, state->info.height, &err); if (!format) { GST_ERROR_OBJECT (self, "Failed to create video format"); GST_ELEMENT_ERROR_FROM_ERROR (self, err); return FALSE; } /* FIXME: This buffer needs to be valid until the codec is stopped again */ if (self->codec_data) { gst_amc_format_set_buffer (format, "csd-0", self->codec_data, self->codec_data_size, &err); if (err) GST_ELEMENT_WARNING_FROM_ERROR (self, err); } format_string = gst_amc_format_to_string (format, &err); if (err) GST_ELEMENT_WARNING_FROM_ERROR (self, err); GST_DEBUG_OBJECT (self, "Configuring codec with format: %s", GST_STR_NULL (format_string)); g_free (format_string); if (!gst_amc_codec_configure (self->codec, format, 0, &err)) { GST_ERROR_OBJECT (self, "Failed to configure codec"); GST_ELEMENT_ERROR_FROM_ERROR (self, err); gst_amc_format_free (format); return FALSE; } gst_amc_format_free (format); if (!gst_amc_codec_start (self->codec,
&err)) { GST_ERROR_OBJECT (self, "Failed to start codec"); GST_ELEMENT_ERROR_FROM_ERROR (self, err); return FALSE; } self->started = TRUE; self->input_state = gst_video_codec_state_ref (state); self->input_state_changed = TRUE; /* Start the srcpad loop again */ self->flushing = FALSE; self->downstream_flow_ret = GST_FLOW_OK; gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self), (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL); return TRUE; }
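A sketch of the codec-data comparison driving is_format_change above: a change in presence, size, or content of the codec data forces a codec restart. Helper name is hypothetical.

#include <glib.h>
#include <string.h>

static gboolean
codec_data_changed (const guint8 *old_data, gsize old_size,
    const guint8 *new_data, gsize new_size)
{
  if (old_data == NULL || old_size != new_size)
    return TRUE;
  return memcmp (old_data, new_data, new_size) != 0;
}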
static GstFlowReturn gst_speex_dec_parse_header (GstSpeexDec * dec, GstBuffer * buf) { GstMapInfo map; GstAudioInfo info; static const GstAudioChannelPosition chan_pos[2][2] = { {GST_AUDIO_CHANNEL_POSITION_MONO}, {GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT} }; /* get the header */ gst_buffer_map (buf, &map, GST_MAP_READ); dec->header = speex_packet_to_header ((gchar *) map.data, map.size); gst_buffer_unmap (buf, &map); if (!dec->header) goto no_header; if (dec->header->mode >= SPEEX_NB_MODES || dec->header->mode < 0) goto mode_too_old; dec->mode = speex_lib_get_mode (dec->header->mode); /* initialize the decoder */ dec->state = speex_decoder_init (dec->mode); if (!dec->state) goto init_failed; speex_decoder_ctl (dec->state, SPEEX_SET_ENH, &dec->enh); speex_decoder_ctl (dec->state, SPEEX_GET_FRAME_SIZE, &dec->frame_size); if (dec->header->nb_channels != 1) { dec->stereo = speex_stereo_state_init (); dec->callback.callback_id = SPEEX_INBAND_STEREO; dec->callback.func = speex_std_stereo_request_handler; dec->callback.data = dec->stereo; speex_decoder_ctl (dec->state, SPEEX_SET_HANDLER, &dec->callback); } speex_decoder_ctl (dec->state, SPEEX_SET_SAMPLING_RATE, &dec->header->rate); dec->frame_duration = gst_util_uint64_scale_int (dec->frame_size, GST_SECOND, dec->header->rate); speex_bits_init (&dec->bits); /* set caps */ gst_audio_info_init (&info); gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, dec->header->rate, dec->header->nb_channels, chan_pos[dec->header->nb_channels - 1]); if (!gst_audio_decoder_set_output_format (GST_AUDIO_DECODER (dec), &info)) goto nego_failed; return GST_FLOW_OK; /* ERRORS */ no_header: { GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, (NULL), ("couldn't read header")); return GST_FLOW_ERROR; } mode_too_old: { GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, (NULL), ("Mode number %d does not (yet/any longer) exist in this version", dec->header->mode)); return GST_FLOW_ERROR; } init_failed: { GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, (NULL), ("couldn't initialize decoder")); return GST_FLOW_ERROR; } nego_failed: { GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, (NULL), ("couldn't negotiate format")); return GST_FLOW_NOT_NEGOTIATED; } }
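A sketch of the frame-duration computation above: samples are scaled to nanoseconds with GStreamer's 64-bit-safe scaler, avoiding the overflow a plain frame_size * GST_SECOND product would risk. Helper name is hypothetical.

#include <gst/gst.h>

static GstClockTime
speex_frame_duration (gint frame_size, gint rate)
{
  /* frame_size * GST_SECOND / rate without intermediate overflow */
  return gst_util_uint64_scale_int (frame_size, GST_SECOND, rate);
}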
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) { NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); nsresult ret = NS_OK; /* * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there * might be concurrent stream operations happening on both decoding and gstreamer * threads which will screw the GStreamer state machine. */ bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3); if (isMP3) { ParseMP3Headers(); } /* We do 3 attempts here: decoding audio and video, decoding video only, * decoding audio only. This allows us to play streams that have one broken * stream but that are otherwise decodeable. */ guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO, static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)}; guint default_flags, current_flags; g_object_get(mPlayBin, "flags", &default_flags, nullptr); GstMessage* message = nullptr; for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) { current_flags = default_flags & flags[i]; g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr); /* reset filter caps to ANY */ GstCaps* caps = gst_caps_new_any(); GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter"); g_object_set(filter, "caps", caps, nullptr); gst_object_unref(filter); filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter"); g_object_set(filter, "caps", caps, nullptr); gst_object_unref(filter); gst_caps_unref(caps); filter = nullptr; if (!(current_flags & GST_PLAY_FLAG_AUDIO)) filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter"); else if (!(current_flags & GST_PLAY_FLAG_VIDEO)) filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter"); if (filter) { /* Little trick: set the target caps to "skip" so that playbin2 fails to * find a decoder for the stream we want to skip. */ GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr); g_object_set(filter, "caps", filterCaps, nullptr); gst_caps_unref(filterCaps); gst_object_unref(filter); } LOG(PR_LOG_DEBUG, "starting metadata pipeline"); if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) { LOG(PR_LOG_DEBUG, "metadata pipeline state change failed"); ret = NS_ERROR_FAILURE; continue; } /* Wait for ASYNC_DONE, which is emitted when the pipeline is built, * prerolled and ready to play. Also watch for errors. */ message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) { LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled"); gst_message_unref(message); ret = NS_OK; break; } else { LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s", gst_message_type_get_name (GST_MESSAGE_TYPE (message))); if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) { GError* error; gchar* debug; gst_message_parse_error(message, &error, &debug); LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug); g_error_free(error); g_free(debug); } /* Unexpected stream close/EOS or other error. We'll give up if all * streams are in error/eos. 
*/ gst_element_set_state(mPlayBin, GST_STATE_NULL); gst_message_unref(message); ret = NS_ERROR_FAILURE; } } if (NS_SUCCEEDED(ret)) ret = CheckSupportedFormats(); if (NS_FAILED(ret)) /* we couldn't get this to play */ return ret; /* report the duration */ gint64 duration; if (isMP3 && mMP3FrameParser.IsMP3()) { // The MP3FrameParser has reported a duration; use that over the gstreamer // reported duration for inter-platform consistency. ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); mUseParserDuration = true; mLastParserDuration = mMP3FrameParser.GetDuration(); mDecoder->SetMediaDuration(mLastParserDuration); } else { LOG(PR_LOG_DEBUG, "querying duration"); // Otherwise use the gstreamer duration. #if GST_VERSION_MAJOR >= 1 if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME, &duration)) { #else GstFormat format = GST_FORMAT_TIME; if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) && format == GST_FORMAT_TIME) { #endif ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor()); LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration)); duration = GST_TIME_AS_USECONDS (duration); mDecoder->SetMediaDuration(duration); } else { mDecoder->SetMediaSeekable(false); } } int n_video = 0, n_audio = 0; g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr); mInfo.mVideo.mHasVideo = n_video != 0; mInfo.mAudio.mHasAudio = n_audio != 0; *aInfo = mInfo; *aTags = nullptr; // Watch the pipeline for fatal errors #if GST_VERSION_MAJOR >= 1 gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr); #else gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this); #endif /* set the pipeline to PLAYING so that it starts decoding and queueing data in * the appsinks */ gst_element_set_state(mPlayBin, GST_STATE_PLAYING); return NS_OK; } nsresult GStreamerReader::CheckSupportedFormats() { bool done = false; bool unsupported = false; GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin)); while (!done) { GstIteratorResult res; GstElement* element; #if GST_VERSION_MAJOR >= 1 GValue value = {0,}; res = gst_iterator_next(it, &value); #else res = gst_iterator_next(it, (void **) &element); #endif switch(res) { case GST_ITERATOR_OK: { #if GST_VERSION_MAJOR >= 1 element = GST_ELEMENT (g_value_get_object (&value)); #endif GstElementFactory* factory = gst_element_get_factory(element); if (factory) { const char* klass = gst_element_factory_get_klass(factory); GstPad* pad = gst_element_get_static_pad(element, "sink"); if (pad) { GstCaps* caps; #if GST_VERSION_MAJOR >= 1 caps = gst_pad_get_current_caps(pad); #else caps = gst_pad_get_negotiated_caps(pad); #endif if (caps) { /* check for demuxers but ignore elements like id3demux */ if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata")) unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps); else if (strstr (klass, "Decoder") && !strstr(klass, "Generic")) unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps); gst_caps_unref(caps); } gst_object_unref(pad); } } #if GST_VERSION_MAJOR >= 1 g_value_unset (&value); #else gst_object_unref(element); #endif done = unsupported; break; } case GST_ITERATOR_RESYNC: unsupported = false; done = false; break; case GST_ITERATOR_ERROR: done = true; break; case GST_ITERATOR_DONE: done = true; break; } } return unsupported ? 
NS_ERROR_FAILURE : NS_OK; } nsresult GStreamerReader::ResetDecode() { nsresult res = NS_OK; LOG(PR_LOG_DEBUG, "reset decode"); if (NS_FAILED(MediaDecoderReader::ResetDecode())) { res = NS_ERROR_FAILURE; } mVideoQueue.Reset(); mAudioQueue.Reset(); mVideoSinkBufferCount = 0; mAudioSinkBufferCount = 0; mReachedAudioEos = false; mReachedVideoEos = false; #if GST_VERSION_MAJOR >= 1 mConfigureAlignment = true; #endif LOG(PR_LOG_DEBUG, "reset decode done"); return res; } bool GStreamerReader::DecodeAudioData() { NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); GstBuffer *buffer = nullptr; { ReentrantMonitorAutoEnter mon(mGstThreadsMonitor); if (mReachedAudioEos && !mAudioSinkBufferCount) { return false; } /* Wait something to be decoded before return or continue */ if (!mAudioSinkBufferCount) { if(!mVideoSinkBufferCount) { /* We have nothing decoded so it makes no sense to return to the state machine * as it will call us back immediately, we'll return again and so on, wasting * CPU cycles for no job done. So, block here until there is either video or * audio data available */ mon.Wait(); if (!mAudioSinkBufferCount) { /* There is still no audio data available, so either there is video data or * something else has happened (Eos, etc...). Return to the state machine * to process it. */ return true; } } else { return true; } } #if GST_VERSION_MAJOR >= 1 GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink); buffer = gst_buffer_ref(gst_sample_get_buffer(sample)); gst_sample_unref(sample); #else buffer = gst_app_sink_pull_buffer(mAudioAppSink); #endif mAudioSinkBufferCount--; } int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer); timestamp = gst_segment_to_stream_time(&mAudioSegment, GST_FORMAT_TIME, timestamp); timestamp = GST_TIME_AS_USECONDS(timestamp); int64_t offset = GST_BUFFER_OFFSET(buffer); guint8* data; #if GST_VERSION_MAJOR >= 1 GstMapInfo info; gst_buffer_map(buffer, &info, GST_MAP_READ); unsigned int size = info.size; data = info.data; #else unsigned int size = GST_BUFFER_SIZE(buffer); data = GST_BUFFER_DATA(buffer); #endif int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels; typedef AudioCompactor::NativeCopy GstCopy; mAudioCompactor.Push(offset, timestamp, mInfo.mAudio.mRate, frames, mInfo.mAudio.mChannels, GstCopy(data, size, mInfo.mAudio.mChannels)); #if GST_VERSION_MAJOR >= 1 gst_buffer_unmap(buffer, &info); #endif gst_buffer_unref(buffer); return true; }
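A sketch of the GStreamer 1.x appsink pull pattern used in DecodeAudioData(): the buffer must be reffed before the sample is released, since the sample owns it. Helper name is hypothetical.

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

static GstBuffer *
pull_buffer (GstAppSink *sink)
{
  GstSample *sample = gst_app_sink_pull_sample (sink);
  GstBuffer *buffer;

  if (sample == NULL)
    return NULL;                /* EOS or flushing */
  buffer = gst_buffer_ref (gst_sample_get_buffer (sample));
  gst_sample_unref (sample);
  return buffer;
}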
static GstFlowReturn gst_speex_dec_parse_data (GstSpeexDec * dec, GstBuffer * buf) { GstFlowReturn res = GST_FLOW_OK; gint i, fpp; SpeexBits *bits; GstMapInfo map; if (!dec->frame_duration) goto not_negotiated; if (G_LIKELY (gst_buffer_get_size (buf))) { /* send data to the bitstream */ gst_buffer_map (buf, &map, GST_MAP_READ); speex_bits_read_from (&dec->bits, (gchar *) map.data, map.size); gst_buffer_unmap (buf, &map); fpp = dec->header->frames_per_packet; bits = &dec->bits; GST_DEBUG_OBJECT (dec, "received buffer of size %" G_GSIZE_FORMAT ", fpp %d, %d bits", map.size, fpp, speex_bits_remaining (bits)); } else { /* FIXME ? actually consider how much concealment is needed */ /* concealment data, pass NULL as the bits parameters */ GST_DEBUG_OBJECT (dec, "creating concealment data"); fpp = dec->header->frames_per_packet; bits = NULL; } /* now decode each frame, catering for unknown number of them (e.g. rtp) */ for (i = 0; i < fpp; i++) { GstBuffer *outbuf; gboolean corrupted = FALSE; gint ret; GST_LOG_OBJECT (dec, "decoding frame %d/%d, %d bits remaining", i, fpp, bits ? speex_bits_remaining (bits) : -1); #if 0 res = gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), GST_BUFFER_OFFSET_NONE, dec->frame_size * dec->header->nb_channels * 2, GST_PAD_CAPS (GST_AUDIO_DECODER_SRC_PAD (dec)), &outbuf); if (res != GST_FLOW_OK) { GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res)); return res; } #endif /* FIXME, we can use a bufferpool because we have fixed size buffers. We * could also use an allocator */ outbuf = gst_buffer_new_allocate (NULL, dec->frame_size * dec->header->nb_channels * 2, NULL); gst_buffer_map (outbuf, &map, GST_MAP_WRITE); ret = speex_decode_int (dec->state, bits, (spx_int16_t *) map.data); if (ret == -1) { /* uh? end of stream */ GST_WARNING_OBJECT (dec, "Unexpected end of stream found"); corrupted = TRUE; } else if (ret == -2) { GST_WARNING_OBJECT (dec, "Decoding error: corrupted stream?"); corrupted = TRUE; } if (bits && speex_bits_remaining (bits) < 0) { GST_WARNING_OBJECT (dec, "Decoding overflow: corrupted stream?"); corrupted = TRUE; } if (dec->header->nb_channels == 2) speex_decode_stereo_int ((spx_int16_t *) map.data, dec->frame_size, dec->stereo); gst_buffer_unmap (outbuf, &map); if (!corrupted) { res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1); } else { res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), NULL, 1); gst_buffer_unref (outbuf); } if (res != GST_FLOW_OK) { GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res)); break; } } return res; /* ERRORS */ not_negotiated: { GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL), ("decoder not initialized")); return GST_FLOW_NOT_NEGOTIATED; } }
static GstFlowReturn gst_mpeg2dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame) { GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder); GstBuffer *buf = frame->input_buffer; GstMapInfo minfo; const mpeg2_info_t *info; mpeg2_state_t state; gboolean done = FALSE; GstFlowReturn ret = GST_FLOW_OK; GST_LOG_OBJECT (mpeg2dec, "received frame %d, timestamp %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, frame->system_frame_number, GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->duration)); gst_buffer_ref (buf); if (!gst_buffer_map (buf, &minfo, GST_MAP_READ)) { GST_ERROR_OBJECT (mpeg2dec, "Failed to map input buffer"); return GST_FLOW_ERROR; } info = mpeg2dec->info; GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer"); mpeg2_buffer (mpeg2dec->decoder, minfo.data, minfo.data + minfo.size); GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer done"); while (!done) { GST_LOG_OBJECT (mpeg2dec, "calling parse"); state = mpeg2_parse (mpeg2dec->decoder); GST_DEBUG_OBJECT (mpeg2dec, "parse state %d", state); switch (state) { #if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0) case STATE_SEQUENCE_MODIFIED: GST_DEBUG_OBJECT (mpeg2dec, "sequence modified"); mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE; gst_mpeg2dec_clear_buffers (mpeg2dec); /* fall through */ #endif case STATE_SEQUENCE: ret = handle_sequence (mpeg2dec, info); /* if there is an error handling the sequence * reset the decoder, maybe something more elegant * could be done. */ if (ret == GST_FLOW_ERROR) { GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE, ("decoding error"), ("Bad sequence header"), ret); gst_video_decoder_drop_frame (decoder, frame); gst_mpeg2dec_flush (decoder); goto done; } break; case STATE_SEQUENCE_REPEATED: GST_DEBUG_OBJECT (mpeg2dec, "sequence repeated"); break; case STATE_GOP: GST_DEBUG_OBJECT (mpeg2dec, "gop"); break; case STATE_PICTURE: ret = handle_picture (mpeg2dec, info, frame); break; case STATE_SLICE_1ST: GST_LOG_OBJECT (mpeg2dec, "1st slice of frame encountered"); break; case STATE_PICTURE_2ND: GST_LOG_OBJECT (mpeg2dec, "Second picture header encountered. Decoding 2nd field"); break; #if MPEG2_RELEASE >= MPEG2_VERSION (0, 4, 0) case STATE_INVALID_END: GST_DEBUG_OBJECT (mpeg2dec, "invalid end"); #endif case STATE_END: GST_DEBUG_OBJECT (mpeg2dec, "end"); case STATE_SLICE: GST_DEBUG_OBJECT (mpeg2dec, "display_fbuf:%p, discard_fbuf:%p", info->display_fbuf, info->discard_fbuf); if (info->display_fbuf && info->display_fbuf->id) { ret = handle_slice (mpeg2dec, info); } else { GST_DEBUG_OBJECT (mpeg2dec, "no picture to display"); } if (info->discard_fbuf && info->discard_fbuf->id) gst_mpeg2dec_discard_buffer (mpeg2dec, GPOINTER_TO_INT (info->discard_fbuf->id) - 1); if (state != STATE_SLICE) { gst_mpeg2dec_clear_buffers (mpeg2dec); } break; case STATE_BUFFER: done = TRUE; break; /* error */ case STATE_INVALID: GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE, ("decoding error"), ("Reached libmpeg2 invalid state"), ret); continue; default: GST_ERROR_OBJECT (mpeg2dec, "Unknown libmpeg2 state %d, FIXME", state); ret = GST_FLOW_OK; gst_video_codec_frame_unref (frame); goto done; } if (ret != GST_FLOW_OK) { GST_DEBUG_OBJECT (mpeg2dec, "exit loop, reason %s", gst_flow_get_name (ret)); break; } } gst_video_codec_frame_unref (frame); done: gst_buffer_unmap (buf, &minfo); gst_buffer_unref (buf); return ret; }
static GstFlowReturn gst_amc_audio_dec_handle_frame (GstAudioDecoder * decoder, GstBuffer * inbuf) { GstAmcAudioDec *self; gint idx; GstAmcBuffer *buf; GstAmcBufferInfo buffer_info; guint offset = 0; GstClockTime timestamp, duration, timestamp_offset = 0; GstMapInfo minfo; memset (&minfo, 0, sizeof (minfo)); self = GST_AMC_AUDIO_DEC (decoder); GST_DEBUG_OBJECT (self, "Handling frame"); /* Make sure to keep a reference to the input here, * it can be unreffed from the other thread if * finish_frame() is called */ if (inbuf) inbuf = gst_buffer_ref (inbuf); if (!self->started) { GST_ERROR_OBJECT (self, "Codec not started yet"); if (inbuf) gst_buffer_unref (inbuf); return GST_FLOW_NOT_NEGOTIATED; } if (self->eos) { GST_WARNING_OBJECT (self, "Got frame after EOS"); if (inbuf) gst_buffer_unref (inbuf); return GST_FLOW_EOS; } if (self->flushing) goto flushing; if (self->downstream_flow_ret != GST_FLOW_OK) goto downstream_error; if (!inbuf) return gst_amc_audio_dec_drain (self); timestamp = GST_BUFFER_PTS (inbuf); duration = GST_BUFFER_DURATION (inbuf); gst_buffer_map (inbuf, &minfo, GST_MAP_READ); while (offset < minfo.size) { /* Make sure to release the base class stream lock, otherwise * _loop() can't call _finish_frame() and we might block forever * because no input buffers are released */ GST_AUDIO_DECODER_STREAM_UNLOCK (self); /* Wait at most 100ms here, some codecs don't fail dequeueing if * the codec is flushing, causing deadlocks during shutdown */ idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000); GST_AUDIO_DECODER_STREAM_LOCK (self); if (idx < 0) { if (self->flushing) goto flushing; switch (idx) { case INFO_TRY_AGAIN_LATER: GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out"); continue; /* next try */ break; case G_MININT: GST_ERROR_OBJECT (self, "Failed to dequeue input buffer"); goto dequeue_error; default: g_assert_not_reached (); break; } continue; } if (idx >= self->n_input_buffers) goto invalid_buffer_index; if (self->flushing) goto flushing; if (self->downstream_flow_ret != GST_FLOW_OK) { memset (&buffer_info, 0, sizeof (buffer_info)); gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info); goto downstream_error; } /* Now handle the frame */ /* Copy the buffer content in chunks of size as requested * by the port */ buf = &self->input_buffers[idx]; memset (&buffer_info, 0, sizeof (buffer_info)); buffer_info.offset = 0; buffer_info.size = MIN (minfo.size - offset, buf->size); orc_memcpy (buf->data, minfo.data + offset, buffer_info.size); /* Interpolate timestamps if we're passing the buffer * in multiple chunks */ if (offset != 0 && duration != GST_CLOCK_TIME_NONE) { timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size); } if (timestamp != GST_CLOCK_TIME_NONE) { buffer_info.presentation_time_us = gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND); self->last_upstream_ts = timestamp + timestamp_offset; } if (duration != GST_CLOCK_TIME_NONE) self->last_upstream_ts += duration; if (offset == 0) { if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DELTA_UNIT)) buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME; } offset += buffer_info.size; GST_DEBUG_OBJECT (self, "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x", idx, buffer_info.size, buffer_info.presentation_time_us, buffer_info.flags); if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info)) goto queue_error; } gst_buffer_unmap (inbuf, &minfo); gst_buffer_unref (inbuf); return self->downstream_flow_ret; downstream_error: { 
GST_ERROR_OBJECT (self, "Downstream returned %s", gst_flow_get_name (self->downstream_flow_ret)); if (minfo.data) gst_buffer_unmap (inbuf, &minfo); if (inbuf) gst_buffer_unref (inbuf); return self->downstream_flow_ret; } invalid_buffer_index: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Invalid input buffer index %d of %d", idx, self->n_input_buffers)); if (minfo.data) gst_buffer_unmap (inbuf, &minfo); if (inbuf) gst_buffer_unref (inbuf); return GST_FLOW_ERROR; } dequeue_error: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Failed to dequeue input buffer")); if (minfo.data) gst_buffer_unmap (inbuf, &minfo); if (inbuf) gst_buffer_unref (inbuf); return GST_FLOW_ERROR; } queue_error: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Failed to queue input buffer")); if (minfo.data) gst_buffer_unmap (inbuf, &minfo); if (inbuf) gst_buffer_unref (inbuf); return GST_FLOW_ERROR; } flushing: { GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING"); if (minfo.data) gst_buffer_unmap (inbuf, &minfo); if (inbuf) gst_buffer_unref (inbuf); return GST_FLOW_FLUSHING; } }
static GstFlowReturn gst_opus_enc_encode (GstOpusEnc * enc, GstBuffer * buf) { guint8 *bdata = NULL, *data, *mdata = NULL; gsize bsize, size; gsize bytes; GstFlowReturn ret = GST_FLOW_OK; GstMapInfo map; GstMapInfo omap; gint outsize; GstBuffer *outbuf; guint max_payload_size; gint frame_samples; g_mutex_lock (&enc->property_lock); bytes = enc->frame_samples * enc->n_channels * 2; max_payload_size = enc->max_payload_size; frame_samples = enc->frame_samples; g_mutex_unlock (&enc->property_lock); if (G_LIKELY (buf)) { gst_buffer_map (buf, &map, GST_MAP_READ); bdata = map.data; bsize = map.size; if (G_UNLIKELY (bsize % bytes)) { GST_DEBUG_OBJECT (enc, "draining; adding silence samples"); size = ((bsize / bytes) + 1) * bytes; mdata = g_malloc0 (size); memcpy (mdata, bdata, bsize); data = mdata; } else { data = bdata; size = bsize; } } else { GST_DEBUG_OBJECT (enc, "nothing to drain"); goto done; } g_assert (size == bytes); outbuf = gst_buffer_new_and_alloc (max_payload_size * enc->n_channels); if (!outbuf) goto done; GST_DEBUG_OBJECT (enc, "encoding %d samples (%d bytes)", frame_samples, (int) bytes); gst_buffer_map (outbuf, &omap, GST_MAP_WRITE); outsize = opus_multistream_encode (enc->state, (const gint16 *) data, frame_samples, omap.data, max_payload_size * enc->n_channels); gst_buffer_unmap (outbuf, &omap); if (outsize < 0) { GST_ERROR_OBJECT (enc, "Encoding failed: %d", outsize); ret = GST_FLOW_ERROR; goto done; } else if (outsize > max_payload_size) { GST_WARNING_OBJECT (enc, "Encoded size %d is higher than max payload size (%d bytes)", outsize, max_payload_size); ret = GST_FLOW_ERROR; goto done; } GST_DEBUG_OBJECT (enc, "Output packet is %d bytes", outsize); gst_buffer_set_size (outbuf, outsize); ret = gst_audio_encoder_finish_frame (GST_AUDIO_ENCODER (enc), outbuf, frame_samples); done: if (bdata) gst_buffer_unmap (buf, &map); g_free (mdata); return ret; }
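A sketch of the drain padding above: a trailing partial frame of interleaved S16 audio is zero-extended (zeroes are silence) to a whole Opus frame before encoding. Assumes size is not already a multiple of frame_bytes, as in the branch that uses it; helper name is hypothetical.

#include <glib.h>
#include <string.h>

static guint8 *
pad_to_frame (const guint8 *data, gsize size, gsize frame_bytes,
    gsize *padded_size)
{
  guint8 *mdata;

  *padded_size = ((size / frame_bytes) + 1) * frame_bytes;
  mdata = g_malloc0 (*padded_size);   /* zero == silence for S16 PCM */
  memcpy (mdata, data, size);
  return mdata;                       /* caller g_free()s it */
}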
static void gst_amc_audio_dec_loop (GstAmcAudioDec * self) { GstFlowReturn flow_ret = GST_FLOW_OK; gboolean is_eos; GstAmcBufferInfo buffer_info; gint idx; GST_AUDIO_DECODER_STREAM_LOCK (self); retry: /*if (self->input_caps_changed) { idx = INFO_OUTPUT_FORMAT_CHANGED; } else { */ GST_DEBUG_OBJECT (self, "Waiting for available output buffer"); GST_AUDIO_DECODER_STREAM_UNLOCK (self); /* Wait at most 100ms here, some codecs don't fail dequeueing if * the codec is flushing, causing deadlocks during shutdown */ idx = gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000); GST_AUDIO_DECODER_STREAM_LOCK (self); /*} */ if (idx < 0) { if (self->flushing) goto flushing; switch (idx) { case INFO_OUTPUT_BUFFERS_CHANGED:{ GST_DEBUG_OBJECT (self, "Output buffers have changed"); if (self->output_buffers) gst_amc_codec_free_buffers (self->output_buffers, self->n_output_buffers); self->output_buffers = gst_amc_codec_get_output_buffers (self->codec, &self->n_output_buffers); if (!self->output_buffers) goto get_output_buffers_error; break; } case INFO_OUTPUT_FORMAT_CHANGED:{ GstAmcFormat *format; gchar *format_string; GST_DEBUG_OBJECT (self, "Output format has changed"); format = gst_amc_codec_get_output_format (self->codec); if (!format) goto format_error; format_string = gst_amc_format_to_string (format); GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string); g_free (format_string); if (!gst_amc_audio_dec_set_src_caps (self, format)) { gst_amc_format_free (format); goto format_error; } gst_amc_format_free (format); if (self->output_buffers) gst_amc_codec_free_buffers (self->output_buffers, self->n_output_buffers); self->output_buffers = gst_amc_codec_get_output_buffers (self->codec, &self->n_output_buffers); if (!self->output_buffers) goto get_output_buffers_error; goto retry; break; } case INFO_TRY_AGAIN_LATER: GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out"); goto retry; break; case G_MININT: GST_ERROR_OBJECT (self, "Failure dequeueing output buffer"); goto dequeue_error; break; default: g_assert_not_reached (); break; } goto retry; } GST_DEBUG_OBJECT (self, "Got output buffer at index %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x", idx, buffer_info.size, buffer_info.presentation_time_us, buffer_info.flags); is_eos = ! 
!(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM); self->n_buffers++; if (buffer_info.size > 0) { GstAmcAudioDecClass *klass = GST_AMC_AUDIO_DEC_GET_CLASS (self); GstBuffer *outbuf; GstAmcBuffer *buf; GstMapInfo minfo; /* This sometimes happens at EOS or if the input is not properly framed; * we have no output buffer to copy from here, so all we can do is error out */ if (idx >= self->n_output_buffers) goto invalid_buffer_index; if (strcmp (klass->codec_info->name, "OMX.google.mp3.decoder") == 0) { /* Google's MP3 decoder outputs garbage in the first output buffer * so we just drop it here */ if (self->n_buffers == 1) { GST_DEBUG_OBJECT (self, "Skipping first buffer of Google MP3 decoder output"); goto done; } } outbuf = gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (self), buffer_info.size); if (!outbuf) goto failed_allocate; gst_buffer_map (outbuf, &minfo, GST_MAP_WRITE); buf = &self->output_buffers[idx]; if (self->needs_reorder) { gint i, n_samples, c, n_channels; gint *reorder_map = self->reorder_map; gint16 *dest, *source; dest = (gint16 *) minfo.data; source = (gint16 *) (buf->data + buffer_info.offset); n_samples = buffer_info.size / self->info.bpf; n_channels = self->info.channels; for (i = 0; i < n_samples; i++) { for (c = 0; c < n_channels; c++) { dest[i * n_channels + reorder_map[c]] = source[i * n_channels + c]; } } } else { orc_memcpy (minfo.data, buf->data + buffer_info.offset, buffer_info.size); } gst_buffer_unmap (outbuf, &minfo); /* FIXME: We should get one decoded input frame here for * every buffer. If this is not the case somewhere, we will * error out at some point and will need to add workarounds */ flow_ret = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (self), outbuf, 1); } done: if (!gst_amc_codec_release_output_buffer (self->codec, idx, TRUE)) goto failed_release; if (is_eos || flow_ret == GST_FLOW_EOS) { GST_AUDIO_DECODER_STREAM_UNLOCK (self); g_mutex_lock (&self->drain_lock); if (self->draining) { GST_DEBUG_OBJECT (self, "Drained"); self->draining = FALSE; g_cond_broadcast (&self->drain_cond); } else if (flow_ret == GST_FLOW_OK) { GST_DEBUG_OBJECT (self, "Component signalled EOS"); flow_ret = GST_FLOW_EOS; } g_mutex_unlock (&self->drain_lock); GST_AUDIO_DECODER_STREAM_LOCK (self); } else { GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret)); } self->downstream_flow_ret = flow_ret; if (flow_ret != GST_FLOW_OK) goto flow_error; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; dequeue_error: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Failed to dequeue output buffer")); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_ERROR; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } get_output_buffers_error: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Failed to get output buffers")); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_ERROR; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } format_error: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Failed to handle format")); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_ERROR; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } failed_release: { GST_ELEMENT_ERROR (self, LIBRARY,
FAILED, (NULL), ("Failed to release output buffer index %d", idx)); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_ERROR; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } flushing: { GST_DEBUG_OBJECT (self, "Flushing -- stopping task"); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_FLUSHING; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } flow_error: { if (flow_ret == GST_FLOW_EOS) { GST_DEBUG_OBJECT (self, "EOS"); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) { GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (flow_ret))); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); } GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } invalid_buffer_index: { GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL), ("Invalid input buffer index %d of %d", idx, self->n_input_buffers)); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_ERROR; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } failed_allocate: { GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Failed to allocate output buffer")); gst_pad_push_event (GST_AUDIO_DECODER_SRC_PAD (self), gst_event_new_eos ()); gst_pad_pause_task (GST_AUDIO_DECODER_SRC_PAD (self)); self->downstream_flow_ret = GST_FLOW_ERROR; GST_AUDIO_DECODER_STREAM_UNLOCK (self); return; } }
static GstFlowReturn gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer) { GstNiImaqSrc *src = GST_NIIMAQSRC (psrc); GstFlowReturn ret = GST_FLOW_OK; GstClockTime timestamp = GST_CLOCK_TIME_NONE; /* stays NONE on the no-copy path */ GstClockTime duration; uInt32 copied_number; uInt32 copied_index; Int32 rval; uInt32 dropped; gboolean no_copy; GstMapInfo minfo; /* we can only do a no-copy if strides are properly byte aligned */ no_copy = src->avoid_copy && src->width == src->rowpixels; /* start the IMAQ acquisition session if we haven't done so yet */ if (!src->session_started) { if (!gst_niimaqsrc_start_acquisition (src)) { GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Unable to start acquisition."), (NULL)); return GST_FLOW_ERROR; } } if (no_copy) { GST_LOG_OBJECT (src, "Sending IMAQ buffer #%d along without copying", src->cumbufnum); *buffer = gst_buffer_new (); if (G_UNLIKELY (*buffer == NULL)) goto error; } else { GST_LOG_OBJECT (src, "Copying IMAQ buffer #%d, size %d", src->cumbufnum, src->framesize); ret = GST_BASE_SRC_CLASS (gst_niimaqsrc_parent_class)->alloc (GST_BASE_SRC (src), 0, src->framesize, buffer); if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Failed to allocate buffer"), ("Failed to get downstream pad to allocate buffer")); goto error; } } //{ // guint32 *data; // int i; // rval = imgSessionExamineBuffer2 (src->sid, src->cumbufnum, &copied_number, &data); // for (i=0; i<src->bufsize;i++) // if (data == src->buflist[i]) // break; // timestamp = src->times[i]; // memcpy (GST_BUFFER_DATA (*buffer), data, src->framesize); // src->times[i] = GST_CLOCK_TIME_NONE; // imgSessionReleaseBuffer (src->sid); //TODO: mutex here? //} if (no_copy) { /* FIXME: with callback change, is this broken now? mutex... */ gpointer data; rval = imgSessionExamineBuffer2 (src->sid, src->cumbufnum, &copied_number, &data); gst_buffer_append_memory (*buffer, gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, data, src->framesize, 0, src->framesize, src, gst_niimaqsrc_release_buffer)); } else if (src->width == src->rowpixels) { /* TODO: optionally use ExamineBuffer and byteswap in transfer (to offer BIG_ENDIAN) */ gst_buffer_map (*buffer, &minfo, GST_MAP_WRITE); g_mutex_lock (&src->mutex); rval = imgSessionCopyBufferByNumber (src->sid, src->cumbufnum, minfo.data, IMG_OVERWRITE_GET_OLDEST, &copied_number, &copied_index); timestamp = src->times[copied_index]; src->times[copied_index] = GST_CLOCK_TIME_NONE; g_mutex_unlock (&src->mutex); gst_buffer_unmap (*buffer, &minfo); } else { gst_buffer_map (*buffer, &minfo, GST_MAP_WRITE); g_mutex_lock (&src->mutex); rval = imgSessionCopyAreaByNumber (src->sid, src->cumbufnum, 0, 0, src->height, src->width, minfo.data, src->rowpixels, IMG_OVERWRITE_GET_OLDEST, &copied_number, &copied_index); timestamp = src->times[copied_index]; src->times[copied_index] = GST_CLOCK_TIME_NONE; g_mutex_unlock (&src->mutex); gst_buffer_unmap (*buffer, &minfo); } /* TODO: do this above to reduce copying overhead */ if (src->is_signed) { gint16 *s; /* renamed from 'src'/'dst' to avoid shadowing the element pointer */ guint16 *d; guint i; gst_buffer_map (*buffer, &minfo, GST_MAP_READWRITE); s = (gint16 *) minfo.data; d = (guint16 *) minfo.data; GST_DEBUG_OBJECT (src, "Shifting signed to unsigned"); /* TODO: make this faster */ for (i = 0; i < minfo.size / 2; i++) *d++ = *s++ + 32768; gst_buffer_unmap (*buffer, &minfo); } if (rval) { gst_niimaqsrc_report_imaq_error (rval); GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("failed to copy buffer %d", src->cumbufnum), ("failed to copy buffer %d", src->cumbufnum)); goto error; } /* make guess of duration from timestamp and cumulative buffer number */ if
(GST_CLOCK_TIME_IS_VALID (timestamp)) { duration = timestamp / (copied_number + 1); } else { duration = 33 * GST_MSECOND; } GST_BUFFER_OFFSET (*buffer) = copied_number; GST_BUFFER_OFFSET_END (*buffer) = copied_number + 1; GST_BUFFER_TIMESTAMP (*buffer) = timestamp - gst_element_get_base_time (GST_ELEMENT (src)); GST_BUFFER_DURATION (*buffer) = duration; dropped = copied_number - src->cumbufnum; if (dropped > 0) { src->n_dropped_frames += dropped; GST_WARNING_OBJECT (src, "Asked to copy buffer #%d but was given #%d; just dropped %d frames (%d total)", src->cumbufnum, copied_number, dropped, src->n_dropped_frames); } /* set cumulative buffer number to get next frame */ src->cumbufnum = copied_number + 1; src->n_frames++; if (G_UNLIKELY (src->start_time && !src->start_time_sent)) { GstTagList *tl = gst_tag_list_new (GST_TAG_DATE_TIME, src->start_time, NULL); GstEvent *e = gst_event_new_tag (tl); GST_DEBUG_OBJECT (src, "Sending start time event: %" GST_PTR_FORMAT, e); gst_pad_push_event (GST_BASE_SRC_PAD (src), e); src->start_time_sent = TRUE; } return ret; error: { /* never report success after taking an error path */ if (ret == GST_FLOW_OK) ret = GST_FLOW_ERROR; return ret; } }
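A sketch of the signed-to-unsigned shift done in gst_niimaqsrc_create(): adding 32768 maps int16 [-32768, 32767] onto uint16 [0, 65535], preserving sample order. Helper name is hypothetical.

#include <stdint.h>
#include <stddef.h>

static void
s16_to_u16 (uint16_t *dst, const int16_t *src, size_t n_samples)
{
  size_t i;

  for (i = 0; i < n_samples; i++)
    dst[i] = (uint16_t) (src[i] + 32768);
}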
static GstFlowReturn gst_dirac_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame, gint * skipsize) { int off; guint32 next_header; GstMapInfo map; guint8 *data; gsize size; gboolean have_picture = FALSE; int offset; guint framesize = 0; gst_buffer_map (frame->buffer, &map, GST_MAP_READ); data = map.data; size = map.size; if (G_UNLIKELY (size < 13)) { *skipsize = 1; goto out; } GST_DEBUG ("%" G_GSIZE_FORMAT ": %02x %02x %02x %02x", size, data[0], data[1], data[2], data[3]); if (GST_READ_UINT32_BE (data) != 0x42424344) { GstByteReader reader; gst_byte_reader_init (&reader, data, size); off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff, 0x42424344, 0, size); if (off < 0) { *skipsize = size - 3; goto out; } GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off); GST_DEBUG ("skipping %d", off); *skipsize = off; goto out; } /* have sync, parse chunks */ offset = 0; while (!have_picture) { GST_DEBUG ("offset %d:", offset); if (offset + 13 >= size) { framesize = offset + 13; goto out; } GST_DEBUG ("chunk type %02x", data[offset + 4]); if (GST_READ_UINT32_BE (data + offset) != 0x42424344) { GST_DEBUG ("bad header"); *skipsize = 3; goto out; } next_header = GST_READ_UINT32_BE (data + offset + 5); GST_DEBUG ("next_header %d", next_header); if (next_header == 0) next_header = 13; if (SCHRO_PARSE_CODE_IS_PICTURE (data[offset + 4])) { have_picture = TRUE; } offset += next_header; if (offset >= size) { framesize = offset; goto out; } } framesize = offset; GST_DEBUG ("framesize %d", framesize); g_assert (framesize <= size); if (data[4] == SCHRO_PARSE_CODE_SEQUENCE_HEADER) { GstCaps *caps; GstDiracParse *diracparse = GST_DIRAC_PARSE (parse); DiracSequenceHeader sequence_header; int ret; ret = dirac_sequence_header_parse (&sequence_header, data + 13, size - 13); if (ret) { memcpy (&diracparse->sequence_header, &sequence_header, sizeof (sequence_header)); caps = gst_caps_new_simple ("video/x-dirac", "width", G_TYPE_INT, sequence_header.width, "height", G_TYPE_INT, sequence_header.height, "framerate", GST_TYPE_FRACTION, sequence_header.frame_rate_numerator, sequence_header.frame_rate_denominator, "pixel-aspect-ratio", GST_TYPE_FRACTION, sequence_header.aspect_ratio_numerator, sequence_header.aspect_ratio_denominator, "interlace-mode", G_TYPE_STRING, sequence_header.interlaced ? "interleaved" : "progressive", "profile", G_TYPE_STRING, get_profile_name (sequence_header.profile), "level", G_TYPE_STRING, get_level_name (sequence_header.level), NULL); gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps); gst_caps_unref (caps); gst_base_parse_set_frame_rate (parse, sequence_header.frame_rate_numerator, sequence_header.frame_rate_denominator, 0, 0); } } /* unmap only after the sequence header has been read; data must not be * dereferenced once the buffer is unmapped */ gst_buffer_unmap (frame->buffer, &map); gst_base_parse_set_min_frame_size (parse, 13); return gst_base_parse_finish_frame (parse, frame, framesize); out: gst_buffer_unmap (frame->buffer, &map); if (framesize) gst_base_parse_set_min_frame_size (parse, framesize); return GST_FLOW_OK; }
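A sketch of the sync scan at the top of the parser: Dirac parse-info headers start with 'B','B','C','D' (0x42424344), and GstByteReader can locate that pattern in one call. Helper name is hypothetical.

#include <gst/base/gstbytereader.h>

/* Returns the byte offset of the first Dirac sync word, or -1. */
static gint
find_dirac_sync (const guint8 *data, guint size)
{
  GstByteReader reader;

  gst_byte_reader_init (&reader, data, size);
  return gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
      0x42424344, 0, size);
}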