/*
 * Pull the next decoded video frame from mVideoAppSink, convert its
 * timestamp to stream time in microseconds, wrap it in a VideoData and
 * push it onto mVideoQueue.
 *
 * @param aKeyFrameSkip  while true, buffers flagged DISCONT (treated as
 *                       non-keyframes) are dropped
 * @param aTimeThreshold frames whose converted timestamp (usec) is below
 *                       this value are skipped
 * @return false when no buffer could be obtained (end of stream),
 *         true after one frame has been queued
 */
bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip, int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer* buffer = nullptr;
  int64_t timestamp, nextTimestamp;
  while (true) {
    if (!WaitForDecodedData(&mVideoSinkBufferCount)) {
      /* no more buffers will arrive; mark the queue finished */
      mVideoQueue.Finish();
      break;
    }
    mDecoder->NotifyDecodedFrames(0, 1);

    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
    /* NOTE(review): keyframe-ness is inferred from the absence of the
     * DISCONT flag here -- presumably set upstream on delta frames;
     * confirm against the pad-probe/sink setup elsewhere in this file */
    bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT);
    if ((aKeyFrameSkip && !isKeyframe)) {
      gst_buffer_unref(buffer);
      buffer = nullptr;
      continue;
    }

    timestamp = GST_BUFFER_TIMESTAMP(buffer);
    {
      /* mVideoSegment is shared with the GStreamer streaming thread;
       * hold the monitor while converting through it */
      ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
      timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                             GST_FORMAT_TIME, timestamp);
    }
    NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
                 "frame has invalid timestamp");
    /* from this point both timestamps are in MICROseconds, not ns */
    timestamp = nextTimestamp = GST_TIME_AS_USECONDS(timestamp);
    if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
      nextTimestamp += GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
    else if (fpsNum && fpsDen)
      /* add 1-frame duration */
      nextTimestamp += gst_util_uint64_scale(GST_USECOND, fpsNum, fpsDen);

    if (timestamp < aTimeThreshold) {
      /* NOTE(review): timestamp/aTimeThreshold are already usec here, but
       * GST_TIME_FORMAT/GST_TIME_ARGS expect nanoseconds, so this debug
       * line prints misleading values -- log-only; worth confirming */
      LOG(PR_LOG_DEBUG, ("skipping frame %" GST_TIME_FORMAT
                         " threshold %" GST_TIME_FORMAT,
                         GST_TIME_ARGS(timestamp),
                         GST_TIME_ARGS(aTimeThreshold)));
      gst_buffer_unref(buffer);
      buffer = nullptr;
      continue;
    }

    break;
  }

  if (!buffer)
    /* no more frames */
    return false;

  nsRefPtr<PlanarYCbCrImage> image;
#if GST_VERSION_MICRO >= 36
  /* buffers we allocated ourselves carry their backing image as qdata
   * under the "moz-reader-data" quark */
  const GstStructure* structure =
      gst_buffer_get_qdata(buffer, g_quark_from_string("moz-reader-data"));
  const GValue* value = gst_structure_get_value(structure, "image");
  if (value) {
    BufferData* data =
        reinterpret_cast<BufferData*>(g_value_get_boxed(value));
    image = data->mImage;
  }
#endif

  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(buffer),
        GST_BUFFER_SIZE(buffer), nullptr, &tmp, image);

    /* copy metadata and raw frame bytes into the image-backed buffer */
    gst_buffer_copy_metadata(tmp, buffer,
        (GstBufferCopyFlags)GST_BUFFER_COPY_ALL);
    memcpy(GST_BUFFER_DATA(tmp), GST_BUFFER_DATA(buffer),
        GST_BUFFER_SIZE(tmp));
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  guint8* data = GST_BUFFER_DATA(buffer);

  int width = mPicture.width;
  int height = mPicture.height;
  GstVideoFormat format = mFormat;

  /* describe the Y/Cb/Cr planes inside the packed buffer */
  VideoData::YCbCrBuffer b;
  for(int i = 0; i < 3; i++) {
    b.mPlanes[i].mData = data +
        gst_video_format_get_component_offset(format, i, width, height);
    b.mPlanes[i].mStride = gst_video_format_get_row_stride(format, i, width);
    b.mPlanes[i].mHeight =
        gst_video_format_get_component_height(format, i, height);
    b.mPlanes[i].mWidth =
        gst_video_format_get_component_width(format, i, width);
    b.mPlanes[i].mOffset = 0;
    b.mPlanes[i].mSkip = 0;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer,
      GST_BUFFER_FLAG_DELTA_UNIT);
  /* XXX ? */
  int64_t offset = 0;
  VideoData* video = VideoData::Create(mInfo, image, offset,
                                       timestamp, nextTimestamp, b,
                                       isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);
  gst_buffer_unref(buffer);

  return true;
}
/*
 * description : convert input 3gpp (length-prefixed / packetized) H.264
 *               buffer to NALU (Annex-B start-code) based buffer
 * params      : @self : GstOmxH264Dec, @buf: in/out buffer to be converted
 * return      : none. On success *buf is replaced with a newly allocated
 *               Annex-B buffer (the original is unreffed); on error *buf
 *               is left untouched and only an error is logged.
 * comments    : each NAL unit in the input is prefixed by a length field
 *               of self->h264NalLengthSize bytes; the output uses a
 *               4-byte start code for the first NAL and 3-byte start
 *               codes for the following ones.
 */
static void convert_frame (GstOmxH264Dec *self, GstBuffer **buf)
{
    OMX_U8 frameType;
    OMX_U32 nalSize = 0;
    OMX_U32 cumulSize = 0;       /* input bytes consumed so far */
    OMX_U32 offset = 0;          /* write offset into the output buffer */
    OMX_U32 nalHeaderSize = 0;   /* start-code length: 4 first NAL, 3 after */
    OMX_U32 outSize = 0;         /* total output bytes produced so far */
    OMX_U8 *frame_3gpp = GST_BUFFER_DATA(*buf);
    OMX_U32 frame_3gpp_size = GST_BUFFER_SIZE(*buf);
    GstBuffer *nalu_next_buf = NULL;
    GstBuffer *nalu_buf = NULL;

    do {
        /* get NAL Length based on length of length */
        if (self->h264NalLengthSize == 1) {
            nalSize = frame_3gpp[0];
        } else if (self->h264NalLengthSize == 2) {
            nalSize = GSTOMX_H264_RB16(frame_3gpp);
        } else {
            nalSize = GSTOMX_H264_RB32(frame_3gpp);
        }

        GST_LOG_OBJECT(self, "packetized frame size = %d", nalSize);

        frame_3gpp += self->h264NalLengthSize;

        /* Checking frame type (nal_unit_type = low 5 bits of NAL header) */
        frameType = *frame_3gpp & 0x1f;

        switch (frameType)
        {
            case GSTOMX_H264_NUT_SLICE:
                GST_LOG_OBJECT(self, "Frame is non-IDR frame...");
                break;
            case GSTOMX_H264_NUT_IDR:
                GST_LOG_OBJECT(self, "Frame is an IDR frame...");
                break;
            case GSTOMX_H264_NUT_SEI:
                GST_LOG_OBJECT(self, "Found SEI Data...");
                break;
            case GSTOMX_H264_NUT_SPS:
                GST_LOG_OBJECT(self, "Found SPS data...");
                break;
            case GSTOMX_H264_NUT_PPS:
                GST_LOG_OBJECT(self, "Found PPS data...");
                break;
            case GSTOMX_H264_NUT_EOSEQ:
                GST_LOG_OBJECT(self, "End of sequence...");
                break;
            case GSTOMX_H264_NUT_EOSTREAM:
                GST_LOG_OBJECT(self, "End of stream...");
                break;
            case GSTOMX_H264_NUT_DPA:
            case GSTOMX_H264_NUT_DPB:
            case GSTOMX_H264_NUT_DPC:
            case GSTOMX_H264_NUT_AUD:
            case GSTOMX_H264_NUT_FILL:
            case GSTOMX_H264_NUT_MIXED:
                break;
            default:
                GST_INFO_OBJECT(self, "Unknown Frame type: %d\n", frameType);
                goto EXIT;
        }

        /* if nal size is same, we can change only start code */
        if((nalSize + GSTOMX_H264_NAL_START_LEN) == frame_3gpp_size) {
            /* single NAL whose length field is exactly start-code sized:
             * overwrite the length prefix in place with 00 00 00 01 */
            GST_LOG_OBJECT(self, "only change start code");
            GSTOMX_H264_WB32(GST_BUFFER_DATA(*buf), 1);
            return;
        }

        /* Convert 3GPP Frame to NALU Frame */
        offset = outSize;
        nalHeaderSize = offset ? 3 : 4;

        outSize += nalSize + nalHeaderSize;
        /* FIX: outSize is unsigned (OMX_U32), so the previous test
         * (outSize < 0) was always false.  Detect unsigned wrap-around of
         * the accumulator with (outSize < offset) instead. */
        if ((nalSize > frame_3gpp_size) || (outSize < offset)) {
            GST_ERROR_OBJECT(self, "out of bounds Error. frame_nalu_size=%d", outSize);
            goto EXIT;
        }

        if (nalu_buf) {
            /* subsequent NALs get their own buffer, joined below */
            nalu_next_buf = gst_buffer_new_and_alloc(nalSize + nalHeaderSize);
            if (nalu_next_buf == NULL) {
                GST_ERROR_OBJECT(self, "gst_buffer_new_and_alloc failed.(nalu_next_buf)");
                goto EXIT;
            }
        } else {
            /* first NAL: allocate the initial output buffer */
            nalu_buf = gst_buffer_new_and_alloc(outSize);
        }

        if (nalu_buf == NULL) {
            GST_ERROR_OBJECT(self, "gst_buffer_new_and_alloc failed.(nalu_buf)");
            goto EXIT;
        }

        if (!offset) {
            /* first NAL: payload after a 4-byte 00 00 00 01 start code */
            memcpy(GST_BUFFER_DATA(nalu_buf)+nalHeaderSize, frame_3gpp, nalSize);
            GSTOMX_H264_WB32(GST_BUFFER_DATA(nalu_buf), 1);
        } else {
            if (nalu_next_buf) {
                /* gst_buffer_join takes ownership of both buffers */
                GstBuffer *nalu_joined_buf = gst_buffer_join(nalu_buf,nalu_next_buf);
                nalu_buf = nalu_joined_buf;
                nalu_next_buf = NULL;
            }
            memcpy(GST_BUFFER_DATA(nalu_buf)+nalHeaderSize+offset, frame_3gpp, nalSize);
            /* 3-byte 00 00 01 start code for non-first NALs */
            (GST_BUFFER_DATA(nalu_buf)+offset)[0] =
                (GST_BUFFER_DATA(nalu_buf)+offset)[1] = 0;
            (GST_BUFFER_DATA(nalu_buf)+offset)[2] = 1;
        }

        frame_3gpp += nalSize;
        cumulSize += nalSize + self->h264NalLengthSize;
        GST_LOG_OBJECT(self, "frame_3gpp_size = %d => frame_nalu_size=%d", frame_3gpp_size, outSize);
    } while (cumulSize < frame_3gpp_size);

    /* carry over timestamps/caps/flags, then swap the converted buffer in */
    gst_buffer_copy_metadata(nalu_buf, *buf, GST_BUFFER_COPY_ALL);

    if (*buf) {
        gst_buffer_unref (*buf);
    }
    *buf = nalu_buf;

    return;

EXIT:
    if (nalu_buf) {
        gst_buffer_unref (nalu_buf);
    }
    GST_ERROR_OBJECT(self, "converting frame error.");
    return;
}
/*
 * gst_pngenc_chain:
 * @pad: the sink pad the buffer arrived on
 * @buf: raw video frame to encode (consumed by this function)
 *
 * Encode one raw frame to PNG with libpng and push the result on the
 * source pad.  In snapshot mode an EOS event is sent after the first
 * frame and GST_FLOW_UNEXPECTED is returned to stop the stream.
 *
 * Returns: GST_FLOW_OK on success, or an error/unexpected flow return.
 */
static GstFlowReturn
gst_pngenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstPngEnc *pngenc;
  gint row_index;
  /* FIX: initialized to NULL and volatile-qualified -- it is modified
   * between setjmp() and a potential longjmp() from inside libpng, and
   * previously leaked in that error path */
  png_byte ** volatile row_pointers = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *encoded_buf = NULL;

  pngenc = GST_PNGENC (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (pngenc, "BEGINNING");

  /* caps must have been negotiated before data arrives */
  if (G_UNLIKELY (pngenc->width <= 0 || pngenc->height <= 0)) {
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

  if (G_UNLIKELY (GST_BUFFER_SIZE (buf) < pngenc->height * pngenc->stride)) {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (pngenc, STREAM, FORMAT, (NULL),
        ("Provided input buffer is too small, caps problem?"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  /* initialize png struct stuff */
  pngenc->png_struct_ptr = png_create_write_struct (PNG_LIBPNG_VER_STRING,
      (png_voidp) NULL, user_error_fn, user_warning_fn);
  if (pngenc->png_struct_ptr == NULL) {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
        ("Failed to initialize png structure"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  pngenc->png_info_ptr = png_create_info_struct (pngenc->png_struct_ptr);
  if (!pngenc->png_info_ptr) {
    gst_buffer_unref (buf);
    png_destroy_write_struct (&(pngenc->png_struct_ptr), (png_infopp) NULL);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, INIT, (NULL),
        ("Failed to initialize the png info structure"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  /* non-0 return is from a longjmp inside of libpng */
  if (setjmp (png_jmpbuf (pngenc->png_struct_ptr)) != 0) {
    gst_buffer_unref (buf);
    png_destroy_write_struct (&pngenc->png_struct_ptr, &pngenc->png_info_ptr);
    /* FIX: row_pointers is allocated after setjmp(); free it here so a
     * libpng error no longer leaks it (g_free(NULL) is a no-op) */
    g_free (row_pointers);
    GST_ELEMENT_ERROR (pngenc, LIBRARY, FAILED, (NULL),
        ("returning from longjmp"));
    ret = GST_FLOW_ERROR;
    goto done;
  }

  png_set_filter (pngenc->png_struct_ptr, 0,
      PNG_FILTER_NONE | PNG_FILTER_VALUE_NONE);
  png_set_compression_level (pngenc->png_struct_ptr,
      pngenc->compression_level);

  png_set_IHDR (pngenc->png_struct_ptr,
      pngenc->png_info_ptr,
      pngenc->width,
      pngenc->height,
      8,
      pngenc->png_color_type,
      PNG_INTERLACE_NONE,
      PNG_COMPRESSION_TYPE_DEFAULT, PNG_FILTER_TYPE_DEFAULT);

  /* libpng writes through our callbacks into pngenc->buffer_out */
  png_set_write_fn (pngenc->png_struct_ptr, pngenc,
      (png_rw_ptr) user_write_data, user_flush_data);

  /* one row pointer per scanline into the input buffer */
  row_pointers = g_new (png_byte *, pngenc->height);
  for (row_index = 0; row_index < pngenc->height; row_index++) {
    row_pointers[row_index] = GST_BUFFER_DATA (buf) +
        (row_index * pngenc->stride);
  }

  /* allocate the output buffer (raw size is an upper bound for PNG) */
  pngenc->buffer_out =
      gst_buffer_new_and_alloc (pngenc->height * pngenc->stride);
  pngenc->written = 0;

  png_write_info (pngenc->png_struct_ptr, pngenc->png_info_ptr);
  png_write_image (pngenc->png_struct_ptr, row_pointers);
  png_write_end (pngenc->png_struct_ptr, NULL);

  g_free (row_pointers);
  row_pointers = NULL;

  /* trim the over-allocated output down to the bytes actually written */
  encoded_buf = gst_buffer_create_sub (pngenc->buffer_out, 0, pngenc->written);

  png_destroy_info_struct (pngenc->png_struct_ptr, &pngenc->png_info_ptr);
  png_destroy_write_struct (&pngenc->png_struct_ptr, (png_infopp) NULL);
  gst_buffer_copy_metadata (encoded_buf, buf, GST_BUFFER_COPY_TIMESTAMPS);
  gst_buffer_unref (buf);
  gst_buffer_set_caps (encoded_buf, GST_PAD_CAPS (pngenc->srcpad));

  if ((ret = gst_pad_push (pngenc->srcpad, encoded_buf)) != GST_FLOW_OK)
    goto done;

  if (pngenc->snapshot) {
    GstEvent *event;

    GST_DEBUG_OBJECT (pngenc, "snapshot mode, sending EOS");
    /* send EOS event, since a frame has been pushed out */
    event = gst_event_new_eos ();

    gst_pad_push_event (pngenc->srcpad, event);
    ret = GST_FLOW_UNEXPECTED;
  }

done:
  GST_DEBUG_OBJECT (pngenc, "END, ret:%d", ret);

  if (pngenc->buffer_out != NULL) {
    gst_buffer_unref (pngenc->buffer_out);
    pngenc->buffer_out = NULL;
  }

  gst_object_unref (pngenc);
  return ret;
}