/*
 * gstti_viddec_process
 *
 * Feed one encoded frame to the DMAI video decoder, decoding it into
 * hDstBuf.  Returns TRUE on success; returns FALSE (after emitting a
 * GStreamer error/warning) when the codec fails or reports a bit error.
 */
static gboolean gstti_viddec_process(GstTIDmaidec *dmaidec, GstBuffer *encData,
    Buffer_Handle hDstBuf, gboolean codecFlushed)
{
    GstTIDmaidecData *decoder = (GstTIDmaidecData *) g_type_get_qdata(
        G_OBJECT_CLASS_TYPE(G_OBJECT_GET_CLASS(dmaidec)),
        GST_TIDMAIDEC_PARAMS_QDATA);
    Buffer_Handle hInBuf;
    Int32 bytesConsumed, bytesIn;
    Int status;

    hInBuf = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encData);
    g_assert(hInBuf != NULL);

    /* Reset the graphics dimensions so the codec may use the whole
     * destination buffer for its output. */
    BufferGfx_resetDimensions(hDstBuf);

    /* Hand the encoded frame to the codec */
    bytesIn = Buffer_getNumBytesUsed(hInBuf);
    GST_DEBUG("invoking the video decoder, with %ld bytes (%p, %p)\n",
        bytesIn, Buffer_getUserPtr(hInBuf), Buffer_getUserPtr(hDstBuf));
    status = Vdec_process(dmaidec->hCodec, hInBuf, hDstBuf);

    /* When the codec was flushed, no input was truly consumed.
     * NOTE(review): this value is computed but not referenced below —
     * kept for parity with the original; confirm whether it is needed. */
    bytesConsumed = (codecFlushed) ? 0 : Buffer_getNumBytesUsed(hInBuf);

    if (status < 0) {
        GST_ELEMENT_ERROR(dmaidec, STREAM, DECODE, (NULL),
            ("failed to decode video buffer"));
        return FALSE;
    }

    if (status == Dmai_EBITERROR) {
        GST_ELEMENT_WARNING(dmaidec, STREAM, DECODE, (NULL),
            ("Unable to decode frame with timestamp %"GST_TIME_FORMAT,
                GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(encData))));
        /* The codec will not release the output buffer after a bit error,
         * so we must free it ourselves. */
        GST_DEBUG("Freeing buffer because of bit error on the stream");
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffertransport_GST_FREE |
            decoder->dops->outputUseMask);
        return FALSE;
    }

    return TRUE;
}
/******************************************************************************
 * gst_tiaudenc1_encode_thread
 *     Encoder worker thread: repeatedly pulls raw audio from the element's
 *     circular buffer, runs it through the DMAI audio encoder (Aenc1), and
 *     pushes the encoded result downstream as DMAI transport buffers.
 *
 *     arg is the GstTIAudenc1 element (a reference is taken here and
 *     released on exit).  Returns GstTIThreadSuccess or GstTIThreadFailure.
 *
 *     NOTE: the Rendezvous_meet/reset pairs synchronize startup and shutdown
 *     with the main thread — their ordering relative to codec start/stop and
 *     buffer release is deliberate; do not reorder.
 ******************************************************************************/
static void* gst_tiaudenc1_encode_thread(void *arg)
{
    GstTIAudenc1 *audenc1 = GST_TIAUDENC1(gst_object_ref(arg));
    void *threadRet = GstTIThreadSuccess;
    Buffer_Handle hDstBuf;
    Int32 encDataConsumed;          /* bytes of input the codec consumed     */
    GstBuffer *encDataWindow = NULL;/* current window into the circular buf  */
    GstClockTime encDataTime;
    Buffer_Handle hEncDataWindow;
    GstBuffer *outBuf;
    GstClockTime sampleDuration;
    guint sampleRate;
    guint numSamples;
    Int bufIdx;
    Int ret;

    GST_LOG("starting audenc encode thread\n");

    /* Initialize codec engine */
    ret = gst_tiaudenc1_codec_start(audenc1);

    /* Notify main thread that it is ok to continue initialization */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    if (ret == FALSE) {
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
            ("Failed to start codec\n"), (NULL));
        goto thread_exit;
    }

    while (TRUE) {
        /* Obtain an raw data frame */
        encDataWindow = gst_ticircbuffer_get_data(audenc1->circBuf);
        encDataTime = GST_BUFFER_TIMESTAMP(encDataWindow);
        hEncDataWindow = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encDataWindow);

        /* Check if there is enough encoded data to be sent to the codec.
         * The last frame of data may not be sufficient to meet the codec
         * requirements for the amount of input data.  If so just throw
         * away the last bit of data rather than filling with bogus
         * data.
         */
        if (GST_BUFFER_SIZE(encDataWindow) < Aenc1_getInBufSize(audenc1->hAe)) {
            GST_LOG("Not enough audio data remains\n");
            /* Running short of data is only expected while draining EOS;
             * otherwise it is treated as a failure. */
            if (!audenc1->drainingEOS) {
                goto thread_failure;
            }
            goto thread_exit;
        }

        /* Obtain a free output buffer for the encoded data */
        if (!(hDstBuf = gst_tidmaibuftab_get_buf(audenc1->hOutBufTab))) {
            GST_ELEMENT_ERROR(audenc1, RESOURCE, READ,
                ("Failed to get a free contiguous buffer from BufTab\n"),
                (NULL));
            goto thread_exit;
        }

        /* Invoke the audio encoder */
        GST_LOG("Invoking the audio encoder at 0x%08lx with %u bytes\n",
            (unsigned long)Buffer_getUserPtr(hEncDataWindow),
            GST_BUFFER_SIZE(encDataWindow));
        ret = Aenc1_process(audenc1->hAe, hEncDataWindow, hDstBuf);
        encDataConsumed = Buffer_getNumBytesUsed(hEncDataWindow);

        if (ret < 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
                ("Failed to encode audio buffer\n"), (NULL));
            goto thread_failure;
        }

        /* If no encoded data was used we cannot find the next frame */
        if (ret == Dmai_EBITERROR && encDataConsumed == 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
                ("Fatal bit error\n"), (NULL));
            goto thread_failure;
        }

        if (ret > 0) {
            GST_LOG("Aenc1_process returned success code %d\n", ret);
        }

        /* Derive the duration of the consumed audio from the byte count.
         * NOTE(review): the divide-by-(2 * channels) assumes 16-bit
         * (2-byte) samples — confirm against the element's caps. */
        sampleRate = audenc1->samplefreq;
        numSamples = encDataConsumed / (2 * audenc1->channels);
        sampleDuration = GST_FRAMES_TO_CLOCK_TIME(numSamples, sampleRate);

        /* Release the reference buffer, and tell the circular buffer how much
         * data was consumed. */
        ret = gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow,
            encDataConsumed);
        encDataWindow = NULL;

        if (!ret) {
            goto thread_failure;
        }

        /* Set the source pad capabilities based on the encoded frame
         * properties. */
        gst_tiaudenc1_set_source_caps(audenc1);

        /* Create a DMAI transport buffer object to carry a DMAI buffer to
         * the source pad.  The transport buffer knows how to release the
         * buffer for re-use in this element when the source pad calls
         * gst_buffer_unref().
         */
        outBuf = gst_tidmaibuffertransport_new(hDstBuf, audenc1->hOutBufTab,
            NULL, NULL);
        gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf),
            Buffer_getNumBytesUsed(hDstBuf));
        gst_buffer_set_caps(outBuf, GST_PAD_CAPS(audenc1->srcpad));

        /* Set timestamp on output buffer */
        if (audenc1->genTimeStamps) {
            GST_BUFFER_DURATION(outBuf) = sampleDuration;
            GST_BUFFER_TIMESTAMP(outBuf) = encDataTime;
        } else {
            GST_BUFFER_TIMESTAMP(outBuf) = GST_CLOCK_TIME_NONE;
        }

        /* Tell circular buffer how much time we consumed */
        gst_ticircbuffer_time_consumed(audenc1->circBuf, sampleDuration);

        /* Push the transport buffer to the source pad */
        GST_LOG("pushing buffer to source pad with timestamp : %"
            GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_TIMESTAMP(outBuf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION(outBuf)));

        if (gst_pad_push(audenc1->srcpad, outBuf) != GST_FLOW_OK) {
            GST_DEBUG("push to source pad failed\n");
            goto thread_failure;
        }

        /* Release buffers no longer in use by the codec */
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffer_CODEC_FREE);
    }

thread_failure:
    /* Abnormal termination: flag the abort so the chain function and the
     * circular buffer stop feeding this thread. */
    gst_tithread_set_status(audenc1, TIThread_CODEC_ABORTED);
    gst_ticircbuffer_consumer_aborted(audenc1->circBuf);
    threadRet = GstTIThreadFailure;

thread_exit:
    /* Re-claim any buffers owned by the codec */
    bufIdx = BufTab_getNumBufs(GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab));

    while (bufIdx-- > 0) {
        Buffer_Handle hBuf = BufTab_getBuf(
            GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab), bufIdx);
        Buffer_freeUseMask(hBuf, gst_tidmaibuffer_CODEC_FREE);
    }

    /* Release the last buffer we retrieved from the circular buffer */
    if (encDataWindow) {
        gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow, 0);
    }

    /* We have to wait to shut down this thread until we can guarantee that
     * no more input buffers will be queued into the circular buffer
     * (we're about to delete it).
     */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    /* Notify main thread that we are done draining before we shutdown the
     * codec, or we will hang.  We proceed in this order so the EOS event gets
     * propagated downstream before we attempt to shut down the codec.  The
     * codec-shutdown process will block until all BufTab buffers have been
     * released, and downstream-elements may hang on to buffers until
     * they get the EOS.
     */
    Rendezvous_force(audenc1->waitOnEncodeDrain);

    /* Shut down the codec engine */
    if (gst_tiaudenc1_codec_stop(audenc1) < 0) {
        GST_ERROR("failed to stop codec\n");
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
            ("Failed to stop codec\n"), (NULL));
    }

    /* Drop the reference taken on entry */
    gst_object_unref(audenc1);

    GST_LOG("exit audio encode_thread (%d)\n", (int)threadRet);
    return threadRet;
}