Example no. 1
Bool AvcDecoder_OMX::FlushOutput_OMX(uint8* aOutBuffer, uint32* aOutputLength, OMX_TICKS* aOutTimestamp, uint32 OldWidth, uint32 OldHeight)
{
    AVCFrameIO Output;
    AVCDec_Status Status;
    int32 Index, Release, FrameSize;
    // previous luma plane size, with both dimensions rounded up to whole 16x16 macroblocks
    uint32 OldFrameSize = ((OldWidth + 15) & (~15)) * ((OldHeight + 15) & (~15));
    Output.YCbCr[0] = Output.YCbCr[1] = Output.YCbCr[2] = NULL;

    Status = PVAVCDecGetOutput(&(AvcHandle), &Index, &Release, &Output);
    if (Status == AVCDEC_FAIL)
    {
        return 0;
    }
    *aOutTimestamp = DisplayTimestampArray[Index];
    *aOutputLength = 0; // init to 0

    if (Output.YCbCr[0])
    {
        FrameSize = Output.pitch * Output.height;
        // the decoded frame should never be larger than the available output buffer, but check just in case
        if (FrameSize <= OldFrameSize)
        {
            // total output size for one YUV 4:2:0 frame: luma plane plus two quarter-size chroma planes
            *aOutputLength = (Output.pitch * Output.height * 3) >> 1;

            oscl_memcpy(aOutBuffer, Output.YCbCr[0], FrameSize);                                  // Y plane
            oscl_memcpy(aOutBuffer + FrameSize, Output.YCbCr[1], FrameSize >> 2);                 // Cb plane
            oscl_memcpy(aOutBuffer + FrameSize + FrameSize / 4, Output.YCbCr[2], FrameSize >> 2); // Cr plane
        }
    }

    return 1;
}
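
The copies above assume the standard YUV 4:2:0 layout: a luma (Y) plane of pitch * height bytes followed by Cb and Cr planes of one quarter that size each, which is why the reported output length is (pitch * height * 3) >> 1. Below is a minimal sketch of the same buffer-size arithmetic, including the round-up to whole 16x16 macroblocks used for OldFrameSize; the helper name is illustrative and not part of the decoder API.

#include <stddef.h>
#include <stdint.h>

// Hypothetical helper: byte size of a YUV 4:2:0 output buffer for a frame whose
// dimensions are rounded up to whole macroblocks, mirroring
// ((OldWidth + 15) & (~15)) * ((OldHeight + 15) & (~15)) in FlushOutput_OMX.
static size_t Yuv420BufferSize(uint32_t width, uint32_t height)
{
    uint32_t alignedWidth  = (width  + 15) & ~15u;   // round up to a multiple of 16
    uint32_t alignedHeight = (height + 15) & ~15u;
    size_t   lumaSize      = (size_t)alignedWidth * alignedHeight;
    return lumaSize + (lumaSize >> 2) + (lumaSize >> 2);   // Y + Cb + Cr = 3/2 * luma
}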
Example no. 2
bool PVAVCDecoder::GetDecOutput(int *indx, int *release, AVCFrameIO* output)
{
    // True only when the underlying decoder reports AVCDEC_SUCCESS for a retrieved frame.
    return (PVAVCDecGetOutput((AVCHandle *)&iAvcHandle, indx, release, output) == AVCDEC_SUCCESS);
}
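
Below is a hedged caller-side sketch of how a wrapper like GetDecOutput might be used; the function name, the decoder pointer, and the surrounding error handling are assumptions for illustration only and do not come from the example above.

// Sketch only: assumes the PVAVCDecoder and AVCFrameIO declarations from the
// decoder's own headers are in scope.
bool FetchDecodedFrame(PVAVCDecoder* aDecoder, AVCFrameIO& aFrame)
{
    int indx = 0;      // frame index reported by the decoder
    int release = 0;   // release flag reported by the decoder for this frame

    // GetDecOutput returns true only when PVAVCDecGetOutput reports AVCDEC_SUCCESS.
    if (!aDecoder->GetDecOutput(&indx, &release, &aFrame))
    {
        return false;  // no displayable frame available
    }

    // aFrame.YCbCr[0], [1] and [2] now point at the decoded Y, Cb and Cr planes.
    return true;
}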