Example No. 1
DvevmStRetCode
dvtb_imgDec1ProcessOutBuff(DvevmStVpbeInfo *vpbe, FILE *fp, DvevmStXdmBuffer *outbuf, DvevmStBool fileBased, int nFrames)
{
    int i = 0;
    DvevmStRetCode retCode = DVEVM_ST_SUCCESS;

    if (DVEVM_ST_TRUE == fileBased)
    {
        for (i = 0; i < outbuf->numBufs; i++)
        {
            if(DVEVM_ST_FAIL ==  dvtb_fileWrite(fp, (char *)outbuf->bufs[i], outbuf->bufSizes[i]))
            {
                SYS_ERROR("Error in writing to file.\n");
                retCode = DVEVM_ST_FAIL;
                break;
            }
        }

        if(DVEVM_ST_SUCCESS == retCode)
            SYS_OUT("Dumped Frame# %d.\n", nFrames);
    }
    else
    {
        if(DVEVM_ST_FAIL ==  dvtb_displayBuffer(vpbe, (char *)outbuf->bufs[0], outbuf->bufSizes[0]))
        {
            SYS_ERROR("Unable to display Frame# %d\n", nFrames);
            retCode = DVEVM_ST_FAIL;
        }
        else
            SYS_OUT("Displayed Frame# %d.\n", nFrames);
    }

    return retCode;
}
Example No. 2
DvevmStRetCode
dvtb_setV4l2OutputDac(DvevmStVpbeInfo *vpbe)
{
	DvevmStRetCode retval = DVEVM_ST_FAIL;
	char outputName[10];

	ASSERT(vpbe != NULL);

	switch (vpbe->dispOutput)
	{
		case DVEVM_ST_OUTPUT_NA:
			break;
		case DVEVM_ST_COMPOSITE:
			strcpy(outputName, "COMPOSITE");
			SYS_OUT("Setting Display Output as <%s>\n", outputName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_OUTPUT, outputName))
			{
				SYS_ERROR("Unable to set the Display Output as <%s> in the driver.\n", outputName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_COMPONENT:
			strcpy(outputName, "COMPONENT");
			SYS_OUT("Setting Display Output as <%s>\n", outputName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_OUTPUT, outputName))
			{
				SYS_ERROR("Unable to set the Display Output as <%s> in the driver.\n", outputName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_SVIDEO:
			strcpy(outputName, "SVIDEO");
			SYS_OUT("Setting Display Output as <%s>\n", outputName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_OUTPUT, outputName))
			{
				SYS_ERROR("Unable to set the Display Output as <%s> in the driver.\n", outputName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		default:
			SYS_ERROR("Invalid Display Output. Setting it to COMPOSITE by default.\n");
			strcpy(outputName, "COMPOSITE");
			SYS_OUT("Setting Display Output as <%s>\n", outputName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_OUTPUT, outputName))
			{
				SYS_ERROR("Unable to set the Display Output as <%s> in the driver.\n", outputName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;
	}
	return DVEVM_ST_SUCCESS;
}
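
A possible refactoring sketch, not from the original project: every case in dvtb_setV4l2OutputDac above differs only in the string written to sysfs, so the duplicated write/error/cleanup block can be hoisted out of the switch. The function name below is hypothetical; it reuses only the helpers already shown (dvtb_writeSysFs, dvtb_close_vpbe, SYS_OUT, SYS_ERROR).

DvevmStRetCode
dvtb_setV4l2OutputDacCompact(DvevmStVpbeInfo *vpbe)	/* hypothetical name */
{
	char outputName[16];

	ASSERT(vpbe != NULL);

	/* Pick the output name; the sysfs write below is shared by all cases. */
	switch (vpbe->dispOutput)
	{
		case DVEVM_ST_OUTPUT_NA:
			return DVEVM_ST_SUCCESS;	/* nothing to configure */
		case DVEVM_ST_COMPONENT:
			strcpy(outputName, "COMPONENT");
			break;
		case DVEVM_ST_SVIDEO:
			strcpy(outputName, "SVIDEO");
			break;
		case DVEVM_ST_COMPOSITE:
			strcpy(outputName, "COMPOSITE");
			break;
		default:
			SYS_ERROR("Invalid Display Output. Setting it to COMPOSITE by default.\n");
			strcpy(outputName, "COMPOSITE");
			break;
	}

	SYS_OUT("Setting Display Output as <%s>\n", outputName);
	if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_OUTPUT, outputName))
	{
		SYS_ERROR("Unable to set the Display Output as <%s> in the driver.\n", outputName);
		dvtb_close_vpbe(vpbe);
		return DVEVM_ST_FAIL;
	}
	return DVEVM_ST_SUCCESS;
}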
Example No. 3
void
dvtb_aacheEnc1HanClose(DvevmStAacHeEnc1Info *aenc1, char *engName)
{
	dvtb_aacheEnc1Close(aenc1);
	SYS_OUT("Audio Encoder <%s> closed\n", aenc1->aacenc1Name);

	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&aenc1->ceHdl))
		SYS_ERROR("Unable to close Engine <%s>\n", engName);
	else
		SYS_OUT("Engine <%s> closed\n", engName);
}
Example No. 4
void
dvtb_loopBackVidDec2ProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int nFrames)
{
	if (DVEVM_ST_FALSE == fileBased)//Driver based
	{
		SYS_OUT("Total # of frames captured, Encoded, Decoded & Displayed = %d.\n", nFrames - 1);
	}
	else
		//File based
		SYS_OUT("Total # of frames Read from file, Encoded, Decoded & dumped = %d.\n", nFrames - 1);
}
Example No. 5
void
dvtb_audDec1HanClose(DvevmStAudDec1Info *ad, char *engName)
{
        dvtb_audDec1Close(ad);
        SYS_OUT("Audio Decoder <%s> closed\n", ad->adecName);

        if (DVEVM_ST_FAIL == dvtb_ceDeInit(&ad->ceHdl))
                SYS_ERROR("Unable to close Engine <%s>\n", engName);
        else
                SYS_OUT("Engine <%s> closed\n", engName);
}
Example No. 6
void
dvtb_imgDec1HanClose(DvevmStImgDec1Info *idec, char *engName)
{
    dvtb_imgDec1Close(idec);
    SYS_OUT("Image Decoder <%s> closed\n", idec->idec1Name);

    if (DVEVM_ST_FAIL == dvtb_ceDeInit(&idec->ceHdl))
        SYS_ERROR("Unable to close Engine <%s>\n", engName);
    else
        SYS_OUT("Engine <%s> closed\n", engName);
}
Example No. 7
void
dvtb_h264Enc1HanClose(DvevmStH264Enc1Info *ve, char *engName)
{
	dvtb_h264Enc1Close(ve);
	SYS_OUT("Video Encoder <%s> closed\n", ve->venc1Name);

	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&ve->ceHdl))
		SYS_ERROR("Unable to close Engine <%s>\n", engName);
	else
		SYS_OUT("Engine <%s> closed\n", engName);
}
Example No. 8
void
dvtb_jpegEnc1HanClose(DvevmStJpegEnc1Info *ienc, char *engName)
{
	dvtb_jpegEnc1Close(ienc);
	SYS_OUT("Jpeg Encoder <%s> closed\n", ienc->ienc1Name);

	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&ienc->ceHdl))
		SYS_ERROR("Unable to close Engine <%s>\n", engName);
	else
		SYS_OUT("Engine <%s> closed\n", engName);
}
Example No. 9
void
dvtb_aacheEnc1ProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int seconds)
{
	if (DVEVM_ST_FALSE == fileBased)//Driver based
	{
		if (useAlgorithm)
			SYS_OUT("AAC [Capture]+[Encode] completed for %d seconds\n",	seconds);
		else
			SYS_OUT("AAC [Capture] completed for %d seconds\n", seconds);
	}
	else//File based
		SYS_OUT("AAC [File Read]+[Encode] completed\n");
}
Example No. 10
void
dvtb_audDec1ProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int nFrames)
{
        if (DVEVM_ST_FALSE == fileBased)//Driver based
        {
                if (useAlgorithm)
                        SYS_OUT("Audio [Decode]+[Play] completed for <%d> frames\n", nFrames - 1);
                else
                        SYS_OUT("Audio [Play] completed for <%d> frames\n", nFrames - 1);
        }
        else//File based
                SYS_OUT("Audio [Decode]+[dump] completed for <%d> frames\n", nFrames - 1);
}
Example No. 11
void
dvtb_imgEncProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int nFrames)
{
	if (DVEVM_ST_FALSE == fileBased) //Driver based
	{
		if (useAlgorithm)
			SYS_OUT("Total # of Slices Encoded = %d \n", nFrames);
		else
			SYS_OUT("Image Captured \n");
	}
	else // File based
		SYS_OUT("Total # of Slices Encoded = %d \n", nFrames);
}
Example No. 12
void
dvtb_imgDec1ProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int nFrames)
{
    if (DVEVM_ST_FALSE == fileBased) //Driver based
    {
        if (useAlgorithm)
            SYS_OUT("Total # of slices Decoded and Displayed = %d  \n", nFrames);
        else
            SYS_OUT("Image Displayed \n");
    }
    else//File based
        SYS_OUT("Total # of slices Decoded = %d \n", nFrames);
}
Example No. 13
void
dvtb_mpeg4spDec2ProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int nFrames)
{
	if (DVEVM_ST_FALSE == fileBased)//Driver based
	{
		if (useAlgorithm)
			SYS_OUT("Total # of frames Decoded & Displayed = %d \n", nFrames - 1);
		else
			SYS_OUT("Total # of frames Displayed = %d \n", nFrames - 1);
	}
	else//File based
		SYS_OUT("Total # of frames Decoded & Dumped = %d \n", nFrames - 1);
}
Example No. 14
void
dvtb_aacEncHanClose(DvevmStAACEncInfo *aenc, char *engName)
{
#if 0
	dvtb_aacEncClose(aenc);
	SYS_OUT("Audio Encoder <%s> closed\n", aenc->aencName);

	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&aenc->ceHdl))
		SYS_ERROR("Unable to close Engine <%s>\n", engName);
	else
		SYS_OUT("Engine <%s> closed\n", engName);
#endif
}
Example No. 15
void
dvtb_h264Enc1ProcessEndMessage(DvevmStBool useAlgorithm, DvevmStBool fileBased, int nFrames)
{
	if (DVEVM_ST_FALSE == fileBased)//Driver based
	{
		if (useAlgorithm)
			SYS_OUT("Total # of frames Captured & Encoded = %d \n", nFrames - 1);
		else
			SYS_OUT("Total # of frames Captured = %d \n", nFrames - 1);
	}
	else//File based
		SYS_OUT("Total # of frames Read from File & Encoded = %d \n", nFrames - 1);
}
Example No. 16
/*********************************************************************
 *	dvtb_vidDec2ProcessOutBuff:
 *	Description: Process output of decoder
 ********************************************************************/
DvevmStRetCode
dvtb_vidDec2ProcessOutBuff(DvevmStGlobalParams *g, DvevmStVidDec2Info *vd, FILE *fp, DvevmStBool fileBased, int nFrames, int *displayFrame)
{
        int validOutIds = 0, frames = 0;
        DvevmStRetCode retCode = DVEVM_ST_SUCCESS;

        frames = *displayFrame;
        //Check for Frame ready via outputID array and display buffer pointers
        validOutIds = 0;
        while(vd->vdec2OutArgs.outputID[validOutIds] > 0)//loop to display/dump all output buffers
        {
                SYS_DEBUG("Valid output id = %d\n",validOutIds);

                if(DVEVM_ST_TRUE == fileBased)
                {
                        // Dump output in file
                        SYS_DEBUG("Frame #%d, OutputId %d.\n", nFrames, (int)vd->vdec2OutArgs.outputID[validOutIds]);
                        if(DVEVM_ST_FAIL == dvtb_vidDec2WriteOutputData(&vd->vdec2OutArgs.displayBufs[validOutIds], fp, &vd->vdec2Status))
                        {
                                SYS_ERROR("Unable to Dump frame # %d. \n",frames);
                                retCode = DVEVM_ST_FAIL;
                                break;
                        }
                        SYS_OUT("Dumped Frame# <%d>.\n", frames);
                        frames++;
                }
                else
                {
#if 0
                        // Give output to driver for display
                        if(DVEVM_ST_FAIL == dvtb_vidDec2DisplayOutputData(g, (char *)vd->vdec2OutArgs.displayBufs[validOutIds].bufDesc[0].buf, vd->vdec2OutArgs.displayBufs[validOutIds].bufDesc[0].bufSize))
                        {
                                SYS_ERROR("Unable to display frame # %d .\n",frames);
                                retCode = DVEVM_ST_FAIL;
                                break;
                        }

                        SYS_OUT("Displayed Frame# %d.\n", frames);
                        frames++;
#endif
                }

                validOutIds++;
        }

        *displayFrame = frames;
        return retCode;
}
Example No. 17
DvevmStRetCode
dvtb_mpeg2Dec2Flush(DvevmStThreadInfo *T, int *nFrames)
{
	int decDuration = 0;
	DvevmStRetCode retCode = DVEVM_ST_SUCCESS;

	do
	{
		/* Add logic for flushing display buffers */
		T->g.mpeg2dec2.vdec2Cmd = XDM_FLUSH;

		if (DVEVM_ST_FAIL == dvtb_mpeg2Dec2Control(&T->g.mpeg2dec2))
		{
			SYS_ERROR("Unable to set Flush mode.\n");
			retCode = DVEVM_ST_FAIL;
			break;
		}

		SYS_OUT("Flushing out frames held by Video Decoder\n");

		while ((*nFrames) <= T->g.mpeg2dec2.numFrames)
		{
			if (DVEVM_ST_FAIL == dvtb_mpeg2Dec2Decode(&T->g.mpeg2dec2, (*nFrames), &decDuration))
			{
				SYS_ERROR("Process call returned error while flushing display buffers\n");
				retCode = DVEVM_ST_FAIL;
				break;
			}

			T->g.mpeg2dec2.vdec2Cmd = XDM_GETSTATUS;
			if (DVEVM_ST_FAIL == dvtb_mpeg2Dec2Control(&T->g.mpeg2dec2))
			{
				SYS_ERROR("Unable to get the status of the decoder\n");
				retCode = DVEVM_ST_FAIL;
				break;
			}

			if (0 == T->g.mpeg2dec2.vdec2OutArgs.viddecOutArgs.outputID[0])
				break;

			/* O/p logic */
			if (DVEVM_ST_FAIL == dvtb_vidDec2ProcessOutBuff_dm365(&T->g.vpbe, &T->g.mpeg2dec2.vdec2Status.viddecStatus, &T->g.mpeg2dec2.vdec2OutArgs.viddecOutArgs, &T->g.mpeg2dec2.vdec2InArgs.viddecInArgs, &T->g.mpeg2dec2.vdec2Params.viddecParams, &T->g.mpeg2dec2.vdec2DynParams.viddecDynamicParams, &T->g.mpeg2dec2.dispBufArray, &T->g.mpeg2dec2.outBufArray, T->targetFp, nFrames, 0))
			{
				SYS_ERROR("Unable to process output buffer \n");
				retCode = DVEVM_ST_FAIL;
				break;
			}

			if (DVEVM_ST_USERBUFFER!=T->g.vpbe.opMode)
			{
				dvtb_vidDec2ReleaseOutBuffers(&T->g.mpeg2dec2.outBufArray, &T->g.mpeg2dec2.vdec2OutArgs.viddecOutArgs.freeBufID);
			}

		}//end of while ((*nFrames) <= T->g.mpeg2dec2.numFrames)
	} while (DVEVM_ST_FALSE);

	return retCode;
}
Example No. 18
void
dvtb_mpeg4spDec2HanClose(DvevmStMPEG4SPDecInfo *vd, char *engName)
{
	//Free the allocated Input Buffer
	if(0 < vd->inBuf.numBufs )
		dvtb_freeCmem(&vd->inBuf);

	//Free all the allocated output buffers
	dvtb_mpeg4spDec2DeInitOutBuffers(vd);

	dvtb_mpeg4spDec2Close(vd);
	SYS_OUT("Video Decoder <%s> closed.\n", vd->vdec2Name);

	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&vd->ceHdl))
		SYS_ERROR("Unable to close Engine <%s>.\n", engName);
	else
		SYS_OUT("Engine <%s> closed.\n", engName);
}
Example No. 19
DvevmStRetCode
dvtb_DVREncDecMpeg4spDec2ProcessOutBuff(DvevmStThreadInfo *T, DvevmStMPEG4SPDecInfo *vd, FILE *fp, DvevmStBool fileBased, int nFrames, int *displayFrame)
{
	int validOutIds = 0, frames = 0;
	DvevmStRetCode retCode = DVEVM_ST_SUCCESS;

	frames = *displayFrame;
	//Check for Frame ready via outputID array and display buffer pointers
	validOutIds = 0;
	while(vd->mpeg4sp_vdecOutArgs.viddecOutArgs.outputID[validOutIds] > 0)
	{
		SYS_DEBUG("Valid output id = %d\n",validOutIds);

		if(DVEVM_ST_TRUE == fileBased)
		{
			SYS_DEBUG("Frame #%d, OutputId %d.\n", nFrames, (int)vd->mpeg4sp_vdecOutArgs.viddecOutArgs.outputID[validOutIds]);
			if(DVEVM_ST_FAIL == dvtb_DVREncDecMpeg4spDec2WriteOutputData(&vd->mpeg4sp_vdecOutArgs.viddecOutArgs.displayBufs[validOutIds], fp))
			{
				SYS_ERROR("Unable to Dump frame # %d. \n",frames);
				retCode = DVEVM_ST_FAIL;
				break;
			}
			SYS_OUT("Dumped Frame# <%d>.\n", frames);
			frames++;
		}
		else
		{
			if(DVEVM_ST_FAIL == dvtb_DVREncDecMpeg4spDec2DisplayOutputData(T, (char *)vd->mpeg4sp_vdecOutArgs.viddecOutArgs.displayBufs[validOutIds].bufDesc[0].buf, vd->mpeg4sp_vdecOutArgs.viddecOutArgs.displayBufs[validOutIds].bufDesc[0].bufSize))
			{
				SYS_ERROR("Unable to display frame # %d .\n",frames);
				retCode = DVEVM_ST_FAIL;
				break;
			}

			SYS_OUT("Displayed Frame# %d.\n", frames);
			frames++;
		}

		validOutIds++;
	}

	*displayFrame = frames;
	return retCode;
}
Example No. 20
void
dvtb_mpeg2Dec2HanClose(DvevmStMPEG2Dec2Info *vd, char *engName)
{
	//Free the allocated Input Buffer
	if (0 < vd->inBuf.numBufs)
		dvtb_freeCmemSingleChunk(&vd->inBuf);

	//Free all the allocated output buffers

	dvtb_vidDec2DeInitOutBuffers_dm365(&vd->outBufArray, vd->totBuf);

	dvtb_mpeg2Dec2Close(vd);
	SYS_OUT("Video Decoder <%s> closed\n", vd->mpeg2dec2Name);

	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&vd->ceHdl))
		SYS_ERROR("Unable to close Engine <%s>\n", engName);
	else
		SYS_OUT("Engine <%s> closed\n", engName);
}
Example No. 21
File: lfs.c Project: shenyan1/aiofs
void lfs_reopen ()
{
    close (lfs_n.fd);
#ifdef O_DIRECT_MODE
    lfs_n.fd = open (lfs_n.block_device, O_RDWR | O_DIRECT);
    SYS_OUT ("O_DIRECT MODE is open");
#else
    lfs_n.fd = open (lfs_n.block_device, O_RDWR);
#endif
    if (lfs_n.fd < 0)
      {
	  perror ("block device open failed");
	  exit (1);
      }
}
Example No. 22
DvevmStRetCode
dvtb_ceMem(DvevmStEngineInfo *ce)
{
	DVTBASSERT(ce != NULL);
	DVTBASSERT(ce->hdl != NULL);

	ce->mem = Engine_getUsedMem(ce->hdl);
	if (0 == ce->mem)
	{
		ce->err = Engine_getLastError(ce->hdl);
		SYS_ERROR("Unable (%d) to get used memory for Engine <%s>\n", ce->err, ce->name);
		return DVEVM_ST_FAIL;
	}
	else
	{
		SYS_OUT("Engine <%s> => Used Memory : %d MAUs\n", ce->name, ce->mem);
		return DVEVM_ST_SUCCESS;
	}
}
Example No. 23
DvevmStRetCode
dvtb_ceCpu(DvevmStEngineInfo *ce)
{
	DVTBASSERT(ce != NULL);
	DVTBASSERT(ce->hdl != NULL);

	ce->cpu = Engine_getCpuLoad(ce->hdl);
	if (ce->cpu < 0)
	{
		ce->err = Engine_getLastError(ce->hdl);
		SYS_ERROR("Unable (%d) to get CPU load for Engine <%s>\n", ce->err, ce->name);
		return DVEVM_ST_FAIL;
	}
	else
	{
		SYS_OUT("Engine <%s> => CPU Load : %d MAUs\n", ce->name, ce->cpu);
		return DVEVM_ST_SUCCESS;
	}

}
Example No. 24
DvevmStRetCode
dvtb_ceInit(char *engineName, Engine_Handle *hdl)
{
	Engine_Error err;

	DVTBASSERT(engineName != NULL);
	DVTBASSERT(hdl != NULL);

	*hdl = Engine_open(engineName, NULL, &err);
	myHdl = *hdl;
	if (NULL == *hdl)
	{
		SYS_ERROR("Engine <%s> open failed. Status => %d\n", engineName, err);
		return DVEVM_ST_FAIL;
	}
	else
	{
		SYS_OUT("Engine <%s> opened.\n", engineName);
		SYS_DEBUG("Engine Handle %x\n", (unsigned int) *hdl);
		return DVEVM_ST_SUCCESS;
	}
}
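
A minimal usage sketch, not part of the original sources, tying together dvtb_ceMem, dvtb_ceCpu and dvtb_ceInit from the three examples above. It assumes a DvevmStEngineInfo that already carries the engine name, and that dvtb_ceDeInit(Engine_Handle *) behaves as in the *HanClose examples earlier; the helper name is hypothetical.

static void
demoEngineStats(DvevmStEngineInfo *ce)	/* hypothetical helper */
{
	/* Open the Codec Engine by name; the handle is stored in ce->hdl. */
	if (DVEVM_ST_FAIL == dvtb_ceInit(ce->name, &ce->hdl))
		return;

	/* Query used memory and CPU load; both helpers report via SYS_OUT/SYS_ERROR. */
	dvtb_ceMem(ce);
	dvtb_ceCpu(ce);

	/* Close the engine again, mirroring the *HanClose examples. */
	if (DVEVM_ST_FAIL == dvtb_ceDeInit(&ce->hdl))
		SYS_ERROR("Unable to close Engine <%s>\n", ce->name);
	else
		SYS_OUT("Engine <%s> closed\n", ce->name);
}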
Example No. 25
DvevmStRetCode
dvtb_initApp(void)
{
	DvevmStRetCode rc = DVEVM_ST_FAIL;
	IRESMAN_MemTcmParams memTcmConfigParams;

	dvtb_ceRuntimeInit(&G->eng);

	CMEM_init( );

	rc = dvtb_initParams( );
	ASSERT(rc != DVEVM_ST_FAIL);

	if (RMAN_init( ) != IRES_OK)
	{
		SYS_ERROR("Cannot Init RMAN.\n");
		return DVEVM_ST_FAIL;
	} else
		SYS_OUT("RMAN initialized.\n");

	/*
	 * Register IRES components
	 */
	memTcmConfigParams.baseConfig.allocFxn = RMAN_PARAMS.allocFxn;
	memTcmConfigParams.baseConfig.freeFxn = RMAN_PARAMS.freeFxn;
	memTcmConfigParams.baseConfig.size = sizeof(IRESMAN_MemTcmParams);

	if (RMAN_register(&IRESMAN_MEMTCM, (IRESMAN_Params *) &memTcmConfigParams) != IRES_OK)
	{
		SYS_ERROR("MEMTCM Protocol Registration Failed \n");
		return DVEVM_ST_FAIL;
	}
	SYS_DEBUG("MEMTCM Protocol Registration Success \n");

	return DVEVM_ST_SUCCESS;
}
Example No. 26
void
dvtb_H264Enc1Capture(DvevmStThreadInfo *T)
{
	int maxFrames = 0, nFrames = 0, targetBytes = DEF_VIDFRAME_SIZE, width = 0, height = 0,
		encDuration = 0, i = 0;

	char *targetBuff = NULL;
	IVIDEO1_BufDescIn rszOutBufDesc;
	DvevmStXdmBuffer rszOutBuf;

	DvevmStBool usecaseFailed = DVEVM_ST_FALSE, vpfeOpened = DVEVM_ST_FALSE, fileBased = DVEVM_ST_TRUE, rszOpened = DVEVM_ST_FALSE, deInterOpened = DVEVM_ST_FALSE;

#ifdef DUMP_RECON_BUF
	FILE *reconBufFp = NULL;
	if(DVEVM_ST_FAIL == dvtb_fileOpen(&reconBufFp, "ReconBuf.buf", "wb"))
	{
		SYS_ERROR("can not open ReconBuf.buf\n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit( );
	}

#endif

	maxFrames = T->g.h264enc1.numFrames;

	/* Quit if target file is not specified */
	if (!T->targetFp)
	{
		SYS_ERROR("Target file cannot be opened. It is a mandatory parameter\n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit( );
#ifdef DUMP_RECON_BUF
		if(reconBufFp)
			fclose(reconBufFp);
#endif
	}

	if(!T->useAlgorithm)
	{
		SYS_ERROR("No DSP option is not supported \n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit();
#ifdef DUMP_RECON_BUF
		if(reconBufFp)
			fclose(reconBufFp);
#endif

	}

	if(!T->sourceFp)
		fileBased = DVEVM_ST_FALSE;

	dvtb_startMessage(DVEVM_ST_ENCODE, "H264", T->useAlgorithm, fileBased);
	dvtb_h264Enc1InitPtrs(&T->g.h264enc1);
	if(T->g.vprsz.rszEnable)
	{
		rszOutBuf.numBufs = 0;
		for(i=0; i<MAX_XDM_BUFS; i++)
			rszOutBuf.bufs[i] = NULL;
	}
	

	/* Dummy loop to enable easy cleanup */
	do
	{
		/* If source file is not defined, initialize VPFE
		 * Else initialize the YUV source file to be encoded */
		if (DVEVM_ST_FALSE == fileBased)
		{
			if (DVEVM_ST_FAIL == dvtb_vpfeSetup(&T->g.vpfe))
			{
				SYS_ERROR("Unable to initialize VPFE device <%s>\n", T->g.vpfe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
			if (DVEVM_ST_FAIL == dvtb_vpfeCaptureStart(&T->g.vpfe))
			{
				SYS_ERROR("Unable to start VPFE device <%s>\n", T->g.vpfe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}


			vpfeOpened = DVEVM_ST_TRUE;
			SYS_OUT("VPFE device <%s> initialized for capture\n", T->g.vpfe.device);

			if(T->g.vprsz.rszEnable)
			{
				T->g.vprsz.rszInputFormat = T->g.h264enc1.venc1Params.videncParams.inputChromaFormat;
				T->g.vprsz.rszInWidth     = T->g.vpfe.capWidth;
				T->g.vprsz.rszInHeight    = T->g.vpfe.capHeight;
				T->g.vprsz.rszOutWidth    = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputWidth;
				T->g.vprsz.rszOutHeight   = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputHeight;
				
				if (DVEVM_ST_FAIL == dvtb_resizerSetup(&T->g.vprsz))
				{
					SYS_ERROR("Unable to initialize resizer \n");
					usecaseFailed = DVEVM_ST_TRUE;
					break;
				}
				rszOpened = DVEVM_ST_TRUE;
			
				if (DVEVM_ST_FAIL == dvtb_resizerOutBuffAlloc(&T->g.vprsz, &rszOutBuf, &rszOutBufDesc))
				{
					SYS_ERROR("Unable to Allocate buffers for resizer \n");
					usecaseFailed = DVEVM_ST_TRUE;
					break;
				}
			}
		}

		width = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputWidth;
		height = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputHeight;

		if (DVEVM_ST_FAIL == dvtb_h264Enc1HanInit(&T->g.h264enc1, (char *)T->g.eng.name))
		{
			SYS_ERROR("Unable to initialize Video Encoder Handle \n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		T->g.h264enc1.inBufDesc.frameWidth =  T->g.h264enc1.venc1DynParams.videncDynamicParams.inputWidth;
		T->g.h264enc1.inBufDesc.frameHeight = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputHeight;
		// Allocate memory for codec input buffers
		if (DVEVM_ST_FAIL == dvtb_h264Enc1InBuffAlloc(&T->g.h264enc1.inBuf, &T->g.h264enc1.inBufDesc, fileBased))
		{
			SYS_ERROR("Error in allocating memory for input buffer\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		if (DVEVM_ST_FAIL == dvtb_h264Enc1OutBuffAlloc(&T->g.h264enc1.outBuf, &T->g.h264enc1.outBufDesc))
		{
			SYS_ERROR("Error in allocating memory for output buffer\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}
		if(T->g.vprsz.rszEnable)
		{
			rszOutBufDesc.frameWidth = T->g.h264enc1.inBufDesc.frameWidth;
			rszOutBufDesc.frameHeight = T->g.h264enc1.inBufDesc.frameHeight;
			rszOutBufDesc.framePitch = T->g.h264enc1.inBufDesc.framePitch;
		}

		if (DVEVM_ST_FALSE == fileBased)
		{
			if(T->g.deinter.deinterEnable)
			{
				T->g.deinter.deinterInWidth     = T->g.vpfe.capWidth;
				T->g.deinter.deinterInHeight    = T->g.vpfe.capHeight;
				T->g.deinter.deinterInputFormat = T->g.h264enc1.venc1Params.videncParams.inputChromaFormat;
				T->g.deinter.ceHdl              = T->g.h264enc1.ceHdl;

				if (DVEVM_ST_FAIL == dvtb_deInterInit(&T->g.deinter))
				{
					SYS_ERROR("Unable to initialize DeInterlacer \n");
					usecaseFailed = DVEVM_ST_TRUE;
					break;
				}
				deInterOpened = DVEVM_ST_TRUE;
			}
		}


		/* Start the [capture][encode] process */
		while (nFrames++ < maxFrames)
		{
			if(DVEVM_ST_FAIL == dvtb_h264Enc1ReadInput(&T->g.vpfe, T->sourceFp, &T->g.h264enc1.inBufDesc, fileBased))
			{
				SYS_ERROR("Error in Read Input \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			if (DVEVM_ST_TRUE == fileBased)
			{
				if (feof(T->sourceFp))
				{
					SYS_OUT("End of source file reached. Exiting ...\n");
					break;
				}
			}

			if(DVEVM_ST_FALSE == fileBased)
			{
				if(T->g.deinter.deinterEnable)
				{
					if(DVEVM_ST_FAIL == dvtb_copyBufDescToBufs(&T->g.deinter.inBuf,&T->g.h264enc1.inBufDesc))
					{
						SYS_ERROR("Unable to copy from BufDesc To Bufs \n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}

					if(DVEVM_ST_FAIL == dvtb_deInterEncode(&T->g.deinter, &encDuration))
					{
						SYS_ERROR("Unable to deInterlace \n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}
					SYS_DEBUG("DeinterlaceTime %d \n",encDuration);

					if(DVEVM_ST_FAIL == dvtb_copyBufsToBufDesc(&T->g.h264enc1.inBufDesc,&T->g.deinter.outBuf))
					{
						SYS_ERROR("Unable to copy from Buf To BufDesc \n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}
				}

				if(T->g.vprsz.rszEnable)
				{
					if(DVEVM_ST_FAIL == dvtb_resizeImage(&T->g.vprsz, &T->g.h264enc1.inBufDesc, &rszOutBufDesc))
					{
						SYS_ERROR("Unable to resize \n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}
					memcpy(&T->g.h264enc1.inBufDesc, &rszOutBufDesc, sizeof(rszOutBufDesc));
				}
			}


			if(DVEVM_ST_FAIL == dvtb_h264Enc1EncProcess(&T->g.h264enc1, &encDuration))
			{
				SYS_ERROR("video encoder Process fails for frame # %d \n", nFrames);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			targetBuff = (char *) T->g.h264enc1.outBufDesc.bufs[0];
			targetBytes = T->g.h264enc1.venc1OutArgs.videncOutArgs.bytesGenerated;

			if (T->g.h264enc1.venc1OutArgs.videncOutArgs.inputFrameSkip)
				SYS_ERROR("Frame <%d> Skipped\n", nFrames);
			else
				SYS_OUT("Encoded Frame %d.\n", nFrames);

#ifdef PERF
			SYS_PERF("Thread, %x, Frame#, %d, Frame Type, %d, Frame Size(bytes), %d, EncTime(us), %d\n", (unsigned int) T, nFrames, (int) T->g.h264enc1.venc1OutArgs.videncOutArgs.encodedFrameType, targetBytes, encDuration);
#endif

			if(DVEVM_ST_FAIL ==  dvtb_fileWrite(T->targetFp, (char *)targetBuff, targetBytes))
			{
				SYS_ERROR("Error in writing to file.\n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

#ifdef DUMP_RECON_BUF
			SYS_OUT("Dumping ReconBuf data for Frame# %d\n", nFrames);
			SYS_OUT("Recon OutBuf[1], size %d\n", outbuf->bufSizes[1]);
			fwrite(outbuf->bufs[1], 1, outbuf->bufSizes[1], reconBufFp);
			SYS_OUT("Recon OutBuf[2], size %d\n", outbuf->bufSizes[2]);
			fwrite(outbuf->bufs[2], 1, outbuf->bufSizes[2], reconBufFp);
#endif
			T->g.h264enc1.venc1OutArgs.videncOutArgs.inputFrameSkip = 0;

			if (DVEVM_ST_FALSE == fileBased)
				dvtb_vpfeReturnBuffer(&T->g.vpfe);
		}
	} while (DVEVM_ST_FALSE);

	dvtb_h264Enc1ProcessEndMessage(T->useAlgorithm, fileBased, nFrames);

	if(DVEVM_ST_TRUE == deInterOpened)
	{
		dvtb_deInterClose(&T->g.deinter);
		SYS_OUT("Deinterlacer <%s> closed\n", T->g.deinter.deinterName);
	}

	dvtb_h264Enc1HanClose(&T->g.h264enc1, T->g.eng.name);

	if (DVEVM_ST_TRUE == vpfeOpened)
	{
		dvtb_vpfeClose(&T->g.vpfe);
		SYS_OUT("VPFE Device <%s> closed\n", T->g.vpfe.device);
	}

	if(T->g.vprsz.rszEnable)
	{
		if (rszOutBuf.numBufs > 0)
		{
			dvtb_freeCmem(&rszOutBuf);
			rszOutBuf.numBufs = 0;
		}
	}

	if(DVEVM_ST_TRUE == rszOpened)
	{
		dvtb_resizerClose(&T->g.vprsz);
		SYS_OUT("Resizer closed\n");
	}

#ifdef DUMP_RECON_BUF
	if(reconBufFp)
		fclose(reconBufFp);
#endif

	dvtb_fileClose(&T->sourceFp, &T->targetFp);

	dvtb_exitMessage(DVEVM_ST_ENCODE, "H264", T->useAlgorithm, fileBased, usecaseFailed);

	dvtb_freeThr(T);
	dvtb_threadExit( );
}
Example No. 27
void
dvtb_H264Enc1Capture(DvevmStThreadInfo *T)
{
	int i = 0, maxFrames = 0, nFrames = 0, targetBytes = DEF_VIDFRAME_SIZE, width = 0, height = 0,
		encDuration = 0;

	char *targetBuff = NULL;
	DvevmStBool usecaseFailed = DVEVM_ST_FALSE, vpfeOpened = DVEVM_ST_FALSE, fileBased = DVEVM_ST_TRUE;
	IVIDEO1_BufDescIn pubInYUVBufDesc;
	DvevmStXdmBuffer pubInYUVBuff;

#ifdef DUMP_RECON_BUF
	FILE *reconBufFp = NULL;
	if(DVEVM_ST_FAIL == dvtb_fileOpen(&reconBufFp, "ReconBuf.buf", "wb"))
	{
		SYS_ERROR("can not open ReconBuf.buf\n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit( );
	}

#endif

	maxFrames = T->g.h264enc1.numFrames;

	/* Quit if target file is not specified */
	if (!T->targetFp)
	{
		SYS_ERROR("Target file cannot be opened. It is a mandatory parameter\n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit( );
#ifdef DUMP_RECON_BUF
		if(reconBufFp)
			fclose(reconBufFp);
#endif
	}

	if(!T->useAlgorithm)
	{
		SYS_ERROR("No DSP option is not supported \n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit();
#ifdef DUMP_RECON_BUF
		if(reconBufFp)
			fclose(reconBufFp);
#endif

	}

	if(!T->sourceFp)
		fileBased = DVEVM_ST_FALSE;

	dvtb_startMessage(DVEVM_ST_ENCODE, "H264", T->useAlgorithm, fileBased);
	dvtb_h264Enc1InitPtrs(&T->g.h264enc1);

	//pubInYUVBuff: Initialize
	pubInYUVBuff.numBufs = 0;
	for(i=0; i<MAX_XDM_BUFS; i++)
		pubInYUVBuff.bufs[i] = NULL;


	/* Dummy loop to enable easy cleanup */
	do
	{
		/* If source file is not defined, initialize VPFE
		 * Else initialize the YUV source file to be encoded */
		if (DVEVM_ST_FALSE == fileBased)
		{
			if (DVEVM_ST_FAIL == dvtb_vpfeSetup(&T->g.vpfe))
			{
				SYS_ERROR("Unable to initialize VPFE device <%s>\n", T->g.vpfe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
			if (DVEVM_ST_FAIL == dvtb_vpfeCaptureStart(&T->g.vpfe))
			{
				SYS_ERROR("Unable to start VPFE device <%s>\n", T->g.vpfe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}


			vpfeOpened = DVEVM_ST_TRUE;
			SYS_OUT("VPFE device <%s> initialized for capture\n", T->g.vpfe.device);

			if(DVEVM_ST_FAIL == dvtb_vdceSetup(&T->g.capvdce, T->g.vpfe.capHeight, T->g.vpfe.capWidth, T->g.vpfe.capWidth, 2))
			{
				SYS_ERROR("Unable to setup VDCE device \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			SYS_OUT("VDCE device  <%s> initialized \n", T->g.capvdce.device);
		}

		width = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputWidth;
		height = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputHeight;

		if (DVEVM_ST_FAIL == dvtb_h264Enc1HanInit(&T->g.h264enc1, (char *)T->g.eng.name))
		{
			SYS_ERROR("Unable to initialize Video Encoder Handle \n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		T->g.h264enc1.inBufDesc.frameWidth =  T->g.h264enc1.venc1DynParams.videncDynamicParams.inputWidth;
		T->g.h264enc1.inBufDesc.frameHeight = T->g.h264enc1.venc1DynParams.videncDynamicParams.inputHeight;
		// Allocate memory for codec input buffers
		if (DVEVM_ST_FAIL == dvtb_h264Enc1InBuffAlloc(&T->g.h264enc1.inBuf, &T->g.h264enc1.inBufDesc, fileBased))
		{
			SYS_ERROR("Error in allocating memory for input buffer\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		if (DVEVM_ST_FAIL == dvtb_h264Enc1OutBuffAlloc(&T->g.h264enc1.outBuf, &T->g.h264enc1.outBufDesc))
		{
			SYS_ERROR("Error in allocating memory for output buffer\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		if (DVEVM_ST_TRUE == fileBased)
		{
			// Allocate memory & Update pubInYUVBuff and pubInYUVBufDesc for "File Read"
			pubInYUVBuff.numBufs = 3;
			pubInYUVBuff.bufSizes[0] = width * height;
			pubInYUVBuff.bufSizes[1] = width * height / 4;
			pubInYUVBuff.bufSizes[2] = width * height / 4;

			if (DVEVM_ST_FAIL == dvtb_h264Enc1InBuffAlloc(&pubInYUVBuff, &pubInYUVBufDesc, DVEVM_ST_TRUE))
			{
				SYS_ERROR("Cannot allocate memory for file read\n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
		}
		else
		{
			// Update pubInYUVBufDesc for "capture from driver"
			pubInYUVBufDesc.numBufs = 1;
		}

		/* Start the [capture][encode] process */
		while (nFrames++ < maxFrames)
		{
			if(DVEVM_ST_FAIL == dvtb_h264Enc1ReadInput(&T->g.vpfe, T->sourceFp, &pubInYUVBufDesc, fileBased))
			{
				SYS_ERROR("Error in Read Input \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			if (DVEVM_ST_TRUE == fileBased)
			{
				if (feof(T->sourceFp))
				{
					SYS_OUT("End of source file reached. Exiting ...\n");
					break;
				}
			}
			if(DVEVM_ST_FAIL == dvtb_h264Enc1ConvertFormat(T, &pubInYUVBuff, &T->g.h264enc1.inBufDesc, fileBased))
			{
				SYS_ERROR("Failed to change the input format \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			if(DVEVM_ST_FAIL == dvtb_h264Enc1EncProcess(&T->g.h264enc1, &encDuration))
			{
				SYS_ERROR("video encoder Process fails for frame # %d \n", nFrames);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			targetBuff = (char *) T->g.h264enc1.outBufDesc.bufs[0];
			targetBytes = T->g.h264enc1.venc1OutArgs.videncOutArgs.bytesGenerated;

			if (T->g.h264enc1.venc1OutArgs.videncOutArgs.inputFrameSkip)
				SYS_ERROR("Frame <%d> Skipped\n", nFrames);
			else
				SYS_OUT("Encoded Frame %d.\n", nFrames);

#ifdef PERF
			SYS_PERF("Thread, %x, Frame#, %d, Frame Type, %d, Frame Size(bytes), %d, EncTime(us), %d\n", (unsigned int) T, nFrames, (int) T->g.h264enc1.venc1OutArgs.videncOutArgs.encodedFrameType, targetBytes, encDuration);
#endif

			if(DVEVM_ST_FAIL ==  dvtb_fileWrite(T->targetFp, (char *)targetBuff, targetBytes))
			{
				SYS_ERROR("Error in writing to file.\n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

#ifdef DUMP_RECON_BUF
			SYS_OUT("Dumping ReconBuf data for Frame# %d\n", nFrames);
			SYS_OUT("Recon OutBuf[1], size %d\n", outbuf->bufSizes[1]);
			fwrite(outbuf->bufs[1], 1, outbuf->bufSizes[1], reconBufFp);
			SYS_OUT("Recon OutBuf[2], size %d\n", outbuf->bufSizes[2]);
			fwrite(outbuf->bufs[2], 1, outbuf->bufSizes[2], reconBufFp);
#endif
			T->g.h264enc1.venc1OutArgs.videncOutArgs.inputFrameSkip = 0;

			if (DVEVM_ST_FALSE == fileBased)
				dvtb_vpfeReturnBuffer(&T->g.vpfe);
		}
	} while (DVEVM_ST_FALSE);

	dvtb_h264Enc1ProcessEndMessage(T->useAlgorithm, fileBased, nFrames);

	if (pubInYUVBuff.numBufs > 0)
	{
		dvtb_freeCmem(&pubInYUVBuff);
	}

	dvtb_h264Enc1HanClose(&T->g.h264enc1, T->g.eng.name);

	if (DVEVM_ST_TRUE == vpfeOpened)
	{
		dvtb_vpfeClose(&T->g.vpfe);
		SYS_OUT("VPFE Device <%s> closed\n", T->g.vpfe.device);
	}

	if (DVEVM_ST_TRUE == T->g.capvdce.vdceOpened)
	{
		dvtb_vdceClose(&T->g.capvdce);
		SYS_OUT("VDCE Device <%s> closed\n",T->g.capvdce.device);
	}

#ifdef DUMP_RECON_BUF
	if(reconBufFp)
		fclose(reconBufFp);
#endif

	dvtb_fileClose(&T->sourceFp, &T->targetFp);

	dvtb_exitMessage(DVEVM_ST_ENCODE, "H264", T->useAlgorithm, fileBased, usecaseFailed);

	dvtb_freeThr(T);
	dvtb_threadExit( );
}
Example No. 28
void
dvtb_VidLoopback1(DvevmStThreadInfo *T)
{

	int i = 0, maxFrames = 0, nFrames = 1, targetBytes = 0, width = 0, height = 0,
		encDuration = 0, frameSize = 0, decDuration = 0, displayFrame = 1, bytesConsumed = 0,
		remainingBytes = 0, buffSize = 0;

	DvevmStRetCode status = DVEVM_ST_FAIL;
	DvevmStBool firstProcessCall = DVEVM_ST_TRUE, usecaseFailed=DVEVM_ST_FALSE,
		vpbeOpened=DVEVM_ST_FALSE, fileBased = DVEVM_ST_TRUE, vpfeOpened = DVEVM_ST_FALSE;


#if 0
	T->g.vpbe.videoFd = 0;
	T->g.vpbe.opMode = DVEVM_ST_USERBUFFER;
#endif

	maxFrames = T->g.venc1.numFrames;
	if(T->g.viddec2.numFrames > maxFrames)
		maxFrames = T->g.viddec2.numFrames;


	if ((T->targetFp && (!T->sourceFp)) || (T->sourceFp && (!T->targetFp)))
	{
		SYS_ERROR("mandatory parameters: Either provide both source and target files or provide none \n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThread(T);
		dvtb_waitThread();
	}
	if(!T->useAlgorithm)
	{
		SYS_ERROR("No DSP option is not supported \n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThread(T);
		dvtb_waitThread();
	}

	if ((!T->targetFp) && (!T->sourceFp))
		fileBased = DVEVM_ST_FALSE;

	dvtb_startMessage(DVEVM_ST_ENCDEC, "Video Loopback", T->useAlgorithm, fileBased);
	dvtb_vidDec2InitPtrs(&T->g.viddec2);
	dvtb_vidEnc1InitPtrs(&T->g.venc1);


	/* Dummy loop to enable easy cleanup */
	do
	{
		T->g.viddec2.hTime = dvtb_timeCreate();
		if(NULL == T->g.viddec2.hTime)
		{
			SYS_ERROR("Unable to Create Time handle. \n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		T->g.venc1.hTime = dvtb_timeCreate();
		if(NULL == T->g.venc1.hTime)
		{
			SYS_ERROR("Unable to Create Time handle. \n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		width = T->g.venc1.venc1DynParams.inputWidth;
		height = T->g.venc1.venc1DynParams.inputHeight;

		// Video Encoder: Initialize
		if (DVEVM_ST_FAIL == dvtb_vidEnc1HanInit(&T->g.venc1, (char *)T->g.eng.name))
		{
			SYS_ERROR("Unable to initialize Video Encoder Handle \n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		//Video Decoder: Initialize
		T->g.viddec2.ceHdl = T->g.venc1.ceHdl;
		if (DVEVM_ST_FAIL == dvtb_vidDec2Init(&T->g.viddec2))
		{
			SYS_ERROR("Unable to initialize Video Decoder <%s>\n", T->g.viddec2.vdec2Name);
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		//Video Encoder: Allocate Input buffer
		T->g.venc1.inBufDesc.frameWidth =  T->g.venc1.venc1DynParams.inputWidth;
		T->g.venc1.inBufDesc.frameHeight = T->g.venc1.venc1DynParams.inputHeight;
		if (DVEVM_ST_FAIL == dvtb_vidEnc1InBuffAlloc(&T->g.venc1.inBuf, &T->g.venc1.inBufDesc, fileBased))
		{
			SYS_ERROR("Error in allocating memory for input buffer\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		//Video Encoder: Allocate Output buffer
		if (DVEVM_ST_FAIL == dvtb_vidEnc1OutBuffAlloc(&T->g.venc1.outBuf, &T->g.venc1.outBufDesc))
		{
			SYS_ERROR("Error in allocating memory for output buffer\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		//Video Decoder: Allocate First output buffer
		if(DVEVM_ST_FAIL == dvtb_vidDec2InitFirstOutBuffer(&T->g.viddec2))
		{
			SYS_ERROR("Unable to Initialize 1st out buffer.\n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}



		if (DVEVM_ST_FALSE == fileBased)
		{
#if 0
			//Allocate buffers for driver(when used in user buffer pointer Mode)
			buffSize = T->g.viddec2.outBufArray[0].buffer.bufSizes[0];
			if (DVEVM_ST_FAIL == dvtb_vidDec2VpbeBuffAlloc(&T->g.vpbe, buffSize))
			{
				SYS_ERROR("Error in allocating memory for VPBE buffer(User buffer mode).\n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

#ifndef VPFENOTSUPPORTED
			T->g.vpbe.imageWidth = T->g.vpfe.capWidth;
			T->g.vpbe.imageHeight = T->g.vpfe.capHeight;
#endif

			status = (*dvtb_vpbeSetupPtr)(&T->g.vpbe);
			if (DVEVM_ST_FAIL == status)
			{
				SYS_ERROR("Unable to setup VPBE device <%s> for display\n", T->g.vpbe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
			status = dvtb_vpbeDisplayStart(&T->g.vpbe);
			if (DVEVM_ST_FAIL == status)
			{
				SYS_ERROR("Unable to start display device <%s>.\n", T->g.vpbe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			vpbeOpened = DVEVM_ST_TRUE;
			SYS_OUT("VPBE device <%s> initialized for display\n", T->g.vpbe.device);

#ifndef VPFENOTSUPPORTED
			if (DVEVM_ST_FAIL == dvtb_vpfeSetup(&T->g.vpfe))
			{
				SYS_ERROR("Unable to initialize VPFE device <%s>\n", T->g.vpfe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
			if (DVEVM_ST_FAIL == dvtb_vpfeCaptureStart(&T->g.vpfe))
			{
				SYS_ERROR("Unable to start VPFE device <%s>\n", T->g.vpfe.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			vpfeOpened = DVEVM_ST_TRUE;
			SYS_OUT("VPFE device <%s> initialized for capture\n", T->g.vpfe.device);
#endif
#endif
		}


		T->g.viddec2.vdec2OutArgs.outBufsInUseFlag = 0;

		// point video decoder input buffer to the output buffer of video encoder
		T->g.viddec2.inBufDesc.numBufs = T->g.venc1.outBufDesc.numBufs;
		T->g.viddec2.inBuf.numBufs = T->g.venc1.outBufDesc.numBufs;
		for (i = 0; i < T->g.viddec2.inBufDesc.numBufs; i++)
		{
			T->g.viddec2.inBufDesc.descs[i].buf = T->g.venc1.outBufDesc.bufs[i];
			T->g.viddec2.inBufDesc.descs[i].bufSize = T->g.venc1.outBufDesc.bufSizes[i];
			T->g.viddec2.inBuf.bufs[i] = (char *) T->g.venc1.outBufDesc.bufs[i];
			T->g.viddec2.inBuf.bufSizes[i] = T->g.venc1.outBufDesc.bufSizes[i];
		}

		/* Start the [capture][encode] process */
		while (nFrames <= maxFrames)
		{
#ifndef VPFENOTSUPPORTED
			if(DVEVM_ST_FAIL == dvtb_vidEnc1ReadInput(&T->g.vpfe, T->sourceFp, &T->g.venc1.inBufDesc, fileBased))
			{
				SYS_ERROR("Error in Read Input \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
#else
			if(DVEVM_ST_FAIL == dvtb_vidEnc1ReadInput(T->sourceFp, &T->g.venc1.inBufDesc, fileBased))
			{
				SYS_ERROR("Error in Read Input \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
#endif

			if (DVEVM_ST_TRUE == fileBased)
			{
				if (feof(T->sourceFp))
				{
					SYS_OUT("End of source file reached. Exiting ...\n");
					break;
				}
			}

			//Video Encoder: Encode input buffer
			if(DVEVM_ST_FAIL == dvtb_vidEnc1EncProcess(&T->g.venc1, &encDuration))
			{
				SYS_ERROR("video encoder Process fails for frame # %d \n", nFrames);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			targetBytes = T->g.venc1.venc1OutArgs.bytesGenerated;

			if (T->g.venc1.venc1OutArgs.inputFrameSkip)
				SYS_ERROR("Frame <%d> Skipped\n", nFrames);
			else
				SYS_OUT("Encoded Frame %d.\n", nFrames);

#ifdef PERF
			SYS_PERF("Thread, %x, Frame#, %d, Frame Type, %d, Frame Size(bytes), %d, EncTime(us), %d\n", (unsigned int) T, nFrames, (int) T->g.venc1.venc1OutArgs.encodedFrameType, targetBytes, encDuration);
#endif
			if (T->g.venc1.venc1OutArgs.bytesGenerated)
			{

				T->g.viddec2.vdec2InArgs.numBytes = targetBytes;
				bytesConsumed = 0;
				remainingBytes = targetBytes;

			}
			else
			{
#ifndef VPFENOTSUPPORTED
				if (DVEVM_ST_FALSE == fileBased)
					dvtb_vpfeReturnBuffer(&T->g.vpfe);
#endif
				continue;
			}

			if (DVEVM_ST_FAIL == dvtb_vidDec2DecProcess(&T->g.viddec2, &decDuration))
			{
				SYS_ERROR("Video decoder process fails for frame # %d. \n",nFrames);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
			frameSize = T->g.viddec2.vdec2OutArgs.bytesConsumed;
			SYS_DEBUG("Bytes Consumed = %d.\n", frameSize);
			if (frameSize == 0)
			{
				SYS_ERROR("Video Decode Process returned bytesConsumed as 0. Exiting decoding.\n");
				break;
			}

			SYS_OUT("Decoded Frame %d.\n", nFrames);
#ifdef PERF
			SYS_PERF("Thread, %x, Decoded Frame#, %d ,Frame Size(bytes), %d, DecDuration(us), %d\n",(unsigned int) T, nFrames, frameSize, decDuration);
#endif

			bytesConsumed += frameSize;
			remainingBytes -= frameSize;

			if (firstProcessCall == DVEVM_ST_TRUE)
			{
				firstProcessCall = DVEVM_ST_FALSE;

				//Allocate and initialize all outBuffers once again
				if(DVEVM_ST_FAIL == dvtb_vidDec2InitAllOutBuffers(&T->g.viddec2))
				{
					SYS_ERROR("Unable to Initialize other out buffers.\n");
					dvtb_vidDec2DeInitOutBuffers(&T->g.viddec2);
					usecaseFailed = DVEVM_ST_TRUE;
					break;
				}

				SYS_DEBUG("All Output buffers initialized.\n");
			}

			if (!T->g.viddec2.vdec2OutArgs.outBufsInUseFlag)
			{
				//TBD
			}

			if(DVEVM_ST_FAIL == dvtb_vidDec2ProcessOutBuff(&T->g,&T->g.viddec2, T->targetFp, fileBased, nFrames, &displayFrame))
			{
				SYS_ERROR("Unable to Process Out buffers. \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			dvtb_vidDec2ReleaseBuffers(&T->g.viddec2, (unsigned int *)T->g.viddec2.vdec2OutArgs.freeBufID);

#ifndef VPFENOTSUPPORTED
			if (DVEVM_ST_FALSE == fileBased)
				dvtb_vpfeReturnBuffer(&T->g.vpfe);
#endif

			nFrames++;
		}

		//Flush the held frames only in the case where the usecase has not failed so far
		if(DVEVM_ST_FALSE == usecaseFailed && T->useAlgorithm)
		{
			if(DVEVM_ST_FAIL == dvtb_vidDec2Flush(&T->g,&T->g.viddec2, T->targetFp, fileBased, nFrames, &displayFrame))
			{
				SYS_ERROR("Unable to Flush Held frames buffers. \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
		}
	} while (DVEVM_ST_FALSE);

	dvtb_loopBackVidDec2ProcessEndMessage(T->useAlgorithm, fileBased, displayFrame);

	//Video Decoder: Free all the allocated output buffers
	dvtb_vidDec2DeInitOutBuffers(&T->g.viddec2);

	//Video Decoder: close video decoder
	dvtb_vidDec2Close(&T->g.viddec2);
	SYS_OUT("Video Decoder <%s> closed.\n", T->g.viddec2.vdec2Name);

	//Video Encoder: close video encoder and engine
	dvtb_vidEnc1HanClose(&T->g.venc1, T->g.eng.name);

#if 0
	if(DVEVM_ST_TRUE == vpbeOpened)
	{
		dvtb_vpbeClose(&T->g.vpbe);
		SYS_OUT("VPBE Device <%s> closed.\n", T->g.vpbe.device);
	}

	if (DVEVM_ST_TRUE == vpfeOpened)
	{
#ifndef VPFENOTSUPPORTED
		dvtb_vpfeClose(&T->g.vpfe);
		SYS_OUT("VPFE Device <%s> closed\n", T->g.vpfe.device);
#endif
	}
#endif

	dvtb_fileClose(&T->sourceFp, &T->targetFp);
	dvtb_timeDelete(T->g.venc1.hTime);
	dvtb_timeDelete(T->g.viddec2.hTime);
	dvtb_exitMessage(DVEVM_ST_ENCDEC, "Video Loopback", T->useAlgorithm, fileBased, usecaseFailed);

	dvtb_freeThread(T);
	dvtb_waitThread();
}
Example No. 29
DvevmStRetCode
dvtb_setV4l2OutputSandard(DvevmStVpbeInfo *vpbe)
{
	DvevmStRetCode retval = DVEVM_ST_FAIL;
	char stdName[10];

	ASSERT(vpbe != NULL);

	switch (vpbe->dispStd)
	{
		case DVEVM_ST_STANDARD_NA:
			break;
		case DVEVM_ST_NTSC:
			strcpy(stdName, "NTSC");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_PAL:
			strcpy(stdName, "PAL");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_720P_60:
			strcpy(stdName, "720P-60");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_720P_50:
			strcpy(stdName, "720P-50");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_1080I_30:
			strcpy(stdName, "1080I-30");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_1080I_25:
			strcpy(stdName, "1080I-25");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_480P_60:
			strcpy(stdName, "480P-60");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		case DVEVM_ST_576P_50:
			strcpy(stdName, "576P-50");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;

		default:
			SYS_ERROR("Invalid Display Std. Setting it to NTSC by default.\n");
			strcpy(stdName, "NTSC");
			SYS_OUT("Setting Display Standard as <%s>\n", stdName);
			if (DVEVM_ST_SUCCESS != dvtb_writeSysFs(SYSFS_STANDARD, stdName))
			{
				SYS_ERROR("Unable to set the Display Standard as <%s> in the driver.\n", stdName);
				dvtb_close_vpbe(vpbe);
				return retval;
			}
			break;
	}
	return DVEVM_ST_SUCCESS;
}
Example No. 30
void
dvtb_SphEnc1Capture(DvevmStThreadInfo *T)
{
	int nBytes = DVEVM_ST_FAIL, appBuffSize, appBuffFilled, bytesToCapture = 0,
		count = 0, frames = 0, inBuffSize = 0, bytesToRead = 0, encDuration = 0;

	long rem_size = 0, setpos = 0;
	unsigned int size = 0;

	char *buff = NULL, *appBuff = NULL, *appBuffPtr = NULL, *inBuff = NULL;

	DvevmStRetCode retval = DVEVM_ST_FAIL;
	DvevmStBool usecaseFailed = DVEVM_ST_FALSE, audioDeviceOpened =	DVEVM_ST_FALSE, fileBased = DVEVM_ST_TRUE;

	T->g.senc1.sencParams.tablesPtr = NULL;
	appBuffPtr = appBuff;

	if (!T->targetFp)
	{
		SYS_ERROR("Target file cannot be opened. It is a mandatory parameter\n");
		dvtb_fileClose(&T->sourceFp, &T->targetFp);
		dvtb_freeThr(T);
		dvtb_threadExit();
	}

	if(!T->sourceFp)
		fileBased = DVEVM_ST_FALSE;

	dvtb_startMessage(DVEVM_ST_ENCODE, "Speech", T->useAlgorithm, fileBased);
	dvtb_sphEnc1InitPtrs(&T->g.senc1);

	// [Capture][Encode] scenario
	do
	{
		if (T->useAlgorithm)
		{
			appBuffFilled = 0;
			appBuffSize = (SPHENC1_FRAME_SIZE * 4) * 5;

			if (DVEVM_ST_FAIL == dvtb_allocSingleBufCmem(&appBuff, appBuffSize))
			{
				SYS_ERROR("Unable to allocate memory for appBuffer \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			appBuffPtr = appBuff;
			T->g.aud.sampleRate = 8000;

			if(DVEVM_ST_FAIL == dvtb_sphEnc1HanInit(&T->g.senc1, T->g.eng.name))
			{
				SYS_ERROR("Unable to initialize Speech Encoder Handle \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			SYS_OUT("Speech Encoder <%s> initialized\n", T->g.senc1.sencName);
		}
		else
		{
			appBuffFilled = 0;
			appBuffSize = (SPHENC1_FRAME_SIZE * 4) * 10;

			if (DVEVM_ST_FAIL == dvtb_allocSingleBufCmem(&appBuff, appBuffSize))
			{
				SYS_ERROR("Unable to allocate memory for appBuffer \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}
			appBuffPtr = appBuff;
		}

		if (DVEVM_ST_FALSE == fileBased)
		{
			T->g.aud.mode = DVEVM_ST_CAPTURE;
			retval = dvtb_audioSetup(&T->g.aud);

			if (DVEVM_ST_FAIL == retval)
			{
				SYS_ERROR("Unable to initialize Audio device <%s> for capture\n", T->g.aud.device);
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			audioDeviceOpened = DVEVM_ST_TRUE;

			SYS_OUT("Audio Device <%s> initialized for capture\n", T->g.aud.device);

			bytesToCapture = (T->g.senc1.seconds) * (AUD_FRAME_SIZE(T->g.aud.sampleRate, T->g.aud.numChannels, T->g.aud.sampleSize));
		}
		else
		{
			if (!(strcmp(T->g.senc1.sencName, "g711enc")))
			{
				if(DVEVM_ST_FAIL == dvtb_getFileSize(T->sourceFp, &size))
				{
					SYS_ERROR("Unable to Get File Size \n");
					usecaseFailed = DVEVM_ST_TRUE;
					break;
				}

				rem_size = size % (T->g.senc1.inBuf.size);
			}
		}

		inBuffSize = dvtb_sphEnc1GetInputBuffSize(&T->g.senc1, fileBased);
		if (DVEVM_ST_FAIL == dvtb_allocSingleBufCmem(&inBuff, inBuffSize))
		{
			SYS_ERROR("Unable to allocate memory for Input Buffer \n");
			usecaseFailed = DVEVM_ST_TRUE;
			break;
		}

		bytesToRead = dvtb_sphEnc1BytesToRead(&T->g.senc1, fileBased);

		while (1)
		{
			buff = inBuff;

			frames++;

			if (DVEVM_ST_FAIL == dvtb_sphEnc1ReadInput(&T->g.aud, buff, T->sourceFp, bytesToRead, fileBased, &nBytes))
			{
				SYS_ERROR("Read Input fails \n");
				usecaseFailed = DVEVM_ST_TRUE;
				break;
			}

			if (DVEVM_ST_TRUE == fileBased)
			{
				if (0 == nBytes || feof(T->sourceFp))
				{
					SYS_OUT("End of source file reached. Exiting ...\n");
					break;
				}
			}
			else
			{
				if (count >= bytesToCapture)
				{
					break;
				}

				count += SPHENC1_FRAME_SIZE * 4;
			}

			if (T->useAlgorithm)
			{
				if (DVEVM_ST_FAIL == dvtb_sphEnc1EncProcess(&T->g.senc1, buff, fileBased, &encDuration))
				{
					SYS_ERROR("Encode Process fails \n");
					usecaseFailed = DVEVM_ST_TRUE;
					break;
				}

				buff = T->g.senc1.outBuf.start;

#ifdef PERF
				SYS_PERF("Thread, %x, Frame#, %d, Frame Size(bytes), %d, EncTime(us), %d\n", (unsigned int) T, frames, T->g.senc1.outBuf.size, encDuration);
#endif
			}
			else
				T->g.senc1.outBuf.size = SPHENC1_FRAME_SIZE * 4;

			if (!(strcmp(T->g.senc1.sencName, "ilbcenc")))
			{
				if (T->g.senc1.sencParams.codecSelection == 0)
					fwrite(buff, 1, PRM_SIZE_20, T->targetFp);
				else
					fwrite(buff, 1, PRM_SIZE_30, T->targetFp);
			}
			else if (!(strcmp(T->g.senc1.sencName, "g722enc")))
				fwrite(buff, 1, (T->g.senc1.outBuf.size) * 2, T->targetFp);
			else
			{
				memcpy(appBuffPtr, buff, T->g.senc1.outBuf.size);
				appBuffPtr += T->g.senc1.outBuf.size;
				appBuffFilled += T->g.senc1.outBuf.size;

				if (appBuffFilled == appBuffSize)
				{
					/* Store the processed frame to target file */
					if(DVEVM_ST_FAIL ==  dvtb_fileWrite(T->targetFp, (char *)appBuff, appBuffSize))
					{
						SYS_ERROR("Error in writing to file.\n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}

					appBuffPtr = appBuff;
					appBuffFilled = 0;
				}
			}
		}

		if (DVEVM_ST_TRUE == usecaseFailed)// Proceed only when use case is successful
			break;

		if (!(strcmp(T->g.senc1.sencName, "g711enc")))
		{
			if (DVEVM_ST_TRUE == fileBased)
			{
				if (rem_size != 0)
				{
					SYS_OUT("ENTERED rem_size %ld\n", rem_size);
					setpos = size - rem_size;
					buff = inBuff;

					fseek(T->sourceFp, setpos, SEEK_SET);

					if(DVEVM_ST_FAIL == dvtb_sphEnc1ReadInput(NULL, buff, T->sourceFp, rem_size, DVEVM_ST_TRUE, &nBytes))
					{
						SYS_ERROR("Read Input fails \n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}

					if(DVEVM_ST_FAIL == dvtb_inputFileToProcessbuff((char *) buff, (char *) T->g.senc1.inBuf.start, rem_size))
					{
						SYS_ERROR("Unable to copy input file to process buff  \n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}

					memset(T->g.senc1.outBuf.start, 0, rem_size);

					if (DVEVM_ST_FAIL == dvtb_sphEnc1Encode(&T->g.senc1, &encDuration))
					{
						SYS_ERROR("Unable to encode the input2 buffer\n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}

#ifdef PERF
					SYS_PERF("Thread, %x, Frame#, %d, Frame Size(bytes), %d, EncTime(us), %d\n", (unsigned int) T, frames, T->g.senc1.outBuf.size, encDuration);
#endif

					buff = T->g.senc1.outBuf.start;
					memcpy(appBuffPtr, buff, (rem_size / 2));
					appBuffPtr += T->g.senc1.outBuf.size;
					appBuffFilled += (rem_size / 2);

					/* Store the processed frame to target file */
					if(DVEVM_ST_FAIL ==  dvtb_fileWrite(T->targetFp, (char *)appBuff, appBuffFilled))
					{
						SYS_ERROR("Error in writing to file.\n");
						usecaseFailed = DVEVM_ST_TRUE;
						break;
					}

					appBuffPtr = appBuff;
					appBuffFilled = 0;
				}
			}
		}
	} while (DVEVM_ST_FALSE); // Dummy loop

	dvtb_sphEnc1ProcessEndMessage(T->useAlgorithm, fileBased, T->g.senc1.seconds);

	if (DVEVM_ST_TRUE == audioDeviceOpened)
	{
		dvtb_audioClose(&T->g.aud);
		SYS_OUT("Audio Device <%s> closed\n", T->g.aud.device);
	}

	if (T->useAlgorithm)
		dvtb_sphEnc1HanClose(&T->g.senc1, T->g.eng.name);

	if(NULL != appBuff)
		dvtb_freeSingleBufCmem(appBuff, appBuffSize);

	if(NULL != inBuff)
		dvtb_freeSingleBufCmem(inBuff, inBuffSize);


	dvtb_fileClose(&T->sourceFp, &T->targetFp);

	dvtb_exitMessage(DVEVM_ST_ENCODE, "Speech", T->useAlgorithm, fileBased, usecaseFailed);

	dvtb_freeThr(T);
	dvtb_threadExit();
}