/******************************************************************************
 * captureThrFxn
 *
 * Capture thread: configures the DM365 capture (and optionally display)
 * drivers, primes the video pipeline with black frames, then loops:
 * capture a frame -> hand it to the video thread via hOutFifo -> take a
 * processed frame back via hInFifo -> show it (unless preview is disabled)
 * and recycle it to the capture driver.
 *
 * arg is a CaptureEnv*. Returns THREAD_SUCCESS or THREAD_FAILURE via the
 * cleanup() macro, after forcing the rendezvous points so sibling threads
 * are never left blocked on us.
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv              *envp        = (CaptureEnv *) arg;
    Void                    *status      = THREAD_SUCCESS;
    Capture_Attrs            cAttrs      = Capture_Attrs_DM365_DEFAULT;
    Display_Attrs            dAttrs      = Display_Attrs_DM365_VID_DEFAULT;
    BufferGfx_Attrs          gfxAttrs    = BufferGfx_Attrs_DEFAULT;
    Capture_Handle           hCapture    = NULL;
    Display_Handle           hDisplay    = NULL;
    BufTab_Handle            hBufTab     = NULL;
    BufTab_Handle            hDispBufTab = NULL;
    BufTab_Handle            hFifoBufTab = NULL;
    Buffer_Handle            hDstBuf, hCapBuf, hDisBuf, hBuf;
    BufferGfx_Dimensions     capDim;
    VideoStd_Type            videoStd;
    Int32                    width, height, bufSize;
    Int                      fifoRet;
    ColorSpace_Type          colorSpace  = ColorSpace_YUV420PSEMI;
    Int                      bufIdx;
    Int                      numCapBufs;

    /* Create capture device driver instance */
    cAttrs.numBufs    = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;
    cAttrs.videoStd   = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    videoStd = envp->videoStd;

    /* We only support D1, 720P and 1080I input */
    if (videoStd != VideoStd_D1_NTSC && videoStd != VideoStd_D1_PAL &&
        videoStd != VideoStd_720P_60 && videoStd != VideoStd_720P_50 &&
        videoStd != VideoStd_1080I_30) {
        ERR("Need D1/720P/1080P input to this demo\n");
        cleanup(THREAD_FAILURE);
    }

    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        /* User supplied a resolution; validate it against the standard */
        if (VideoStd_getResolution(videoStd, &width, &height) < 0) {
            ERR("Failed to calculate resolution of video standard\n");
            cleanup(THREAD_FAILURE);
        }

        /*
         * BUGFIX: reject the request if EITHER dimension exceeds what the
         * detected standard delivers. The original used '&&', which only
         * failed when both dimensions were too large, letting e.g. an
         * oversized width with a valid height through to the driver.
         */
        if (width < envp->imageWidth || height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

        /*
         * Capture driver provides 32 byte aligned data. We 32 byte align the
         * capture and video buffers to perform zero copy encoding.
         */
        envp->imageWidth  = Dmai_roundUp(envp->imageWidth, 32);
        capDim.x          = 0;
        capDim.y          = 0;
        capDim.height     = envp->imageHeight;
        capDim.width      = envp->imageWidth;
        capDim.lineLength = BufferGfx_calcLineLength(capDim.width, colorSpace);
    }
    else {
        /* Calculate the dimensions of a video standard given a color space */
        if (BufferGfx_calcDimensions(videoStd, colorSpace, &capDim) < 0) {
            ERR("Failed to calculate Buffer dimensions\n");
            cleanup(THREAD_FAILURE);
        }

        /*
         * Capture driver provides 32 byte aligned data. We 32 byte align the
         * capture and video buffers to perform zero copy encoding.
         */
        capDim.width      = Dmai_roundUp(capDim.width, 32);
        envp->imageWidth  = capDim.width;
        envp->imageHeight = capDim.height;
    }

    numCapBufs = NUM_CAPTURE_BUFS;

    gfxAttrs.dim.height     = capDim.height;
    gfxAttrs.dim.width      = capDim.width;
    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                                              colorSpace), 32);
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;

    /* YUV420 semi-planar is 1.5 bytes/pixel; other formats here 2 bytes/pixel */
    if (colorSpace == ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    }
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }

    /* Create a table of buffers to use with the capture driver */
    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(numCapBufs, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use to prime Fifo to video thread */
    hFifoBufTab = BufTab_create(VIDEO_PIPE_SIZE, bufSize,
                                BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hFifoBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Update global data for user interface */
    gblSetImageWidth(envp->imageWidth);
    gblSetImageHeight(envp->imageHeight);

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    /* Capture at half frame rate if using COMPONENT input at 720P */
    if ((envp->videoStd == VideoStd_720P_60) &&
        (envp->videoInput == Capture_Input_COMPONENT)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;
    }

    /* If its component input and video std is 1080I_30 then make it 1080I_60 */
    if (cAttrs.videoStd == VideoStd_1080I_30 &&
        cAttrs.videoInput == Capture_Input_COMPONENT) {
        cAttrs.videoStd = VideoStd_1080I_60;
    }

    cAttrs.numBufs          = NUM_CAPTURE_BUFS;
    cAttrs.colorSpace       = colorSpace;
    cAttrs.captureDimension = &gfxAttrs.dim;

    /* Create the capture device driver instance */
    hCapture = Capture_create(hBufTab, &cAttrs);
    if (hCapture == NULL) {
        ERR("Failed to create capture device. Is video input connected?\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use with the display driver */
    hDispBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                                BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hDispBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create display device driver instance if preview is needed */
    if (!envp->previewDisabled) {
        dAttrs.videoStd = envp->videoStd;
        /* SD standards go out over COMPOSITE, everything else over COMPONENT */
        if ((dAttrs.videoStd == VideoStd_CIF) ||
            (dAttrs.videoStd == VideoStd_SIF_NTSC) ||
            (dAttrs.videoStd == VideoStd_SIF_PAL) ||
            (dAttrs.videoStd == VideoStd_VGA) ||
            (dAttrs.videoStd == VideoStd_D1_NTSC) ||
            (dAttrs.videoStd == VideoStd_D1_PAL)) {
            dAttrs.videoOutput = Display_Output_COMPOSITE;
        }
        else {
            dAttrs.videoOutput = Display_Output_COMPONENT;
        }
        dAttrs.numBufs    = NUM_DISPLAY_BUFS;
        dAttrs.colorSpace = colorSpace;
        dAttrs.width      = capDim.width;
        dAttrs.height     = capDim.height;

        hDisplay = Display_create(hDispBufTab, &dAttrs);
        if (hDisplay == NULL) {
            ERR("Failed to create display device\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Prime the pipeline: queue black video buffers for the video thread */
    for (bufIdx = 0; bufIdx < VIDEO_PIPE_SIZE; bufIdx++) {
        hBuf = BufTab_getFreeBuf(hFifoBufTab);
        if (hBuf == NULL) {
            ERR("Failed to fill video pipeline\n");
            cleanup(THREAD_FAILURE);
        }

        /* Fill with black the buffer */
        CapBuf_blackFill(hBuf);

        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hBuf) < 0) {
            /* BUGFIX: message said "display thread"; hOutFifo feeds the
             * video thread (see comment above). */
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Capture a frame */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the display device */
        if ((!envp->previewDisabled) &&
            (Display_get(hDisplay, &hDisBuf) < 0)) {
            ERR("Failed to get display buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            /* BUGFIX: message said "display thread"; see priming loop. */
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);
        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? Then shut down cleanly. */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        if (!envp->previewDisabled) {
            /* Release buffer to the display device driver */
            if (Display_put(hDisplay, hDstBuf) < 0) {
                ERR("Failed to put display buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->previewDisabled) {
            /* Return the processed buffer to the capture driver */
            if (Capture_put(hCapture, hDstBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }
        else {
            /* Return the displayed buffer to the capture driver */
            if (Capture_put(hCapture, hDisBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Delete drivers before the buffer tables they were created with */
    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    if (hFifoBufTab) {
        BufTab_delete(hFifoBufTab);
    }
    if (hDispBufTab) {
        BufTab_delete(hDispBufTab);
    }

    return status;
}
/******************************************************************************
 * videoThrFxn
 *
 * Video encode thread: creates an H.264 (IH264VENC) VIDENC1 encoder, primes
 * the DEI thread with raw video buffers, then loops: raw buffer from the DEI
 * thread (hVideoInFifo) + empty output buffer from the writer thread
 * (hWriterOutFifo) -> encode -> raw buffer back to DEI (hVideoOutFifo),
 * encoded buffer to the writer (hWriterInFifo).
 *
 * arg is a VideoEnv*. Returns THREAD_SUCCESS or THREAD_FAILURE via the
 * cleanup() macro.
 ******************************************************************************/
Void *videoThrFxn(Void *arg)
{
    VideoEnv                *envp          = (VideoEnv *) arg;
    Venc1_Handle             hVe1          = NULL;
    VIDENC1_Params           params        = Venc1_Params_DEFAULT;
    VIDENC1_DynamicParams    dynParams     = Venc1_DynamicParams_DEFAULT;
    IH264VENC_Params         h264Params    = IH264VENC_PARAMS;
    IH264VENC_DynamicParams  h264DynParams = H264VENC_TI_IH264VENC_DYNAMICPARAMS;
    VUIParamBuffer           VUI_Buffer    = H264VENC_TI_VUIPARAMBUFFER;
    BufTab_Handle            hVidBufTab    = NULL;
    Buffer_Handle            hVInBuf, hWOutBuf;
    BufferGfx_Attrs          gfxAttrs      = BufferGfx_Attrs_DEFAULT;
    ColorSpace_Type          colorSpace    = ColorSpace_YUV420PSEMI;
    Int                      bufSize       = 0;
    Int                      fifoRet;
    Void                    *status        = THREAD_SUCCESS;

    /* Static creation parameters: user-defined presets so the extended
     * H.264 parameters below take effect */
    params.maxWidth          = envp->imageWidth;
    params.maxHeight         = envp->imageHeight;
    params.inputChromaFormat = XDM_YUV_420SP;
    params.reconChromaFormat = XDM_YUV_420SP;
    params.maxFrameRate      = envp->videoFrameRate;
    params.encodingPreset    = XDM_USER_DEFINED;
    params.rateControlPreset = IVIDEO_USER_DEFINED;
    params.maxBitRate        = 10000000;

    /* Target 90% of the requested bit rate to leave headroom */
    dynParams.targetBitRate      = envp->videoBitRate * 0.9;
    dynParams.inputWidth         = envp->imageWidth;
    dynParams.captureWidth       =
        Dmai_roundUp(BufferGfx_calcLineLength(envp->imageWidth,
                                              colorSpace), 32);
    dynParams.inputHeight        = envp->imageHeight;
    dynParams.refFrameRate       = params.maxFrameRate;
    dynParams.targetFrameRate    = params.maxFrameRate;
    dynParams.intraFrameInterval = 0;
    dynParams.interFrameInterval = 0;

    /* Extended H.264 parameters wrap the base VIDENC1 parameters */
    h264Params.videncParams      = params;
    h264Params.videncParams.size = sizeof(IH264VENC_Params);
    h264Params.encQuality        = 1;
    h264Params.enableDDRbuff     = 1; /* Uses DDR instead of VICP buffers */
    h264Params.enableARM926Tcm   = 0;
    h264Params.enableVUIparams   = (0x1 << 1);
    h264Params.videncParams.inputContentType = IVIDEO_PROGRESSIVE;

    h264DynParams.videncDynamicParams      = dynParams;
    h264DynParams.videncDynamicParams.size = sizeof(IH264VENC_DynamicParams);

    /* VUI: advertise timing info so the stream carries its frame rate */
    h264DynParams.VUI_Buffer                             = &VUI_Buffer;
    h264DynParams.VUI_Buffer->aspectRatioInfoPresentFlag = 1;
    h264DynParams.VUI_Buffer->overscanInfoPresentFlag    = 0;
    h264DynParams.VUI_Buffer->videoSignalTypePresentFlag = 0;
    h264DynParams.VUI_Buffer->timingInfoPresentFlag      = 1;
    h264DynParams.VUI_Buffer->numUnitsInTicks            = 1;
    /* maxFrameRate is in fps * 1000 (DMAI convention) — TODO confirm */
    h264DynParams.VUI_Buffer->timeScale                  =
        params.maxFrameRate / 1000;
    h264DynParams.VUI_Buffer->fixedFrameRateFlag         = 1;
    h264DynParams.VUI_Buffer->nalHrdParameterspresentFlag = 1;
    h264DynParams.VUI_Buffer->picStructPresentFlag       = 1;

    h264DynParams.idrFrameInterval = 15;

    hVe1 = Venc1_create(envp->hEngine, envp->videoEncoder,
                        (IVIDENC1_Params *) &h264Params,
                        (IVIDENC1_DynamicParams *) &h264DynParams);
    if (hVe1 == NULL) {
        ERR("Failed to create video encoder: %s\n", envp->videoEncoder);
        cleanup(THREAD_FAILURE);
    }

    /* Store the output buffer size in the environment */
    envp->outBufSize = Venc1_getOutBufSize(hVe1);

    /* Signal that the codec is created and output buffer size available */
    Rendezvous_meet(envp->hRendezvousWriter);

    /* Video BufTab create: fixed 704x576 (D1 PAL) input buffers */
    BufferGfx_calcDimensions(VideoStd_D1_PAL, colorSpace, &gfxAttrs.dim);
    gfxAttrs.dim.width      = 704;
    gfxAttrs.dim.height     = 576;
    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                                              colorSpace), 32);
    gfxAttrs.colorSpace     = colorSpace;
    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;

    hVidBufTab = BufTab_create(NUM_VIDEO_BUFS, bufSize,
                               BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hVidBufTab == NULL) {
        ERR("Failed to create video buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Set input buffer table */
    Venc1_setBufTab(hVe1, hVidBufTab);

    /* Send video buffers to DEI to prime the pipeline */
    Int nBufId = 0;
    for (nBufId = 0; nBufId < NUM_VIDEO_BUFS; nBufId++) {
        hVInBuf = BufTab_getBuf(hVidBufTab, nBufId);
        if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
            ERR("Failed to send buffer to dei thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (1) {
        /* Get a raw buffer from the DEI thread */
        fifoRet = Fifo_get(envp->hVideoInFifo, &hVInBuf);
        if (fifoRet < 0) {
            ERR("Failed to get buffer from dei thread\n");
            cleanup(THREAD_FAILURE);
        }
        /* BUGFIX: treat a fifo flush as a clean shutdown request, matching
         * captureThrFxn; previously the flush sentinel was ignored and the
         * (invalid) buffer would have been encoded. */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        /* Get an output buffer from the writer thread */
        fifoRet = Fifo_get(envp->hWriterOutFifo, &hWOutBuf);
        if (fifoRet < 0) {
            ERR("Failed to get buffer from writer thread\n");
            cleanup(THREAD_FAILURE);
        }
        /* BUGFIX: same flush handling for the writer fifo */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        /* Make sure the whole buffer is used for input */
        BufferGfx_resetDimensions(hVInBuf);

        /* Encode */
        if (Venc1_process(hVe1, hVInBuf, hWOutBuf) < 0) {
            ERR("Failed to encode video buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Return the raw buffer to the dei thread */
        if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
            ERR("Failed to send buffer to dei thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send the encoded buffer to the writer thread */
        if (Fifo_put(envp->hWriterInFifo, hWOutBuf) < 0) {
            /* BUGFIX: message said "dei thread"; this fifo feeds the
             * writer thread. */
            ERR("Failed to send buffer to writer thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Rendezvous_force(envp->hRendezvousWriter);

    /* Make sure the other threads aren't waiting for init to complete */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hVidBufTab) {
        BufTab_delete(hVidBufTab);
    }

    if (hVe1) {
        Venc1_delete(hVe1);
    }

    return status;
}