/******************************************************************************
 * gst_ticircbuffer_broadcast_consumer
 *    Broadcast when consumer has processed some data
 ******************************************************************************/
static void gst_ticircbuffer_broadcast_consumer(GstTICircBuffer *circBuf)
{
    gboolean canUnblock = FALSE;

    /* If the write pointer is at the end of the buffer and the first window
     * is free, unblock so the queue thread can shift data to the beginning
     * and continue.
     */
    if (circBuf->contiguousData &&
        gst_ticircbuffer_first_window_free(circBuf)) {
            canUnblock = TRUE;
    }

    /* Otherwise, we can unblock if there is now enough space to queue the
     * next input buffer.
     */
    else if (gst_ticircbuffer_write_space(circBuf) >= circBuf->bytesNeeded) {
        canUnblock = TRUE;
    }

    if (canUnblock) {
        GST_LOG("broadcast_consumer: input unblocked\n");
        Rendezvous_force(circBuf->waitOnConsumer);
    }
}
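For reference, the other side of this handshake blocks in Rendezvous_meet() on the same object. A minimal sketch of the waiting producer, assuming the waitOnConsumer and consumerAborted fields shown in these examples (wait_for_write_space is a hypothetical name, not from the source):

/* Sketch: block until the consumer frees enough space, or it aborts */
static gboolean wait_for_write_space(GstTICircBuffer *circBuf, Int32 needed)
{
    while (gst_ticircbuffer_write_space(circBuf) < needed) {
        /* Woken by gst_ticircbuffer_broadcast_consumer() above */
        Rendezvous_meet(circBuf->waitOnConsumer);

        if (circBuf->consumerAborted) {
            return FALSE;   /* consumer went away; stop waiting */
        }
    }
    return TRUE;
}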
Example #2
void *thread_func_aew(void *arg)
{
        struct env_aew          *env = arg;
        void                    *status = THREAD_SUCCESS;
        struct image_buffer_description *image = NULL;

        log_enter();
        log_dbg("meet\n");
        Rendezvous_meet(env->rendezvous_init);

        while (!gblGetQuit()) {
                usleep(10000);

                if (fifo_get(env->fifo_aew, &image) == FIFO_FAILURE) {
                        breakLoop(THREAD_FAILURE);
                }

                if (image == NULL) { // blocking FIFO: execution continues only once the FIFO holds a message
                        breakLoop(THREAD_SUCCESS);
                }
  
                capture_buffer_put(image);
        }

        Rendezvous_force(env->rendezvous_init);
        Rendezvous_meet(env->rendezvous_deinit);
        log("exit aew thread\n");
        log_exit();

        return status;
}
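This init/deinit pattern only balances if the launching thread meets the same Rendezvous objects. A hedged sketch of the launcher side (the participant counts and rzvAttrs are assumptions; env_aew and thread_func_aew are the names used above):

/* Sketch (assumed launcher): both sides call Rendezvous_meet(), so the
 * objects are created for two participants. env.fifo_aew setup omitted. */
Rendezvous_Attrs rzvAttrs = Rendezvous_Attrs_DEFAULT;
struct env_aew   env;
pthread_t        tid;

env.rendezvous_init   = Rendezvous_create(2, &rzvAttrs);
env.rendezvous_deinit = Rendezvous_create(2, &rzvAttrs);

pthread_create(&tid, NULL, thread_func_aew, &env);
Rendezvous_meet(env.rendezvous_init);     /* wait until the thread is up */
/* ... run ... */
gblSetQuit();                             /* make its while loop exit */
Rendezvous_meet(env.rendezvous_deinit);   /* meet it on the way out */
pthread_join(tid, NULL);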
/******************************************************************************
 * gst_ticircbuffer_consumer_aborted
 *    Consumer aborted - no longer block waiting on the consumer, and throw
 *    away all input buffers.
 ******************************************************************************/
void gst_ticircbuffer_consumer_aborted(GstTICircBuffer *circBuf)
{
    if (circBuf == NULL) {
        return;
    }

    circBuf->consumerAborted = TRUE;
    Rendezvous_force(circBuf->waitOnConsumer);
}
Example #4
/******************************************************************************
 * gst_tiaudenc1_exit_audio
 *    Shut down any running audio encoder, and reset the element state.
 ******************************************************************************/
static gboolean gst_tiaudenc1_exit_audio(GstTIAudenc1 *audenc1)
{
    gboolean checkResult;
    void*    thread_ret;

    GST_LOG("begin exit_audio\n");

    /* Drain the pipeline if it hasn't already been drained */
    if (!audenc1->drainingEOS) {
        gst_tiaudenc1_drain_pipeline(audenc1);
    }

    /* Shut down the encode thread */
    if (gst_tithread_check_status(
            audenc1, TIThread_CODEC_CREATED, checkResult)) {
        GST_LOG("shutting down encode thread\n");

        Rendezvous_force(audenc1->waitOnEncodeThread);
        if (pthread_join(audenc1->encodeThread, &thread_ret) == 0) {
            if (thread_ret == GstTIThreadFailure) {
                GST_DEBUG("encode thread exited with an error condition\n");
            }
        }
    }

    /* Shut down thread status management */
    audenc1->threadStatus = 0UL;
    pthread_mutex_destroy(&audenc1->threadStatusMutex);

    /* Shut down remaining items */
    if (audenc1->waitOnEncodeDrain) {
        Rendezvous_delete(audenc1->waitOnEncodeDrain);
        audenc1->waitOnEncodeDrain = NULL;
    }

    if (audenc1->waitOnEncodeThread) {
        Rendezvous_delete(audenc1->waitOnEncodeThread);
        audenc1->waitOnEncodeThread = NULL;
    }

    GST_LOG("end exit_audio\n");
    return TRUE;
}
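gst_tithread_check_status() is not defined in these snippets; it comes from the plugin's thread-status helpers. A plausible reconstruction, consistent with the threadStatus/threadStatusMutex fields used above (an assumption, not the verbatim macro):

/* Assumed sketch of the thread-status test macro */
#define gst_tithread_check_status(comp, flag, checkResult)     \
    (pthread_mutex_lock(&(comp)->threadStatusMutex),           \
     (checkResult) = (((comp)->threadStatus & (flag)) != 0),   \
     pthread_mutex_unlock(&(comp)->threadStatusMutex),         \
     (checkResult))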
Example #5
/******************************************************************************
 * gst_tiaudenc1_drain_pipeline
 *    Wait for the encode thread to finish processing queued input data.
 ******************************************************************************/
static void gst_tiaudenc1_drain_pipeline(GstTIAudenc1 *audenc1)
{
    gboolean checkResult;

    /* If the encode thread hasn't been created, there is nothing to drain. */
    if (!gst_tithread_check_status(
             audenc1, TIThread_CODEC_CREATED, checkResult)) {
        return;
    }

    audenc1->drainingEOS = TRUE;
    gst_ticircbuffer_drain(audenc1->circBuf, TRUE);

    /* Tell the encode thread that it is ok to shut down */
    Rendezvous_force(audenc1->waitOnEncodeThread);

    /* Wait for the encoder to finish draining */
    Rendezvous_meet(audenc1->waitOnEncodeDrain);

}
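The meet on waitOnEncodeDrain only returns once the encode thread signals that the circular buffer has run dry. A sketch of that side of the handshake, assuming the loop shape of the other encode threads here and a gst_ticircbuffer_get_data() that returns NULL when no data is available (that return behavior is an assumption):

/* Sketch (assumed): drain handling inside the encode thread */
for (;;) {
    GstBuffer *encData = gst_ticircbuffer_get_data(audenc1->circBuf);

    if (encData == NULL) {
        if (audenc1->drainingEOS) {
            /* Input fully drained; wake gst_tiaudenc1_drain_pipeline() */
            Rendezvous_force(audenc1->waitOnEncodeDrain);
            break;
        }
        /* No data yet; block until the queue thread supplies more */
        Rendezvous_meet(audenc1->waitOnEncodeThread);
        continue;
    }

    /* ... run the audio codec on encData ... */
}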
Example #6
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv          *envp           = (CaptureEnv *) arg;
    Void                *status         = THREAD_SUCCESS;
    Capture_Attrs        cAttrs         = Capture_Attrs_DM365_DEFAULT;
    BufferGfx_Attrs      gfxAttrs       = BufferGfx_Attrs_DEFAULT;
    Capture_Handle       hCapture       = NULL;
    BufTab_Handle        hBufTab        = NULL;
    BufferGfx_Dimensions dim;
    Buffer_Handle        hDstBuf, hCapBuf;
    Int32                width, height, bufSize;
    Int                  fifoRet;
    ColorSpace_Type      colorSpace = ColorSpace_YUV420PSEMI;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;    
    cAttrs.videoStd   = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    if (VideoStd_getResolution(envp->videoStd, &width, &height) < 0) {
        ERR("Failed to calculate resolution of video standard\n");
        cleanup(THREAD_FAILURE);
    }

    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (width < envp->imageWidth && height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32-byte aligned data. We 32-byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth = Dmai_roundUp(envp->imageWidth, 32);
    }
    else {
        /* Resolution was not set on command line. Set to defaults. */
        envp->imageHeight = height;

       /*
        * Capture driver provides 32-byte aligned data. We 32-byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(width, 32); 
    }

    Dmai_clear(dim);
    dim.width      = envp->imageWidth;
    dim.height     = envp->imageHeight;

    dim.lineLength = Dmai_roundUp(dim.width, 32);
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = dim.lineLength * dim.height * 3 / 2;
    } else {
        bufSize = dim.lineLength * dim.height * 2;
    }
    gfxAttrs.dim = dim;

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(NUM_CAPTURE_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    if ((envp->videoStd == VideoStd_720P_60) && (!envp->passThrough)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &dim;
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Get a buffer from the video thread */
    fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

    if (fifoRet < 0) {
        ERR("Failed to get buffer from video thread\n");
        cleanup(THREAD_FAILURE);
    }

    if (fifoRet == Dmai_EFLUSH) {
        cleanup(THREAD_SUCCESS);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        
        /* Get a buffer from the capture driver to encode */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send captured buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        /* Return a buffer to the capture driver */
        if (Capture_put(hCapture, hDstBuf) < 0) {
            ERR("Failed to put capture buffer\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hCapture) {
        Capture_delete(hCapture);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    return status;
}
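Every thread function in these examples exits through a cleanup: label via a cleanup() macro, with THREAD_SUCCESS/THREAD_FAILURE as pthread return codes. None of them are defined in the snippets; the TI DVSDK demos define something very close to the following (reproduced here as an assumption):

/* Assumed definitions behind the cleanup()/THREAD_* idiom */
#define THREAD_SUCCESS ((Void *) 0)
#define THREAD_FAILURE ((Void *) -1)

#define cleanup(x)   \
    status = (x);    \
    goto cleanup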
Example #7
/******************************************************************************
 * displayThrFxn
 ******************************************************************************/
Void *displayThrFxn(Void *arg)
{
    DisplayEnv             *envp       = (DisplayEnv *) arg;
    Display_Attrs           dAttrs     = Display_Attrs_DM365_VID_DEFAULT;
    Display_Handle          hDisplay   = NULL;
    Framecopy_Handle        hFc        = NULL;
    Void                   *status     = THREAD_SUCCESS;
    Uns                     frameCnt   = 0;
    BufferGfx_Dimensions    srcDim;
    Buffer_Handle           hSrcBuf, hDstBuf;
    Int                     fifoRet;
    ColorSpace_Type         colorSpace = ColorSpace_YUV420PSEMI;
    BufferGfx_Attrs         gfxAttrs = BufferGfx_Attrs_DEFAULT;
    BufTab_Handle           hBufTab  = NULL;
    Int32                   bufSize;
    Time_Attrs              tAttrs   = Time_Attrs_DEFAULT;
    Time_Handle             hTime    = NULL;
    Int32                   time, waitTime;
    Int                     bufCnt = 1;

    hTime = Time_create(&tAttrs);

    if (hTime == NULL) {
        ERR("Failed to create Time object\n");
        cleanup(THREAD_FAILURE);
    }

    if(Time_reset(hTime) != Dmai_EOK) {
        ERR("Failed to reset timer\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Pause for priming? */
        Pause_test(envp->hPausePrime);

        /* Get decoded video frame */
        fifoRet = Fifo_get(envp->hInFifo, &hSrcBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }
        
        BufferGfx_getDimensions(hSrcBuf, &srcDim);

        /* Prime the display driver with the first NUM_DISPLAY_BUFS buffers */
        if (bufCnt <= NUM_DISPLAY_BUFS) { 
            if (bufCnt == 1) {  // Create the Display at the first frame
                gfxAttrs.dim.width = srcDim.width;
                gfxAttrs.dim.height = srcDim.height;
                gfxAttrs.dim.lineLength = srcDim.lineLength;
                gfxAttrs.dim.x = srcDim.x;
                gfxAttrs.dim.y = srcDim.y;
                if (colorSpace ==  ColorSpace_YUV420PSEMI) {
                    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 
                        3 / 2;
                } else {
                    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
                }

                /* Create a table of buffers to use with the device drivers */
                gfxAttrs.colorSpace = colorSpace;
                hBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                    BufferGfx_getBufferAttrs(&gfxAttrs));
                if (hBufTab == NULL) {
                    ERR("Failed to create buftab\n");
                    cleanup(THREAD_FAILURE);
                }
	
                /* Create the display device instance */
                dAttrs.delayStreamon = TRUE;
                dAttrs.numBufs = NUM_DISPLAY_BUFS;
                dAttrs.videoStd = envp->videoStd;
                /* 
                 * Round down the width to a multiple of 32 as required by 
                 * display driver. Otherwise, the driver would internally round
                 * up the width, resulting in the codec padding showing up
                 * on the display when the image width is not a multiple of 32.
                 */
                dAttrs.width = ((gfxAttrs.dim.width & 0x1f) ?
                    (gfxAttrs.dim.width & ~(0x1f)) : gfxAttrs.dim.width);
                dAttrs.height = gfxAttrs.dim.height;
                dAttrs.videoOutput = envp->displayOutput;
                dAttrs.colorSpace  = colorSpace;
                hDisplay = Display_create(hBufTab, &dAttrs);

                if (hDisplay == NULL) {
                    ERR("Failed to create display device\n");
                    cleanup(THREAD_FAILURE);
                }
            }

            bufCnt++;
        }
        else {
            /* Get a buffer from the display device driver */
            if (Display_get(hDisplay, &hDstBuf) < 0) {
                ERR("Failed to get display buffer\n");
                cleanup(THREAD_FAILURE);
            }

            /* Send buffer back to the video thread */
            if (Fifo_put(envp->hOutFifo, hDstBuf) < 0) {
                ERR("Failed to send buffer to video thread\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->videoStd == VideoStd_720P_60) {
            if (Time_delta(hTime, (UInt32*)&time) < 0) {
                ERR("Failed to get timer delta\n");
                cleanup(THREAD_FAILURE);
            }
            waitTime = DISPLAYLOOPLATENCY - time;
            if(waitTime > 0) {
                usleep(waitTime);
            }
            if(Time_reset(hTime) != Dmai_EOK) {
                ERR("Failed to reset timer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();           

        /* Give a filled buffer back to the display device driver */
        if (Display_put(hDisplay, hSrcBuf) < 0) {
            ERR("Failed to put display buffer\n");
            cleanup(THREAD_FAILURE);
        }

        frameCnt++;
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Pause_off(envp->hPausePrime);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    if(hTime) {
        Time_delete(hTime);
    }

    return status;
}
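Pause_test() is the DMAI Pause module: it returns immediately while the pause is off and blocks while it is on, until Pause_off() releases everyone. A condition-variable sketch of the behavior these loops rely on (type and field names here are assumptions, not DMAI's actual internals):

/* Sketch: what Pause_test()/Pause_off() amount to */
typedef struct PauseSketch {
    pthread_mutex_t mutex;
    pthread_cond_t  cond;
    int             paused;
} PauseSketch;

void pauseSketch_test(PauseSketch *p)
{
    pthread_mutex_lock(&p->mutex);
    while (p->paused) {
        pthread_cond_wait(&p->cond, &p->mutex);   /* block while paused */
    }
    pthread_mutex_unlock(&p->mutex);
}

void pauseSketch_off(PauseSketch *p)
{
    pthread_mutex_lock(&p->mutex);
    p->paused = 0;
    pthread_cond_broadcast(&p->cond);   /* release all blocked threads */
    pthread_mutex_unlock(&p->mutex);
}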
Example #8
/******************************************************************************
 * speechThrFxn
 ******************************************************************************/
Void *speechThrFxn(Void *arg)
{
    SpeechEnv              *envp                = (SpeechEnv *) arg;
    SPHDEC1_Params          defaultParams       = Sdec1_Params_DEFAULT;
    SPHDEC1_DynamicParams   defaultDynParams    = Sdec1_DynamicParams_DEFAULT;
    Void                   *status              = THREAD_SUCCESS;
    Sound_Attrs             sAttrs              = Sound_Attrs_MONO_DEFAULT;
    Loader_Attrs            lAttrs              = Loader_Attrs_DEFAULT;
    Buffer_Attrs            bAttrs              = Buffer_Attrs_DEFAULT;
    Sdec1_Handle            hSd1                = NULL;
    Sound_Handle            hSound              = NULL;
    Loader_Handle           hLoader             = NULL;
    Engine_Handle           hEngine             = NULL;
    Buffer_Handle           hOutBuf             = NULL;
    SPHDEC1_Params         *params;
    SPHDEC1_DynamicParams  *dynParams;
    Buffer_Handle           hInBuf;

    /* Open the codec engine */
    hEngine = Engine_open(envp->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ERR("Failed to open codec engine %s\n", envp->engineName);
        cleanup(THREAD_FAILURE);
    }

    /* Create the sound device */
    sAttrs.sampleRate = 8000;
    sAttrs.mode = Sound_Mode_OUTPUT;
    sAttrs.leftGain  = 127;
    sAttrs.rightGain = 127;
    sAttrs.bufSize   = 128;
    hSound = Sound_create(&sAttrs);

    if (hSound == NULL) {
        ERR("Failed to create audio device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Set the sample rate for the user interface */
    gblSetSamplingFrequency(sAttrs.sampleRate);

    /* Use supplied params if any, otherwise use defaults */
    params = envp->params ? envp->params : &defaultParams;
    dynParams = envp->dynParams ? envp->dynParams : &defaultDynParams;

    /* Create the speech decoder */
    hSd1 = Sdec1_create(hEngine, envp->speechDecoder, params, dynParams);

    if (hSd1 == NULL) {
        ERR("Failed to create speech decoder: %s\n", envp->speechDecoder);
        cleanup(THREAD_FAILURE);
    }

    /*
     * Make the output buffer size twice the size of what the codec needs
     * as the codec needs mono and the Sound module converts the decoded
     * mono samples to stereo before writing to the device driver.
     */
    hOutBuf = Buffer_create(OUTBUFSIZE, &bAttrs);

    if (hOutBuf == NULL) {
        ERR("Failed to allocate output buffer\n");
        cleanup(THREAD_FAILURE);
    }

    /* How much encoded data to feed the codec each process call */
    lAttrs.readSize = INBUFSIZE;

    /* Make the total ring buffer larger */
    lAttrs.readBufSize = lAttrs.readSize * 512;

    /* Create the file loader for reading encoded data */
    hLoader = Loader_create(envp->speechFile, &lAttrs);

    if (hLoader == NULL) {
        ERR("Failed to create loader\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    /* Prime the file loader */
    Loader_prime(hLoader, &hInBuf);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Decode the audio buffer */
        if (Sdec1_process(hSd1, hInBuf, hOutBuf) < 0) {
            ERR("Failed to decode audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Increment statistics for user interface */
        gblIncSoundBytesProcessed(Buffer_getNumBytesUsed(hInBuf));

        /*
         * Force the output buffer size since we are forcing the size of the
         * output buffer allocated as opposed to asking the codec for a size.
         */
        Buffer_setNumBytesUsed(hOutBuf, OUTBUFSIZE);

        /* Write the decoded samples to the sound device */
        if (Sound_write(hSound, hOutBuf) < 0) {
            ERR("Failed to write audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Load a new frame from the file system */
        if (Loader_getFrame(hLoader, hInBuf) < 0) {
            ERR("Failed to read a frame of encoded data\n");
            cleanup(THREAD_FAILURE);
        }

        /* Check if the clip has ended */
        if (Buffer_getUserPtr(hInBuf) == NULL) {
            /* Wait for the video clip to finish, if applicable */
            Rendezvous_meet(envp->hRendezvousLoop);

            /* If we are to loop the clip, start over */
            if (envp->loop) {
                /* Recreate the speech codec */
                Sdec1_delete(hSd1);
                hSd1 = Sdec1_create(hEngine, envp->speechDecoder,
                                    params, dynParams);

                if (hSd1 == NULL) {
                    ERR("Failed to create speech decoder: %s\n",
                        envp->speechDecoder);
                    cleanup(THREAD_FAILURE);
                }

                /* Re-prime the file loader */
                Loader_prime(hLoader, &hInBuf);
            }
            else {
                printf("End of clip reached, exiting..\n");
                cleanup(THREAD_SUCCESS);
            }
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Rendezvous_force(envp->hRendezvousLoop);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hLoader) {
        Loader_delete(hLoader);
    }

    if (hSd1) {
        Sdec1_delete(hSd1);
    }

    if (hSound) {
        Sound_delete(hSound);
    }

    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    return status;
}
Example #9
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv           *envp     = (CaptureEnv *) arg;
    Void                 *status   = THREAD_SUCCESS;
    Capture_Attrs         cAttrs   = Capture_Attrs_DM365_DEFAULT;
    Display_Attrs         dAttrs   = Display_Attrs_DM365_VID_DEFAULT;
    BufferGfx_Attrs       gfxAttrs = BufferGfx_Attrs_DEFAULT;    
    Capture_Handle        hCapture = NULL;
    Display_Handle        hDisplay = NULL;
    BufTab_Handle         hBufTab  = NULL;
    BufTab_Handle         hDispBufTab = NULL;
    BufTab_Handle         hFifoBufTab = NULL;
    Buffer_Handle         hDstBuf, hCapBuf, hDisBuf, hBuf;
    BufferGfx_Dimensions  capDim;
    VideoStd_Type         videoStd;
    Int32                 width, height, bufSize;
    Int                   fifoRet;
    ColorSpace_Type       colorSpace = ColorSpace_YUV420PSEMI;
    Int                   bufIdx;
    Int                   numCapBufs;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;
    cAttrs.videoStd = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    videoStd = envp->videoStd;

    /* We only support D1, 720P and 1080I input */
    if (videoStd != VideoStd_D1_NTSC && videoStd != VideoStd_D1_PAL
        && videoStd != VideoStd_720P_60 && videoStd != VideoStd_720P_50 &&
        videoStd != VideoStd_1080I_30) {
        ERR("Need D1/720P/1080I input to this demo\n");
        cleanup(THREAD_FAILURE);
    }
    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (VideoStd_getResolution(videoStd, &width, &height) < 0) {
            ERR("Failed to calculate resolution of video standard\n");
            cleanup(THREAD_FAILURE);
        }

        if (width < envp->imageWidth && height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(envp->imageWidth,32);
        capDim.x          = 0;
        capDim.y          = 0;
        capDim.height     = envp->imageHeight;
        capDim.width      = envp->imageWidth;
        capDim.lineLength = BufferGfx_calcLineLength(capDim.width, colorSpace);
    } 
    else {
        /* Calculate the dimensions of a video standard given a color space */
        if (BufferGfx_calcDimensions(videoStd, colorSpace, &capDim) < 0) {
            ERR("Failed to calculate Buffer dimensions\n");
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        capDim.width      = Dmai_roundUp(capDim.width,32);
        envp->imageWidth  = capDim.width;
        envp->imageHeight = capDim.height;
    }

    numCapBufs = NUM_CAPTURE_BUFS;

    gfxAttrs.dim.height = capDim.height;
    gfxAttrs.dim.width = capDim.width;
    gfxAttrs.dim.lineLength = 
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                     colorSpace), 32);
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    } 
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }

    /* Create a table of buffers to use with the capture driver */
    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(numCapBufs, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use to prime Fifo to video thread */
    hFifoBufTab = BufTab_create(VIDEO_PIPE_SIZE, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hFifoBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Update global data for user interface */
    gblSetImageWidth(envp->imageWidth);
    gblSetImageHeight(envp->imageHeight);

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    /* Capture at half frame rate if using COMPONENT input at 720P */
    if ((envp->videoStd == VideoStd_720P_60) 
        && (envp->videoInput == Capture_Input_COMPONENT)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }

    /* If it's component input and the video std is 1080I_30, make it 1080I_60. */
    if (cAttrs.videoStd == VideoStd_1080I_30 && cAttrs.videoInput 
                        == Capture_Input_COMPONENT) {
        cAttrs.videoStd = VideoStd_1080I_60;
    }

    cAttrs.numBufs    = NUM_CAPTURE_BUFS;    
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &gfxAttrs.dim;
    /* Create the capture device driver instance */
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device. Is video input connected?\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use with the display driver */
    hDispBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hDispBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create display device driver instance if preview is needed */
    if (!envp->previewDisabled) {
        dAttrs.videoStd = envp->videoStd;
        if ( (dAttrs.videoStd == VideoStd_CIF) ||
            (dAttrs.videoStd == VideoStd_SIF_NTSC) ||
            (dAttrs.videoStd == VideoStd_SIF_PAL) ||
            (dAttrs.videoStd == VideoStd_VGA) ||
            (dAttrs.videoStd == VideoStd_D1_NTSC) ||        
            (dAttrs.videoStd == VideoStd_D1_PAL) ) {
            dAttrs.videoOutput = Display_Output_COMPOSITE;
        } else {
            dAttrs.videoOutput = Display_Output_COMPONENT;
        }    
        dAttrs.numBufs    = NUM_DISPLAY_BUFS;
        dAttrs.colorSpace = colorSpace;
        dAttrs.width = capDim.width;
        dAttrs.height = capDim.height;
        hDisplay = Display_create(hDispBufTab, &dAttrs);

        if (hDisplay == NULL) {
            ERR("Failed to create display device\n");
            cleanup(THREAD_FAILURE);
        }
    }

    for (bufIdx = 0; bufIdx < VIDEO_PIPE_SIZE; bufIdx++) {
        /* Queue the video buffers for main thread processing */
        hBuf = BufTab_getFreeBuf(hFifoBufTab);
        if (hBuf == NULL) {
            ERR("Failed to fill video pipeline\n");
            cleanup(THREAD_FAILURE);            
        }

        /* Fill the buffer with black */
        CapBuf_blackFill(hBuf);

        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }
    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Capture a frame */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the display device */
        if ((!envp->previewDisabled) && (Display_get(hDisplay, &hDisBuf) < 0)) {
            ERR("Failed to get display buffer\n");
            cleanup(THREAD_FAILURE);
        }
        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        if (!envp->previewDisabled) {
            /* Release buffer to the display device driver */
            if (Display_put(hDisplay, hDstBuf) < 0) {
                ERR("Failed to put display buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->previewDisabled) {
            /* Return the processed buffer to the capture driver */
            if (Capture_put(hCapture, hDstBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            } 
        }
        else {
            /* Return the displayed buffer to the capture driver */
            if (Capture_put(hCapture, hDisBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();

    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }
    
    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    if (hFifoBufTab) {
        BufTab_delete(hFifoBufTab);
    }
    if (hDispBufTab) {
        BufTab_delete(hDispBufTab);
    }

    return status;
}
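A pattern worth noting across these threads: each one calls Fifo_flush() on its outgoing fifo during cleanup so that a peer blocked in Fifo_get() wakes up and returns Dmai_EFLUSH (a non-negative status distinct from the data case), which is why the loops test the error case and the flush case separately. The receiving side of that contract, sketched with an assumed handle name hInFifo:

/* Sketch: receive loop honoring the Fifo_flush() shutdown handshake */
Buffer_Handle hBuf;
Int           rv;

for (;;) {
    rv = Fifo_get(hInFifo, &hBuf);

    if (rv < 0) {
        break;          /* hard error */
    }
    if (rv == Dmai_EFLUSH) {
        break;          /* peer flushed the fifo: clean shutdown */
    }
    /* ... process hBuf and hand it back via Fifo_put() ... */
}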
Example #10
/******************************************************************************
 * main
 ******************************************************************************/
Int main(Int argc, Char *argv[])
{
    Args                args                = DEFAULT_ARGS;
    Uns                 initMask            = 0;
    Int                 status              = EXIT_SUCCESS;
    Pause_Attrs         pAttrs              = Pause_Attrs_DEFAULT;
    Rendezvous_Attrs    rzvAttrs            = Rendezvous_Attrs_DEFAULT;
    Fifo_Attrs          fAttrs              = Fifo_Attrs_DEFAULT;
    Rendezvous_Handle   hRendezvousCapStd   = NULL;
    Rendezvous_Handle   hRendezvousInit     = NULL;
    Rendezvous_Handle   hRendezvousWriter   = NULL;
    Rendezvous_Handle   hRendezvousCleanup  = NULL;
    Pause_Handle        hPauseProcess       = NULL;
    UI_Handle           hUI                 = NULL;
    struct sched_param  schedParam;
    pthread_t           captureThread;
    pthread_t           writerThread;
    pthread_t           videoThread;
    pthread_t           speechThread;
    CaptureEnv          captureEnv;
    WriterEnv           writerEnv;
    VideoEnv            videoEnv;
    SpeechEnv           speechEnv;
    CtrlEnv             ctrlEnv;
    Int                 numThreads;
    pthread_attr_t      attr;
    Void               *ret;

    /* Zero out the thread environments */
    Dmai_clear(captureEnv);
    Dmai_clear(writerEnv);
    Dmai_clear(videoEnv);
    Dmai_clear(speechEnv);
    Dmai_clear(ctrlEnv);

    /* Parse the arguments given to the app and set the app environment */
    parseArgs(argc, argv, &args);

    printf("Encode demo started.\n");

    /* Initialize the mutex which protects the global data */
    pthread_mutex_init(&gbl.mutex, NULL);

    /* Set the priority of this whole process to max (requires root) */
    setpriority(PRIO_PROCESS, 0, -20);

    /* Initialize Codec Engine runtime */
    CERuntime_init();

    /* Initialize signal handler for SIGINT */
    signal(SIGINT, signalHandler);
    
    /* Initialize Davinci Multimedia Application Interface */
    Dmai_init();

    initMask |= LOGSINITIALIZED;

    /* Set up the user interface */
    hUI = uiSetup(&args);

    if (hUI == NULL) {
        cleanup(EXIT_FAILURE);
    }

    /* Create the Pause object */
    hPauseProcess = Pause_create(&pAttrs);

    if (hPauseProcess == NULL) {
        ERR("Failed to create Pause object\n");
        cleanup(EXIT_FAILURE);
    }

    /* Determine the number of threads needing synchronization */
    numThreads = 1;

    if (args.videoFile) {
        numThreads += 3;
    }

    if (args.speechFile) {
        numThreads += 1;
    }
    /* Create the objects which synchronize the thread init and cleanup */
    hRendezvousCapStd  = Rendezvous_create(2, &rzvAttrs);
    hRendezvousInit = Rendezvous_create(numThreads, &rzvAttrs);
    hRendezvousCleanup = Rendezvous_create(numThreads, &rzvAttrs);
    hRendezvousWriter = Rendezvous_create(2, &rzvAttrs);

    if (hRendezvousCapStd  == NULL || hRendezvousInit == NULL || 
        hRendezvousCleanup == NULL || hRendezvousWriter == NULL) {
        ERR("Failed to create Rendezvous objects\n");
        cleanup(EXIT_FAILURE);
    }

    /* Initialize the thread attributes */
    if (pthread_attr_init(&attr)) {
        ERR("Failed to initialize thread attrs\n");
        cleanup(EXIT_FAILURE);
    }

    /* Force the thread to use custom scheduling attributes */
    if (pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED)) {
        ERR("Failed to set schedule inheritance attribute\n");
        cleanup(EXIT_FAILURE);
    }

    /* Set the thread to be fifo real time scheduled */
    if (pthread_attr_setschedpolicy(&attr, SCHED_FIFO)) {
        ERR("Failed to set FIFO scheduling policy\n");
        cleanup(EXIT_FAILURE);
    }

    /* Create the video threads if a file name is supplied */
    if (args.videoFile) {
        /* Create the capture fifos */
        captureEnv.hInFifo = Fifo_create(&fAttrs);
        captureEnv.hOutFifo = Fifo_create(&fAttrs);

        if (captureEnv.hInFifo == NULL || captureEnv.hOutFifo == NULL) {
            ERR("Failed to open display fifos\n");
            cleanup(EXIT_FAILURE);
        }

        /* Set the capture thread priority */
        schedParam.sched_priority = CAPTURE_THREAD_PRIORITY;
        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the capture thread */
        captureEnv.hRendezvousInit    = hRendezvousInit;
        captureEnv.hRendezvousCapStd  = hRendezvousCapStd;
        captureEnv.hRendezvousCleanup = hRendezvousCleanup;
        captureEnv.hPauseProcess      = hPauseProcess;
        captureEnv.videoStd           = args.videoStd;
        captureEnv.videoInput         = args.videoInput;
        captureEnv.imageWidth         = args.imageWidth;
        captureEnv.imageHeight        = args.imageHeight;
        /* TODO */
        VideoStd_getResolution(VideoStd_CIF, &captureEnv.resizeWidth,
                                             &captureEnv.resizeHeight);

        if (pthread_create(&captureThread, &attr, captureThrFxn, &captureEnv)) {
            ERR("Failed to create capture thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= CAPTURETHREADCREATED;

        /*
         * Once the capture thread has detected the video standard, make it
         * available to other threads. The capture thread will set the
         * resolution of the buffer to encode in the environment (derived
         * from the video standard if the user hasn't passed a resolution).
         */
        Rendezvous_meet(hRendezvousCapStd);

        /* Create the writer fifos */
        writerEnv.hInFifo = Fifo_create(&fAttrs);
        writerEnv.hOutFifo = Fifo_create(&fAttrs);

        if (writerEnv.hInFifo == NULL || writerEnv.hOutFifo == NULL) {
            ERR("Failed to open display fifos\n");
            cleanup(EXIT_FAILURE);
        }

        /* Set the video thread priority */
        schedParam.sched_priority = VIDEO_THREAD_PRIORITY;
        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the video thread */
        videoEnv.hRendezvousInit    = hRendezvousInit;
        videoEnv.hRendezvousCleanup = hRendezvousCleanup;
        videoEnv.hRendezvousWriter  = hRendezvousWriter;
        videoEnv.hPauseProcess      = hPauseProcess;
        videoEnv.hCaptureOutFifo    = captureEnv.hOutFifo;
        videoEnv.hCaptureInFifo     = captureEnv.hInFifo;
        videoEnv.hWriterOutFifo     = writerEnv.hOutFifo;
        videoEnv.hWriterInFifo      = writerEnv.hInFifo;
        videoEnv.videoEncoder       = args.videoEncoder->codecName;
        videoEnv.params             = args.videoEncoder->params;
        videoEnv.dynParams          = args.videoEncoder->dynParams;
        videoEnv.videoBitRate       = args.videoBitRate;
        videoEnv.imageWidth         = captureEnv.imageWidth;
        videoEnv.imageHeight        = captureEnv.imageHeight;
        videoEnv.resizeWidth        = captureEnv.resizeWidth;
        videoEnv.resizeHeight       = captureEnv.resizeHeight;
        videoEnv.imgEncoder         = "jpegenc";
        videoEnv.engineName         = engine->engineName;
        if (args.videoStd == VideoStd_D1_PAL) {
            videoEnv.videoFrameRate     = 25000;
        } else {
            videoEnv.videoFrameRate     = 30000;
        }

        if (pthread_create(&videoThread, &attr, videoThrFxn, &videoEnv)) {
            ERR("Failed to create video thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= VIDEOTHREADCREATED;

        /*
         * Wait for the codec to be created in the video thread before
         * launching the writer thread (otherwise we don't know which size
         * of buffers to use).
         */
        Rendezvous_meet(hRendezvousWriter);

        /* Set the writer thread priority */
        schedParam.sched_priority = WRITER_THREAD_PRIORITY;
        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the writer thread */
        writerEnv.hRendezvousInit    = hRendezvousInit;
        writerEnv.hRendezvousCleanup = hRendezvousCleanup;
        writerEnv.hPauseProcess      = hPauseProcess;
        writerEnv.videoFile          = args.videoFile;
        writerEnv.outBufSize         = videoEnv.outBufSize;
        writerEnv.outsBufSize        = videoEnv.outsBufSize;

        if (pthread_create(&writerThread, &attr, writerThrFxn, &writerEnv)) {
            ERR("Failed to create writer thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= WRITERTHREADCREATED;

    }

    /* Create the speech thread if a file name is supplied */
    if (args.speechFile) {
        /* Set the thread priority */
        schedParam.sched_priority = SPEECH_THREAD_PRIORITY;
        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the speech thread */
        speechEnv.hRendezvousInit    = hRendezvousInit;
        speechEnv.hRendezvousCleanup = hRendezvousCleanup;
        speechEnv.hPauseProcess      = hPauseProcess;
        speechEnv.speechFile         = args.speechFile;
        speechEnv.soundInput         = args.soundInput;
        speechEnv.speechEncoder      = args.speechEncoder->codecName;
        speechEnv.params             = args.speechEncoder->params;
        speechEnv.dynParams          = args.speechEncoder->dynParams;
        speechEnv.engineName         = engine->engineName;

        if (pthread_create(&speechThread, &attr, speechThrFxn, &speechEnv)) {
            ERR("Failed to create speech thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= SPEECHTHREADCREATED;
    }

    /* Main thread becomes the control thread */
    ctrlEnv.hRendezvousInit    = hRendezvousInit;
    ctrlEnv.hRendezvousCleanup = hRendezvousCleanup;
    ctrlEnv.hPauseProcess      = hPauseProcess;
    ctrlEnv.keyboard           = args.keyboard;
    ctrlEnv.time               = args.time;
    ctrlEnv.hUI                = hUI;
    ctrlEnv.engineName         = engine->engineName;

    ret = ctrlThrFxn(&ctrlEnv);

    if (ret == THREAD_FAILURE) {
        status = EXIT_FAILURE;
    }

cleanup:
    /* Make sure the other threads aren't waiting for init to complete */
    if (hRendezvousCapStd) Rendezvous_force(hRendezvousCapStd);
    if (hRendezvousWriter) Rendezvous_force(hRendezvousWriter);
    if (hRendezvousInit) Rendezvous_force(hRendezvousInit);
    if (hPauseProcess) Pause_off(hPauseProcess);

    /* Wait until the other threads terminate */
    if (initMask & SPEECHTHREADCREATED) {
        if (pthread_join(speechThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (initMask & VIDEOTHREADCREATED) {
        if (pthread_join(videoThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (initMask & WRITERTHREADCREATED) {
        if (pthread_join(writerThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (writerEnv.hOutFifo) {
        Fifo_delete(writerEnv.hOutFifo);
    }

    if (writerEnv.hInFifo) {
        Fifo_delete(writerEnv.hInFifo);
    }

    if (initMask & CAPTURETHREADCREATED) {
        if (pthread_join(captureThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (captureEnv.hOutFifo) {
        Fifo_delete(captureEnv.hOutFifo);
    }

    if (captureEnv.hInFifo) {
        Fifo_delete(captureEnv.hInFifo);
    }

    if (hRendezvousCleanup) {
        Rendezvous_delete(hRendezvousCleanup);
    }

    if (hRendezvousInit) {
        Rendezvous_delete(hRendezvousInit);
    }

    if (hPauseProcess) {
        Pause_delete(hPauseProcess);
    }

    if (hUI) {
        UI_delete(hUI);
    }

    system("sync");
    system("echo 3 > /proc/sys/vm/drop_caches");


    pthread_mutex_destroy(&gbl.mutex);

    if (args.interface) {
        /* Launch the demo selection interface when exiting */
        if (execl("./interface", "interface", "-l 3", (char *) NULL) == -1) {
            status = EXIT_FAILURE;
        }
    }

    exit(status);
}
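main() installs signalHandler for SIGINT but its body is not shown; in these demos it typically just raises the global quit flag that every thread loop polls. A hedged sketch consistent with the gblGetQuit() usage above:

/* Sketch (assumed): SIGINT handler matching the quit-flag polling */
static void signalHandler(int signum)
{
    (void) signum;
    gblSetQuit();   /* every while (!gblGetQuit()) loop will now exit */
}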
Example #11
Void *writerThrFxn(Void *arg)
{
    WriterEnv *envp = (WriterEnv *) arg;
    Void *status = THREAD_SUCCESS;
    FILE *outFile = NULL;
    Buffer_Attrs bAttrs = Buffer_Attrs_DEFAULT;
    BufTab_Handle hBufTab = NULL;
    Buffer_Handle hOutBuf;
    Int bufIdx;
    Int fifoRet;

    /* Initialization */

    /* Open the output video file */
    outFile = fopen(envp->videoFile, "w");
    if (outFile == NULL) {
        ERR("Failed to open %s for writing\n", envp->videoFile);
        cleanup(THREAD_FAILURE);
    }

    /* Create buftab for video thread */
    hBufTab = BufTab_create(NUM_WRITER_BUFS, envp->outBufSize, &bAttrs);
    if (hBufTab == NULL) {
        ERR("Failed to allocate contiguous buffers\n");
        cleanup(THREAD_FAILURE);
    }

    /* Send all buffers to the video thread to be filled with encoded data */
    for (bufIdx = 0; bufIdx < NUM_WRITER_BUFS; bufIdx++) {
        if (Fifo_put(envp->hWriterOutFifo, BufTab_getBuf(hBufTab, bufIdx)) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (1) {

        /* Get an encoded buffer from the video thread */
        fifoRet = Fifo_get(envp->hWriterInFifo, &hOutBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (fwrite(Buffer_getUserPtr(hOutBuf),
                       Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                ERR("Error writing the encoded data to video file\n");
                cleanup(THREAD_FAILURE);
            }
        }
        else {
            printf("Warning, writer received 0 byte encoded frame\n");
        }

        /* Return the buffer to the video thread */
        if (Fifo_put(envp->hWriterOutFifo, hOutBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

    }

cleanup:

    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (outFile) {
        fclose(outFile);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    
    return status;
}
Example #12
Void *deiThrFxn(Void *arg)
{
	DeiEnv *envp = (DeiEnv *) arg;
	Void *status = THREAD_SUCCESS;

	Uint32 sysRegBase = 0;

	VIDENC1_Handle hDei = NULL;
	IDEI_Params deiParams;

	Uint16 frame_width = 0, frame_height = 0;
	Uint16 threshold_low = 0, threshold_high = 0;

	IVIDEO1_BufDescIn inBufDesc;	
	XDM_BufDesc outBufDesc;	
	XDAS_Int8 *outbufs[2];	
	XDAS_Int32 outBufSizeArray[2];
	VIDENC1_InArgs inArgs;
	IDEI_OutArgs outArgs;	
	Uint32 bufferSize;

	CMEM_AllocParams cmemPrm;
	Uint32 prevBufAddr, outBufAddr;

	Buffer_Handle cBuf, dBuf;

	Int ret = 0;

	int fd = -1;
	dm365mmap_params_t dm365mmap_params;
	pthread_mutex_t Dmai_DM365dmaLock = PTHREAD_MUTEX_INITIALIZER;

	/* ▼▼▼▼▼ Initialization ▼▼▼▼▼ */
	DM365MM_init(); printf("\n"); /* dm365mm issue */
	sysRegBase = DM365MM_mmap(0x01C40000, 0x4000);

	CMEM_init();
	cmemPrm.type = CMEM_HEAP;
	cmemPrm.flags = CMEM_NONCACHED;
	cmemPrm.alignment = 32;
	prevBufAddr = (Uint32)CMEM_alloc(IN_OUT_BUF_SIZE, &cmemPrm);
	outBufAddr = (Uint32)CMEM_alloc(IN_OUT_BUF_SIZE, &cmemPrm);

	frame_width = 720;
	frame_height = 576;
	threshold_low = 16;
	threshold_high = 20;

	/* Create DEI instance */	
	deiParams.videncParams.size = sizeof(IDEI_Params);
	deiParams.frameHeight = frame_height;
	deiParams.frameWidth = frame_width; 
	deiParams.inLineOffset = frame_width;
	deiParams.outLineOffset = (frame_width - 8);	
	deiParams.threshold_low = threshold_low;
	deiParams.threshold_high = threshold_high;

	deiParams.inputFormat = XDM_YUV_422ILE;	
	deiParams.outputFormat = XDM_YUV_420SP;
	
	deiParams.q_num = 1;
	deiParams.askIMCOPRes = 0; 
	deiParams.sysBaseAddr = sysRegBase;

	hDei = VIDENC1_create(envp->hEngine, "dei", (VIDENC1_Params *)&deiParams);
	if (hDei == NULL) {
		ERR("DEI alg creation failed\n");
		cleanup(THREAD_FAILURE);
	}

	fd = open("/dev/dm365mmap", O_RDWR | O_SYNC);
	if (fd == -1) {
		ERR("Failed to open /dev/dm365mmap\n");
		cleanup(THREAD_FAILURE);
	}

	Rendezvous_meet(envp->hRendezvousInit);
	/* ▲▲▲▲▲ Initialization ▲▲▲▲▲ */
	while (1) {

		if (Fifo_get(envp->hFromCaptureFifo, &cBuf) < 0) {
			ERR("Failed to get buffer from capture thread\n");
			cleanup(THREAD_FAILURE);
		}

		if (Fifo_get(envp->hFromDisplayFifo, &dBuf) < 0) {
			ERR("Failed to get buffer from display thread\n");
			cleanup(THREAD_FAILURE);
		}

		inBufDesc.numBufs = 4;

		bufferSize = (frame_width * frame_height);
		
		inBufDesc.bufDesc[0].bufSize = bufferSize;
		inBufDesc.bufDesc[0].buf = (XDAS_Int8 *)Buffer_getUserPtr(cBuf);
		inBufDesc.bufDesc[0].accessMask = 0;

		inBufDesc.bufDesc[1].bufSize = bufferSize;
		inBufDesc.bufDesc[1].buf = (XDAS_Int8 *)(Buffer_getUserPtr(cBuf) + bufferSize);
		inBufDesc.bufDesc[1].accessMask = 0;

		inBufDesc.bufDesc[2].bufSize = bufferSize;
		inBufDesc.bufDesc[2].buf = (XDAS_Int8 *)prevBufAddr;
		inBufDesc.bufDesc[2].accessMask = 0;

		inBufDesc.bufDesc[3].bufSize = bufferSize;
		inBufDesc.bufDesc[3].buf = (XDAS_Int8 *)(prevBufAddr + bufferSize);
		inBufDesc.bufDesc[3].accessMask = 0;	
		
		/* Output buffers */
		outBufDesc.numBufs = 2;
		outbufs[0] = (XDAS_Int8*)outBufAddr;
		outbufs[1] = (XDAS_Int8*)(outBufAddr + bufferSize);
		outBufSizeArray[0] = bufferSize;
		outBufSizeArray[1] = bufferSize / 2;

		outBufDesc.bufSizes = outBufSizeArray;
		outBufDesc.bufs = outbufs;

		inArgs.size = sizeof(VIDENC1_InArgs);
		outArgs.videncOutArgs.size = sizeof(IDEI_OutArgs);

		ret = VIDENC1_process((VIDENC1_Handle)hDei,
								 &inBufDesc,
								 &outBufDesc,
								 &inArgs,
								 (IVIDENC1_OutArgs *)&outArgs);
		if (ret != VIDENC1_EOK) {
			ERR("DEI process failed\n");
			cleanup(THREAD_FAILURE);
		}

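		/* EDMA copy 1 (assumed intent): deinterlaced luma plane, outbufs[0] -> display buffer */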
		dm365mmap_params.src = CMEM_getPhys(outbufs[0]);
		dm365mmap_params.srcmode = 0;
		dm365mmap_params.dst = Buffer_getPhysicalPtr(dBuf);
		dm365mmap_params.dstmode = 0;
		dm365mmap_params.srcbidx = 712;
		dm365mmap_params.dstbidx = 704;
		dm365mmap_params.acnt = 704;
		dm365mmap_params.bcnt = 576;
		dm365mmap_params.ccnt = 1;
		dm365mmap_params.bcntrld = dm365mmap_params.bcnt;
		dm365mmap_params.syncmode = 1;

		pthread_mutex_lock(&Dmai_DM365dmaLock);
		if (ioctl(fd, DM365MMAP_IOCMEMCPY, &dm365mmap_params) == -1) {
        	ERR("memcpy: Failed to do memcpy\n");
        	cleanup(THREAD_FAILURE);
    	}
		pthread_mutex_unlock(&Dmai_DM365dmaLock);

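    	/* EDMA copy 2 (assumed intent): interleaved chroma plane, outbufs[1] -> display buffer at the 2/3 offset */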
    	dm365mmap_params.src = CMEM_getPhys(outbufs[1]);
    	dm365mmap_params.srcmode = 0;
    	dm365mmap_params.dst = Buffer_getPhysicalPtr(dBuf) + (Buffer_getSize(dBuf) * 2 / 3);
    	dm365mmap_params.dstmode = 0;
    	dm365mmap_params.srcbidx = 712;
		dm365mmap_params.dstbidx = 704;
    	dm365mmap_params.acnt = 712;
    	dm365mmap_params.bcnt = 570 / 2;
    	dm365mmap_params.ccnt = 1;
    	dm365mmap_params.bcntrld = dm365mmap_params.bcnt;
    	dm365mmap_params.syncmode = 1;

		pthread_mutex_lock(&Dmai_DM365dmaLock);
		if (ioctl(fd, DM365MMAP_IOCMEMCPY, &dm365mmap_params) == -1) {
        	ERR("memcpy: Failed to do memcpy\n");
        	cleanup(THREAD_FAILURE);
    	}
		pthread_mutex_unlock(&Dmai_DM365dmaLock);
    	Buffer_setNumBytesUsed(dBuf, 704 * 576 * 3 / 2);

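    	/* EDMA copy 3 (assumed intent): save the current capture frame as the "previous" fields for the next DEI pass */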
    	dm365mmap_params.src = Buffer_getPhysicalPtr(cBuf);
    	dm365mmap_params.srcmode = 0;
    	dm365mmap_params.dst = prevBufAddr;
    	dm365mmap_params.dstmode = 0;
    	dm365mmap_params.srcbidx = 1440;
		dm365mmap_params.dstbidx = 1440;
    	dm365mmap_params.acnt = 1440;
    	dm365mmap_params.bcnt = 576;
    	dm365mmap_params.ccnt = 1;
    	dm365mmap_params.bcntrld = dm365mmap_params.bcnt;
    	dm365mmap_params.syncmode = 1;

    	pthread_mutex_lock(&Dmai_DM365dmaLock);
		if (ioctl(fd, DM365MMAP_IOCMEMCPY, &dm365mmap_params) == -1) {
        	ERR("memcpy: Failed to do memcpy\n");
        	cleanup(THREAD_FAILURE);
    	}
		pthread_mutex_unlock(&Dmai_DM365dmaLock);

		/* Send buffer to display thread */
		if (Fifo_put(envp->hToDisplayFifo, dBuf) < 0) {
			ERR("Failed to send buffer to display thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Return buffer to capture thread */
		if (Fifo_put(envp->hToCaptureFifo, cBuf) < 0) {
			ERR("Failed to send buffer to capture thread\n");
			cleanup(THREAD_FAILURE);
		}

	}

cleanup:
	Rendezvous_force(envp->hRendezvousInit);
	Rendezvous_meet(envp->hRendezvousCleanup);

	/* Delete DEI ALG instance */
	VIDENC1_delete(hDei);

	DM365MM_ummap(sysRegBase,0x4000);

	DM365MM_exit();

	if (fd > 0) {   
        close(fd);
    }

	return status;
}
/******************************************************************************
 * gst_ticircbuffer_broadcast_producer
 *    Broadcast when producer has processed some data
 ******************************************************************************/
static void gst_ticircbuffer_broadcast_producer(GstTICircBuffer *circBuf)
{
    GST_LOG("broadcast_producer: output unblocked\n");
    Rendezvous_force(circBuf->waitOnProducer);
}
Example #14
/*
##############################################
##Int ModuleResize(Int argc, Char *argv[])
##############################################
*/
Int ModuleResize(Int argc, Char *argv[])
{
	Uns                 initMask            = 0;
	Int                 status              = EXIT_SUCCESS;
	Rendezvous_Attrs    rzvAttrs            = Rendezvous_Attrs_DEFAULT;
	Fifo_Attrs          fAttrs              = Fifo_Attrs_DEFAULT;
	Rendezvous_Handle   hRendezvousInit     = NULL;
	Rendezvous_Handle   hRendezvousWriter   = NULL;
	Rendezvous_Handle   hRendezvousCleanup  = NULL;
	Int                 numThreads			= 0;
	pthread_t 			id_listen[5] 		= {0};
	Void               *ret;

	char 				devicebuf[16] 		= {0};

	CaptureEnv          captureEnv;
	WriterEnv           writerEnv;
	WriterEnv			writerLowRateEnv;
	DetectEnv			detectEnv;
	VideoEnv            videoEnv;
	VideoEnv            videoLowRateEnv;
	VideoEnv            LowRateResize;
	AudioEnv            audioEnv;
	CtrlEnv             ctrlEnv;
	char box_version[64] = {0};
	OutputVideoInfo		outputhandle;
	textinfo			*texthandle;
	int 				DHCPVAL = 0, tmp = 0;
	char 				gateway[255] = {0};

	struct sched_param  schedParam;
	pthread_t           captureThread;
	pthread_t           detectThread;
	pthread_t           writerThread;
	pthread_t			writerLowThread;
	pthread_t           videoThread;
	pthread_t           audioThread;
	pthread_t			videoLowThread;
	pthread_t			resizeLowThread;
#ifdef DSS_ENC_1100_1200
	pthread_t           webListenThread;
#endif
	pthread_attr_t      attr;
	int       index = 0;
	int result = 0;
	char ts_version[128] = {0};
	/* Zero out the thread environments */
	Dmai_clear(captureEnv);
	Dmai_clear(writerEnv);
	Dmai_clear(videoEnv);
	Dmai_clear(audioEnv);
	Dmai_clear(ctrlEnv);

	mid_task_init();
	trace_init();
	open_gpio_port();

	ts_build_get_version(ts_version, sizeof(ts_version));

	strcpy(box_version, BOX_VER);
	strcat(box_version, CODE_TYPE);
	strcat(box_version, DEUBG);
	printf("[%s] Module Encode Program %s V%s\n", CODE_COND, BOARD_TYPE, box_version);
	printf("the build time is %s,the git vesion is %s.the ts version is %s\n\n", g_make_build_date, _VERSION, ts_version);

	initMutexPthread();
	InitgblCommonMutex();
	InitSysParams();
	initOutputVideoParam();
	InitHVTable(&gHVTable);
	//	webgetDHCPFlag(tmp, &DHCPVAL);
	//	readDHCPValue(DHCPCONFIG_FILE, &DHCPVAL);
	//	setDHCPFlag(DHCPVAL);
	gLogoinfo = initLogoMod();
	initTextinfo();

	ReadEncodeParamTable(CONFIG_NAME, &gSysParaT);
	DHCPVAL = gSysParaT.sysPara.nTemp[0];
	printf("----mic=%x:%x:%x:%x:%x:%x\n",gSysParaT.sysPara.szMacAddr[0],gSysParaT.sysPara.szMacAddr[1],gSysParaT.sysPara.szMacAddr[2],
			gSysParaT.sysPara.szMacAddr[3],gSysParaT.sysPara.szMacAddr[4],gSysParaT.sysPara.szMacAddr[5]);

	ReadLowbitParamTable(LOWBIT_PARAM, &gSysParaT);

	memset(&outputhandle, 0, sizeof(OutputVideoInfo));
	getOutputvideohandle(&outputhandle);

	readOutputVideoParam(VIDEOENCODE_FILE, &outputhandle);

	setOutputvideohandle(&outputhandle);
	//sleep(10);
	ReadLogoinfo(LOGOCONFIGNAME, gLogoinfo);
	//setLogoInfoHandle(logoEnv);
	//sleep(10);
	texthandle = getTextInfoHandle();
	readTextFromfile(ADDTEXT_FILE, texthandle);
	//	DEBUG(DL_DEBUG, "%d,%d,%d,%d,%d,%d,%s\n", DHCPVAL, texthandle->xpos, texthandle->ypos,
	//	      texthandle->enable, texthandle->showtime, texthandle->alpha, texthandle->msgtext);
	//	sleep(10);
#ifdef DSS_ENC_1100_1200
	ReadProtocolIni(PROTOCOL_NAME, &gProtocol);
#endif

	ReadRemoteCtrlIndex(REMOTE_NAME, &index);
	/* Read the I-frame interval */
	ReadIframeInterval(IFRAMES_NAME);
	/* Green (power-save) adjust module */
	app_init_green_adjust_module();
#ifdef CL4000_DVI
	app_init_screen_adjust_module();
#endif
	gblSetRemoteIndex(index);
	ReadHVTable(&gHVTable, 0);
	ReadHVTable(&gHVTable, 1);
#ifdef CL4000_DVI_SDI
	ReadIPParamTable(IP_PARAM, &gSysParaT);
#endif

	if(DHCPVAL) {
		printf("i will set dhcp.\n");
#if 1
		system("kill -1 `cat /var/run/dhcpcd-eth0.pid`");
		system("/sbin/dhcpcd eth0");
		system("ifconfig eth0");
#endif
		gSysParaT.sysPara.dwNetMark = GetNetmask("eth0");
		gSysParaT.sysPara.dwAddr = GetIPaddr("eth0");
		gSysParaT.sysPara.dwGateWay = get_gateway(gateway);
		DEBUG(DL_DEBUG, "gateway =%s\n", gateway);
	} else {
		printf("i will set static ip.\n");
		SetEthConfigIP(gSysParaT.sysPara.dwAddr, gSysParaT.sysPara.dwNetMark);
		SetEthConfigGW(gSysParaT.sysPara.dwGateWay);
	}

	system("ifconfig");

	strcpy(gSysParaT.sysPara.strVer, box_version);
	initSetParam();
	DEBUG(DL_DEBUG, "logo=%d text=%d ,texthandle->enable=%d,texthandle->showtime=%d\n", outputhandle.logo_show, outputhandle.text_show, texthandle->enable, texthandle->showtime);
#ifdef DSS_ENC_1100_1200
	/* Open and initialize the LCD */
	OpenLCDCom(); //matchbox ++
	gblLoadIDX(); //matchbox ++

	if(-2 == ReadDeviceType(DTYPECONFIG_NAME, 1)) {
		ReadDeviceType(DTYPECONFIG_NAME, 0);
	}

	GetDeviceType(devicebuf);
	DEBUG(DL_DEBUG, "DTYPECONFIG_NAME gDeviceType = %s\n", devicebuf);
#endif

	/* Ignore SIGPIPE so writing to a broken pipe cannot kill the process */
	Signal(SIGPIPE, SIG_IGN);
	/* Set the priority of this whole process to max (requires root) */
	setpriority(PRIO_PROCESS, 0, -20);
	/* Initialize the high-bitrate video encoder parameters */
	InitVideoEncParams(&gSysParaT.videoPara[PORT_ONE]);
	/* Initialize the low-bitrate video encoder parameters */
	InitLowRateParams(&gSysParaT.videoPara[PORT_TWO]);
	/* Initialize the audio encoder parameters */
	InitAudioEncParams(&gSysParaT.audioPara[PORT_ONE]);
	/* Initialize the mutex which protects the global data */
	pthread_mutex_init(&gbl.mutex, NULL);
	/* Initialize Codec Engine runtime */
	CERuntime_init();
	/* Initialize Davinci Multimedia Application Interface */
	Dmai_init();
	closeWatchDog();
	mid_timer_init();
	initWatchDog();
#ifdef CL4000_DVI_SDI

	if(gblGetRemoteIndex() < MAX_FAR_CTRL_NUM) {
		result = InitRemoteStruct(gblGetRemoteIndex());
	}

	gRemoteFD = CameraCtrlInit(PORT_COM2);

	if(gRemoteFD <= 0) {
		DEBUG(DL_ERROR, "Initial CameraCtrlInit() Error\n");
	}

#else
#ifndef ENABLE_DEUBG

	if(gblGetRemoteIndex() < MAX_FAR_CTRL_NUM) {
		result = InitRemoteStruct(gblGetRemoteIndex());
	}

	gRemoteFD = CameraCtrlInit(PORT_COM1);

	if(gRemoteFD <= 0) {
		DEBUG(DL_ERROR, "Initial CameraCtrlInit() Error\n");
	}

#endif
#endif
	CreateTCPTask(id_listen);
	/* Initialize the logs. Must be done after CERuntime_init() */
	/*  if(TraceUtil_start(engine->engineName) != TRACEUTIL_SUCCESS)
		{
		    ERR("Failed to TraceUtil_start\n");
			cleanup(EXIT_FAILURE);
		}	*/
	//initMask |= LOGSINITIALIZED;
	app_set_logoshow_flag(outputhandle.logo_show);
	app_set_textshow_flag(outputhandle.text_show);
	//setShowLogoTextFlag(outputhandle->logotext);
	addtextdisplay(texthandle);
	/* Determine the number of threads needing synchronization */
	numThreads = 1;
	/* Number of video threads */
	numThreads += 7;
	/* Number of audio threads */
	numThreads += 1;

	/* Create the objects which synchronize the thread init and cleanup */
	hRendezvousInit = Rendezvous_create(numThreads, &rzvAttrs);
	hRendezvousCleanup = Rendezvous_create(numThreads, &rzvAttrs);
	hRendezvousWriter = Rendezvous_create(3, &rzvAttrs);

	if(hRendezvousInit == NULL ||
	   hRendezvousCleanup == NULL ||
	   hRendezvousWriter == NULL) {
		ERR("Failed to create Rendezvous objects\n");
		cleanup(EXIT_FAILURE);
	}
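
	/* Barrier semantics assumed here (sketch, not from the original
	 * comments): each of the numThreads participants calls
	 *
	 *     Rendezvous_meet(hRendezvousInit);   // blocks until all arrive
	 *
	 * once its setup is done, and Rendezvous_force() on the cleanup path
	 * below releases any waiters early so a thread that failed during
	 * init cannot leave the others blocked forever.
	 */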

	/* Initialize the thread attributes */
	if(pthread_attr_init(&attr)) {
		ERR("Failed to initialize thread attrs\n");
		cleanup(EXIT_FAILURE);
	}

	/* Force the threads to use custom scheduling attributes; without
	 * PTHREAD_EXPLICIT_SCHED the policy and priority set below would be
	 * silently ignored in favor of the creating thread's.
	 */
	if(pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED)) {
		ERR("Failed to set schedule inheritance attribute\n");
		cleanup(EXIT_FAILURE);
	}

	/* Set the threads to use SCHED_FIFO real-time scheduling */
	if(pthread_attr_setschedpolicy(&attr, SCHED_FIFO)) {
		ERR("Failed to set FIFO scheduling policy\n");
		cleanup(EXIT_FAILURE);
	}


	/* Create the capture fifos */
	captureEnv.to_video_c = Fifo_create(&fAttrs);
	captureEnv.from_video_c = Fifo_create(&fAttrs);
	captureEnv.to_resize_c = Fifo_create(&fAttrs);
	captureEnv.from_resize_c = Fifo_create(&fAttrs);

	if(captureEnv.to_video_c == NULL || captureEnv.from_video_c == NULL ||
	   captureEnv.to_resize_c == NULL || captureEnv.from_resize_c == NULL) {
		ERR("Failed to open display fifos\n");
		cleanup(EXIT_FAILURE);
	}

	LowRateResize.to_videoresize_c = Fifo_create(&fAttrs);
	LowRateResize.from_videoresize_c = Fifo_create(&fAttrs);

	if(LowRateResize.to_videoresize_c == NULL || LowRateResize.from_videoresize_c == NULL) {
		ERR("Failed to open Resize fifos\n");
		cleanup(EXIT_FAILURE);
	}
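
	/* Paired-FIFO convention used throughout (illustrative sketch): a
	 * buffer travels to a worker on the "to" FIFO and comes back on the
	 * "from" FIFO, so exactly one thread owns it at any time, e.g.
	 *
	 *     Fifo_get(captureEnv.to_video_c, &hBuf);     // worker takes it
	 *     ... process hBuf ...
	 *     Fifo_put(captureEnv.from_video_c, hBuf);    // hand it back
	 */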

	/* Set the capture thread priority */
	schedParam.sched_priority = CAPTURE_THREAD_PRIORITY;

	if(pthread_attr_setschedparam(&attr, &schedParam)) {
		ERR("Failed to set scheduler parameters\n");
		cleanup(EXIT_FAILURE);
	}

	/* Create the capture thread */
	captureEnv.hRendezvousInit    = hRendezvousInit;
	captureEnv.hRendezvousCleanup = hRendezvousCleanup;
	DEBUG(DL_DEBUG, "captureThrFxn thread!!!!\n");


	if(pthread_create(&captureThread, &attr, captureThrFxn, &captureEnv)) {
		ERR("Failed to create capture thread\n");
		cleanup(EXIT_FAILURE);
	}

	/* Create the writer fifos */
	writerEnv.to_video_c = Fifo_create(&fAttrs);
	writerEnv.from_video_c = Fifo_create(&fAttrs);
	writerLowRateEnv.to_writelow_c = Fifo_create(&fAttrs);
	writerLowRateEnv.from_writelow_c = Fifo_create(&fAttrs);

	if(writerEnv.to_video_c == NULL || writerEnv.from_video_c == NULL ||
	   writerLowRateEnv.to_writelow_c == NULL || writerLowRateEnv.from_writelow_c == NULL) {
		ERR("Failed to open display fifos\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= CAPTURETHREADCREATED ;
	/*detect thread*/
	detectEnv.hRendezvousInit	  = hRendezvousInit;
	detectEnv.hRendezvousCleanup  = hRendezvousCleanup;

	/* Set the video thread priority */
	schedParam.sched_priority = DETECT_THREAD_PRIORITY;

	if(pthread_attr_setschedparam(&attr, &schedParam)) {
		ERR("Failed to set scheduler parameters\n");
		cleanup(EXIT_FAILURE);
	}

	if(pthread_create(&detectThread, &attr, detectThrFxn, &detectEnv)) {
		ERR("Failed to create detect thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= DETECTTHREADCREATED ;

	/* Set the video thread priority */
	schedParam.sched_priority = VIDEO_THREAD_PRIORITY;

	if(pthread_attr_setschedparam(&attr, &schedParam)) {
		ERR("Failed to set scheduler parameters\n");
		cleanup(EXIT_FAILURE);
	}

	DEBUG(DL_DEBUG, "videoThrFxn thread!!!!\n");

	/* Create the video thread */
	videoEnv.hRendezvousInit    = hRendezvousInit;
	videoEnv.hRendezvousCleanup = hRendezvousCleanup;
	videoEnv.hRendezvousWriter  = hRendezvousWriter;
	videoEnv.to_capture    		= captureEnv.from_video_c;
	videoEnv.from_capture     	= captureEnv.to_video_c;
	videoEnv.to_writer     		= writerEnv.from_video_c;
	videoEnv.from_writer     	= writerEnv.to_video_c;
	videoEnv.videoEncoder       = engine->videoEncoders->codecName;
	videoEnv.engineName         = engine->engineName;


	if(pthread_create(&videoThread, &attr, videoThrFxn, &videoEnv)) {
		ERR("Failed to create video thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= VIDEOTHREADCREATED;

	/* Create the low-rate video thread */
	videoLowRateEnv.hRendezvousInit    = hRendezvousInit;
	videoLowRateEnv.hRendezvousCleanup = hRendezvousCleanup;
	videoLowRateEnv.hRendezvousWriter  = hRendezvousWriter;
	videoLowRateEnv.to_resize		  = LowRateResize.from_videoresize_c;
	videoLowRateEnv.from_resize		  = LowRateResize.to_videoresize_c;
	videoLowRateEnv.from_writer    	  = writerLowRateEnv.to_writelow_c;
	videoLowRateEnv.to_writer          = writerLowRateEnv.from_writelow_c;
	videoLowRateEnv.videoEncoder       = engine->videoEncoders->codecName;
	videoLowRateEnv.engineName         = engine->engineName;
	DEBUG(DL_DEBUG, "videoLowRateThrFxn thread!!!!\n");

	if(pthread_create(&videoLowThread, &attr, videoLowRateThrFxn, &videoLowRateEnv)) {
		ERR("Failed to create video thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= VIDEOLOWRATETHREAD;

	/* Create the low-rate resize thread */
	LowRateResize.hRendezvousInit    = hRendezvousInit;
	LowRateResize.hRendezvousCleanup = hRendezvousCleanup;
	LowRateResize.hRendezvousWriter  = hRendezvousWriter;
	LowRateResize.from_capture		 = captureEnv.to_resize_c;
	LowRateResize.to_capture		 = captureEnv.from_resize_c;
	LowRateResize.videoEncoder       = engine->videoEncoders->codecName;
	LowRateResize.engineName         = engine->engineName;
	/* Set the video thread priority */
	schedParam.sched_priority = VIDEO_THREAD_PRIORITY;

	if(pthread_attr_setschedparam(&attr, &schedParam)) {
		ERR("Failed to set scheduler parameters\n");
		cleanup(EXIT_FAILURE);
	}

	DEBUG(DL_DEBUG, "ResizeLowThrFxn thread!!!!\n");

	if(pthread_create(&resizeLowThread, &attr, ResizeLowThrFxn, &LowRateResize)) {
		ERR("Failed to create video thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= RESIZELOWRATETHREAD;

	Rendezvous_meet(hRendezvousWriter);

	/* Set the writer thread priority */
	schedParam.sched_priority = WRITER_THREAD_PRIORITY;

	if(pthread_attr_setschedparam(&attr, &schedParam)) {
		ERR("Failed to set scheduler parameters\n");
		cleanup(EXIT_FAILURE);
	}

	/* Create the writer thread */
	writerEnv.hRendezvousInit    = hRendezvousInit;
	writerEnv.hRendezvousCleanup = hRendezvousCleanup;
	writerEnv.outBufSize         = videoEnv.outBufSize;

	DEBUG(DL_DEBUG, "writerThrFxn thread!!!!\n");

	if(pthread_create(&writerThread, &attr, writerThrFxn, &writerEnv)) {
		ERR("Failed to create writer thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= WRITERTHREADCREATED;

	/* Create the low-rate writer thread */
	writerLowRateEnv.hRendezvousInit         = hRendezvousInit;
	writerLowRateEnv.hRendezvousCleanup      = hRendezvousCleanup;
	writerLowRateEnv.outBufSize              = videoLowRateEnv.outBufSize;

	DEBUG(DL_DEBUG, "writerLowThrFxn thread!!!!\n");

	if(pthread_create(&writerLowThread, &attr, writerLowThrFxn, &writerLowRateEnv)) {
		ERR("Failed to create writerResize thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= WRITELOWRATETHREAD;

	/* Set the thread priority */
	schedParam.sched_priority = AUDIO_THREAD_PRIORITY;

	if(pthread_attr_setschedparam(&attr, &schedParam)) {
		ERR("Failed to set scheduler parameters\n");
		cleanup(EXIT_FAILURE);
	}

	DEBUG(DL_DEBUG, "Audio thread Function!!!!\n");
	/* Create the audio thread */
	audioEnv.hRendezvousInit    = hRendezvousInit;
	audioEnv.hRendezvousCleanup = hRendezvousCleanup;
	audioEnv.engineName         = engine->engineName;
	audioEnv.audioEncoder       = engine->audioEncoders->codecName;

	if(pthread_create(&audioThread, &attr, audioThrFxn, &audioEnv)) {
		ERR("Failed to create speech thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= AUDIOTHREADCREATED;
#ifdef DSS_ENC_1100_1200

	if(pthread_create(&webListenThread, &attr, weblistenThrFxn, NULL)) {
		ERR("Failed to create web listen thread\n");
		cleanup(EXIT_FAILURE);
	}

	initMask |= WEBLISTENCREATED;
#endif
	/* Main thread becomes the control thread */
	ctrlEnv.hRendezvousInit    = hRendezvousInit;
	ctrlEnv.hRendezvousCleanup = hRendezvousCleanup;
	ctrlEnv.engineName         = engine->engineName;
	ret = ctrlThrFxn(&ctrlEnv);

	if(ret == THREAD_FAILURE) {
		status = EXIT_FAILURE;
	}

	DEBUG(DL_DEBUG, "Exit All Thread!!\n");
cleanup:

	/* Make sure the other threads aren't waiting for init to complete */
	if(hRendezvousWriter) {
		Rendezvous_force(hRendezvousWriter);
	}

	if(hRendezvousInit) {
		Rendezvous_force(hRendezvousInit);
	}

	DEBUG(DL_DEBUG, "EXIT Common Mutex!!!\n");
	DestorygblCommonMutex();
	DEBUG(DL_DEBUG, "EXIT pthread Mutex!!!\n");
	DestroyMutexPthread();

	if(initMask & AUDIOTHREADCREATED) {
		if(pthread_join(audioThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	DEBUG(DL_DEBUG, "EXIT audio pThread!!!\n");

	if(initMask & VIDEOTHREADCREATED) {
		if(pthread_join(videoThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	DEBUG(DL_DEBUG, "EXIT video pThread!!!\n");

	if(initMask & WRITERTHREADCREATED) {
		if(pthread_join(writerThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	DEBUG(DL_DEBUG, "EXIT write pThread!!!\n");

	if(initMask & CAPTURETHREADCREATED) {
		if(pthread_join(captureThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	DEBUG(DL_DEBUG, "EXIT capture pThread!!!\n");

	if(initMask & VIDEOLOWRATETHREAD) {
		if(pthread_join(videoLowThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	if(initMask & RESIZELOWRATETHREAD) {
		if(pthread_join(resizeLowThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	if(initMask & WRITELOWRATETHREAD) {
		if(pthread_join(writerLowThread, &ret) == 0) {
			if(ret == THREAD_FAILURE) {
				status = EXIT_FAILURE;
			}
		}
	}

	if(pthread_join(id_listen[PORT_ONE], &ret) == 0) {
		if(ret == THREAD_FAILURE) {
			status = EXIT_FAILURE;
		}
	}

	if(captureEnv.to_video_c) {
		Fifo_delete(captureEnv.to_video_c);
	}

	if(captureEnv.from_video_c) {
		Fifo_delete(captureEnv.from_video_c);
	}

	if(captureEnv.to_resize_c) {
		Fifo_delete(captureEnv.to_resize_c);
	}

	if(captureEnv.from_resize_c) {
		Fifo_delete(captureEnv.from_resize_c);
	}

	if(writerEnv.to_video_c) {
		Fifo_delete(writerEnv.to_video_c);
	}

	if(writerEnv.from_video_c) {
		Fifo_delete(writerEnv.from_video_c);
	}

	if(writerLowRateEnv.from_writelow_c) {
		Fifo_delete(writerLowRateEnv.from_writelow_c);
	}

	if(writerLowRateEnv.to_writelow_c) {
		Fifo_delete(writerLowRateEnv.to_writelow_c);
	}

	if(LowRateResize.to_videoresize_c) {
		Fifo_delete(LowRateResize.to_videoresize_c);
	}

	if(LowRateResize.from_videoresize_c) {
		Fifo_delete(LowRateResize.from_videoresize_c);
	}

	DEBUG(DL_DEBUG, "EXIT Rendezvous cleanup pThread!!!\n");

	if(hRendezvousCleanup) {
		Rendezvous_delete(hRendezvousCleanup);
	}

	DEBUG(DL_DEBUG, "EXIT Rendezvous init pThread!!!\n");

	if(hRendezvousInit) {
		Rendezvous_delete(hRendezvousInit);
	}

	DEBUG(DL_DEBUG, "EXIT Rendezvous cleanup pThread!!!\n");
	/*
	    if (initMask & LOGSINITIALIZED) {
	        TraceUtil_stop();
		}	*/
	DEBUG(DL_DEBUG, "EXIT TraceUtil_stop !!!\n");
	pthread_mutex_destroy(&gbl.mutex);
	DEBUG(DL_DEBUG, "process EXIT!!!\n");
	exit(status);
}
Beispiel #15
0
Void *videoThrFxn(Void *arg)
{
	VideoEnv *envp = (VideoEnv *) arg;

	Venc1_Handle hVe1 = NULL;
	VIDENC1_Params params = Venc1_Params_DEFAULT;
	VIDENC1_DynamicParams dynParams = Venc1_DynamicParams_DEFAULT;
	IH264VENC_Params h264Params = IH264VENC_PARAMS;
	IH264VENC_DynamicParams h264DynParams = H264VENC_TI_IH264VENC_DYNAMICPARAMS;
	VUIParamBuffer VUI_Buffer = H264VENC_TI_VUIPARAMBUFFER;

	BufTab_Handle hVidBufTab = NULL;
	Buffer_Handle hVInBuf, hWOutBuf;
	BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;

	ColorSpace_Type colorSpace = ColorSpace_YUV420PSEMI;

	Int bufSize = 0;

	Void *status = THREAD_SUCCESS;

	/* Initialization */
	params.maxWidth = envp->imageWidth;
	params.maxHeight = envp->imageHeight;
	params.inputChromaFormat = XDM_YUV_420SP;
	params.reconChromaFormat = XDM_YUV_420SP;
	params.maxFrameRate = envp->videoFrameRate;
	params.encodingPreset = XDM_USER_DEFINED;
	params.rateControlPreset = IVIDEO_USER_DEFINED;
	params.maxBitRate = 10000000;

	dynParams.targetBitRate = envp->videoBitRate * 0.9;
	dynParams.inputWidth = envp->imageWidth;
	dynParams.captureWidth = Dmai_roundUp(BufferGfx_calcLineLength(envp->imageWidth, colorSpace), 32);
	dynParams.inputHeight = envp->imageHeight;
	dynParams.refFrameRate = params.maxFrameRate;
	dynParams.targetFrameRate = params.maxFrameRate;
	dynParams.intraFrameInterval = 0;
	dynParams.interFrameInterval = 0;
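
	/* Note (assumption based on the call above): captureWidth is the
	 * buffer pitch in pixels; the line length is rounded up to a 32-byte
	 * boundary, presumably to satisfy the capture/encoder alignment
	 * requirements, so it may exceed inputWidth.
	 */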

	h264Params.videncParams = params;
	h264Params.videncParams.size = sizeof(IH264VENC_Params);
	h264Params.encQuality = 1;
	h264Params.enableDDRbuff = 1; /* Uses DDR instead of VICP buffers */
	h264Params.enableARM926Tcm = 0;
	h264Params.enableVUIparams = (0x1 << 1);
	h264Params.videncParams.inputContentType = IVIDEO_PROGRESSIVE;

	h264DynParams.videncDynamicParams = dynParams;
	h264DynParams.videncDynamicParams.size = sizeof(IH264VENC_DynamicParams);

	h264DynParams.VUI_Buffer = &VUI_Buffer;
	h264DynParams.VUI_Buffer->aspectRatioInfoPresentFlag = 1;
	h264DynParams.VUI_Buffer->overscanInfoPresentFlag = 0;
	h264DynParams.VUI_Buffer->videoSignalTypePresentFlag = 0;
	h264DynParams.VUI_Buffer->timingInfoPresentFlag = 1;
	h264DynParams.VUI_Buffer->numUnitsInTicks = 1;
	h264DynParams.VUI_Buffer->timeScale = params.maxFrameRate / 1000;
	h264DynParams.VUI_Buffer->fixedFrameRateFlag = 1; 
	h264DynParams.VUI_Buffer->nalHrdParameterspresentFlag = 1;
	h264DynParams.VUI_Buffer->picStructPresentFlag = 1;

	h264DynParams.idrFrameInterval = 15;
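
	/* The IH264VENC_* wrapper structs embed the base VIDENC1 structs as
	 * their first member (see the videncParams assignments above), which
	 * is why the extended parameters can be passed to Venc1_create() cast
	 * to the base IVIDENC1_* types; the .size fields tell the codec which
	 * variant it received.
	 */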

	hVe1 = Venc1_create(envp->hEngine, envp->videoEncoder,
			(IVIDENC1_Params *) &h264Params,
			(IVIDENC1_DynamicParams *) &h264DynParams);
	if (hVe1 == NULL) {
		ERR("Failed to create video encoder: %s\n", envp->videoEncoder);
		cleanup(THREAD_FAILURE);
	}

	/* Store the output buffer size in the environment */
	envp->outBufSize = Venc1_getOutBufSize(hVe1);

	/* Signal that the codec is created and output buffer size available */
	Rendezvous_meet(envp->hRendezvousWriter);

	/* Video BufTab create */
	BufferGfx_calcDimensions(VideoStd_D1_PAL, colorSpace, &gfxAttrs.dim);
	gfxAttrs.dim.width = 704;
	gfxAttrs.dim.height = 576;
	gfxAttrs.dim.lineLength = Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, colorSpace), 32);
	gfxAttrs.colorSpace = colorSpace;
	bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
	hVidBufTab = BufTab_create(NUM_VIDEO_BUFS, bufSize, BufferGfx_getBufferAttrs(&gfxAttrs));
	if (hVidBufTab == NULL) {
		ERR("Failed to create video buftab\n");
		cleanup(THREAD_FAILURE);
	}
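
	/* Worked size, assuming BufferGfx_calcLineLength() returns the width
	 * in bytes for YUV420 semi-planar: 704 is already a multiple of 32,
	 * so bufSize = 704 * 576 * 3 / 2 = 608256 bytes per buffer.
	 */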

	/* Set input buffer table */
	Venc1_setBufTab(hVe1, hVidBufTab);

	/* Send video buffers to DEI */
	Int nBufId = 0;
	for (nBufId = 0; nBufId < NUM_VIDEO_BUFS; nBufId++) {
		hVInBuf = BufTab_getBuf(hVidBufTab, nBufId);
		if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}
	}

	/* Signal that initialization is done and wait for other threads */
	Rendezvous_meet(envp->hRendezvousInit);

	while(1) {

		/* Get buffer from DEI thread */
		if(Fifo_get(envp->hVideoInFifo, &hVInBuf) < 0) {
			ERR("Failed to get buffer from dei thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Get buffer from Writer thread */
		if(Fifo_get(envp->hWriterOutFifo, &hWOutBuf) < 0) {
			ERR("Failed to get buffer from writer thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Make sure the whole buffer is used for input */
		BufferGfx_resetDimensions(hVInBuf);

		/* Encode */
		if (Venc1_process(hVe1, hVInBuf, hWOutBuf) < 0) {
			ERR("Failed to encode video buffer\n");
			cleanup(THREAD_FAILURE);
		}

		/* Put buffer to dei thread */
		if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Put buffer to writer thread */
		if (Fifo_put(envp->hWriterInFifo, hWOutBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}

	}

cleanup:

	/* Make sure the other threads aren't waiting for us */
	Rendezvous_force(envp->hRendezvousInit);
	Rendezvous_force(envp->hRendezvousWriter);

	/* Wait for the other threads before cleaning up */
	Rendezvous_meet(envp->hRendezvousCleanup);

	if (hVidBufTab) {
		BufTab_delete(hVidBufTab);
	}

	if (hVe1) {
		Venc1_delete(hVe1);
	}

	return status;
}
Beispiel #16
0
/******************************************************************************
 * gst_tiaudenc1_encode_thread
 *     Call the audio codec to process a full input buffer
 ******************************************************************************/
static void* gst_tiaudenc1_encode_thread(void *arg)
{
    GstTIAudenc1   *audenc1    = GST_TIAUDENC1(gst_object_ref(arg));
    void          *threadRet = GstTIThreadSuccess;
    Buffer_Handle  hDstBuf;
    Int32          encDataConsumed;
    GstBuffer     *encDataWindow = NULL;
    GstClockTime   encDataTime;
    Buffer_Handle  hEncDataWindow;
    GstBuffer     *outBuf;
    GstClockTime   sampleDuration;
    guint          sampleRate;
    guint          numSamples;
    Int            bufIdx;
    Int            ret;

    GST_LOG("starting audenc encode thread\n");

    /* Initialize codec engine */
    ret = gst_tiaudenc1_codec_start(audenc1);

    /* Notify main thread that it is ok to continue initialization */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    if (ret == FALSE) {
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
        ("Failed to start codec\n"), (NULL));
        goto thread_exit;
    }

    while (TRUE) {

        /* Obtain a raw data frame */
        encDataWindow  = gst_ticircbuffer_get_data(audenc1->circBuf);
        encDataTime    = GST_BUFFER_TIMESTAMP(encDataWindow);
        hEncDataWindow = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encDataWindow);

        /* Check if there is enough data to be sent to the codec.  The
         * last frame of data may not be sufficient to meet the codec's
         * minimum input requirement.  If so, just throw away the last
         * bit of data rather than padding with bogus data.
         */
        if (GST_BUFFER_SIZE(encDataWindow) <
            Aenc1_getInBufSize(audenc1->hAe)) {
            GST_LOG("Not enough audio data remains\n");
            if (!audenc1->drainingEOS) {
                goto thread_failure;
            }
            goto thread_exit;
        }

        /* Obtain a free output buffer for the encoded data */
        if (!(hDstBuf = gst_tidmaibuftab_get_buf(audenc1->hOutBufTab))) {
            GST_ELEMENT_ERROR(audenc1, RESOURCE, READ,
                ("Failed to get a free contiguous buffer from BufTab\n"),
                (NULL));
            goto thread_exit;
        }

        /* Invoke the audio encoder */
        GST_LOG("Invoking the audio encoder at 0x%08lx with %u bytes\n",
            (unsigned long)Buffer_getUserPtr(hEncDataWindow),
            GST_BUFFER_SIZE(encDataWindow));
        ret             = Aenc1_process(audenc1->hAe, hEncDataWindow, hDstBuf);
        encDataConsumed = Buffer_getNumBytesUsed(hEncDataWindow);

        if (ret < 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
            ("Failed to encode audio buffer\n"), (NULL));
            goto thread_failure;
        }

        /* If no encoded data was used we cannot find the next frame */
        if (ret == Dmai_EBITERROR && encDataConsumed == 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
            ("Fatal bit error\n"), (NULL));
            goto thread_failure;
        }

        if (ret > 0) {
            GST_LOG("Aenc1_process returned success code %d\n", ret); 
        }

        sampleRate     = audenc1->samplefreq;
        numSamples     = encDataConsumed / (2 * audenc1->channels);
        sampleDuration = GST_FRAMES_TO_CLOCK_TIME(numSamples, sampleRate);
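        /* The sample count above assumes 16-bit (2-byte) PCM input,
         * hence the division by (2 * channels).
         */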

        /* Release the reference buffer, and tell the circular buffer how much
         * data was consumed.
         */
        ret = gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow,
                  encDataConsumed);
        encDataWindow = NULL;

        if (!ret) {
            goto thread_failure;
        }

        /* Set the source pad capabilities based on the encoded frame
         * properties.
         */
        gst_tiaudenc1_set_source_caps(audenc1);

        /* Create a DMAI transport buffer object to carry a DMAI buffer to
         * the source pad.  The transport buffer knows how to release the
         * buffer for re-use in this element when the source pad calls
         * gst_buffer_unref().
         */
        outBuf = gst_tidmaibuffertransport_new(hDstBuf, audenc1->hOutBufTab, NULL, NULL);
        gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf),
            Buffer_getNumBytesUsed(hDstBuf));
        gst_buffer_set_caps(outBuf, GST_PAD_CAPS(audenc1->srcpad));

        /* Set timestamp on output buffer */
        if (audenc1->genTimeStamps) {
            GST_BUFFER_DURATION(outBuf)     = sampleDuration;
            GST_BUFFER_TIMESTAMP(outBuf)    = encDataTime;
        }
        else {
            GST_BUFFER_TIMESTAMP(outBuf)    = GST_CLOCK_TIME_NONE;
        }

        /* Tell circular buffer how much time we consumed */
        gst_ticircbuffer_time_consumed(audenc1->circBuf, sampleDuration);

        /* Push the transport buffer to the source pad */
        GST_LOG("pushing buffer to source pad with timestamp : %"
                GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT,
                GST_TIME_ARGS (GST_BUFFER_TIMESTAMP(outBuf)),
                GST_TIME_ARGS (GST_BUFFER_DURATION(outBuf)));

        if (gst_pad_push(audenc1->srcpad, outBuf) != GST_FLOW_OK) {
            GST_DEBUG("push to source pad failed\n");
            goto thread_failure;
        }

        /* Release buffers no longer in use by the codec */
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffer_CODEC_FREE);
    }

thread_failure:

    gst_tithread_set_status(audenc1, TIThread_CODEC_ABORTED);
    gst_ticircbuffer_consumer_aborted(audenc1->circBuf);
    threadRet = GstTIThreadFailure;

thread_exit:

    /* Re-claim any buffers owned by the codec */
    bufIdx = BufTab_getNumBufs(GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab));

    while (bufIdx-- > 0) {
        Buffer_Handle hBuf = BufTab_getBuf(
            GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab), bufIdx);
        Buffer_freeUseMask(hBuf, gst_tidmaibuffer_CODEC_FREE);
    }

    /* Release the last buffer we retrieved from the circular buffer */
    if (encDataWindow) {
        gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow, 0);
    }

    /* We have to wait to shut down this thread until we can guarantee that
     * no more input buffers will be queued into the circular buffer
     * (we're about to delete it).  
     */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    /* Notify main thread that we are done draining before we shutdown the
     * codec, or we will hang.  We proceed in this order so the EOS event gets
     * propagated downstream before we attempt to shut down the codec.  The
     * codec-shutdown process will block until all BufTab buffers have been
     * released, and downstream-elements may hang on to buffers until
     * they get the EOS.
     */
    Rendezvous_force(audenc1->waitOnEncodeDrain);

    /* Shut down the codec */
    if (gst_tiaudenc1_codec_stop(audenc1) < 0) {
        GST_ERROR("failed to stop codec\n");
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
        ("Failed to stop codec\n"), (NULL));
    }

    gst_object_unref(audenc1);

    GST_LOG("exit audio encode_thread (%d)\n", (int)threadRet);
    return threadRet;
}