Example no. 1
/******************************************************************************
 * Sound_alsa_read
 ******************************************************************************/
Int Sound_alsa_read(Sound_Handle hSound, Buffer_Handle hBuf)
{
    Int32 numSamples, readSamples;
    Int8 *bufPtr;

    assert(hSound);
    assert(hBuf);

    readSamples = Buffer_getNumBytesUsed(hBuf) / (2 * hSound->channels);
    bufPtr = Buffer_getUserPtr(hBuf);

    while (readSamples > 0) {
        numSamples = snd_pcm_readi(hSound->rcIn, bufPtr, readSamples);

        if (numSamples == -EAGAIN) 
            continue;

        if (numSamples < 0) {
            if (xrunRecovery(hSound->rcIn,numSamples) < 0) {
                Dmai_err2 ("Failed to read from %s (%s)\n",
                            AUDIO_DEVICE, strerror(-numSamples));
                return Dmai_EFAIL;
            }
        }
        else {
            bufPtr += numSamples * 2 * hSound->channels;
            readSamples -= numSamples;
        }
    }

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return Dmai_EOK;
}
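
Both Sound_alsa_read above and Sound_alsa_write in Example no. 6 call an xrunRecovery() helper that is not listed. The sketch below shows the canonical ALSA overrun/suspend recovery that such a helper typically performs; it is an assumption based on the standard ALSA example code (snd_pcm_prepare()/snd_pcm_resume()), not the verbatim DMAI implementation.

/*
 * Hypothetical sketch of the xrunRecovery() helper referenced above.
 * Follows the standard ALSA xrun/suspend recovery pattern; the actual
 * DMAI implementation may differ. Requires <alsa/asoundlib.h>,
 * <errno.h> and <unistd.h>.
 */
static int xrunRecovery(snd_pcm_t *handle, int err)
{
    if (err == -EPIPE) {
        /* An overrun (capture) or underrun (playback) occurred:
           re-prepare the PCM device so I/O can continue. */
        err = snd_pcm_prepare(handle);
        if (err < 0) {
            Dmai_err1("Failed to recover from xrun (%s)\n", snd_strerror(err));
        }
        return 0;
    }
    else if (err == -ESTRPIPE) {
        /* The device was suspended: wait until it can be resumed. */
        while ((err = snd_pcm_resume(handle)) == -EAGAIN) {
            sleep(1);
        }
        if (err < 0) {
            err = snd_pcm_prepare(handle);
            if (err < 0) {
                Dmai_err1("Failed to recover from suspend (%s)\n",
                          snd_strerror(err));
            }
        }
        return 0;
    }
    return err;
}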
Example no. 2
static gboolean gstti_viddec_process(GstTIDmaidec *dmaidec, GstBuffer *encData,
                    Buffer_Handle hDstBuf,gboolean codecFlushed){
    GstTIDmaidecData *decoder;
    Buffer_Handle   hEncData = NULL;
    Int32           encDataConsumed, originalBufferSize;
    Int             ret;

    decoder = (GstTIDmaidecData *)
       g_type_get_qdata(G_OBJECT_CLASS_TYPE(G_OBJECT_GET_CLASS(dmaidec)),
       GST_TIDMAIDEC_PARAMS_QDATA);

    hEncData = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encData);
    g_assert(hEncData != NULL);

    /* Make sure the whole buffer is used for output */
    BufferGfx_resetDimensions(hDstBuf);

    /* Invoke the video decoder */
    originalBufferSize = Buffer_getNumBytesUsed(hEncData);
    GST_DEBUG("invoking the video decoder, with %ld bytes (%p, %p)\n",originalBufferSize,
        Buffer_getUserPtr(hEncData),Buffer_getUserPtr(hDstBuf));
    ret = Vdec_process(dmaidec->hCodec, hEncData, hDstBuf);
    encDataConsumed = (codecFlushed) ? 0 :
        Buffer_getNumBytesUsed(hEncData);

    if (ret < 0) {
        GST_ELEMENT_ERROR(dmaidec,STREAM,DECODE,(NULL),
            ("failed to decode video buffer"));
        return FALSE;
    }

    if (ret == Dmai_EBITERROR){
        GST_ELEMENT_WARNING(dmaidec,STREAM,DECODE,(NULL),
            ("Unable to decode frame with timestamp %"GST_TIME_FORMAT,
                GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(encData))));
        /* We failed to process this buffer, so we need to release it
         * because the codec won't do it.
         */
        GST_DEBUG("Freeing buffer because of bit error on the stream");
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffertransport_GST_FREE |
           decoder->dops->outputUseMask);
        return FALSE;
    }

    return TRUE;
}
Example no. 3
/******************************************************************************
 * Framecopy_resizer_accel_execute
 ******************************************************************************/
Int Framecopy_resizer_accel_execute(Framecopy_Handle hFc,
                                    Buffer_Handle hSrcBuf,
                                    Buffer_Handle hDstBuf)
{
#if defined(CONFIG_VIDEO_OMAP34XX_ISP_RESIZER)
    Int         i;
    struct      v4l2_buffer qbuf[2];

    assert(hFc);
    assert(hSrcBuf);
    assert(hDstBuf);

    /* Pointers must be a multiple of 32 bytes */
    assert((Buffer_getPhysicalPtr(hDstBuf) & 0x1F) == 0);
    assert((Buffer_getPhysicalPtr(hSrcBuf) & 0x1F) == 0);

    /* Queue the resizer buffers */
    for (i=0; i < 2; i++) { 

        qbuf[i].type         = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        qbuf[i].memory       = V4L2_MEMORY_USERPTR;
        qbuf[i].index        = i;

        if (ioctl (hFc->fd, RSZ_QUERYBUF, &qbuf[i]) == -1) {
            Dmai_err1("Failed to query buffer index %d\n", i);
            return Dmai_EFAIL;
        }

        if (i == 0) {
            qbuf[i].m.userptr = (unsigned long) Buffer_getUserPtr(hSrcBuf);
        }
        else {
            qbuf[i].m.userptr = (unsigned long) Buffer_getUserPtr(hDstBuf);
        }

        if (ioctl (hFc->fd, RSZ_QUEUEBUF, &qbuf[i]) == -1) {
            Dmai_err1("Failed to queue buffer index %d\n",i);
            return Dmai_EFAIL;
        }
    }

    if (ioctl(hFc->fd, RSZ_RESIZE, NULL) == -1) {
        Dmai_err0("Failed to execute resize job\n");
        return Dmai_EFAIL;
    }

    Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));

    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif /* end CONFIG_VIDEO_OMAP34XX_ISP_RESIZER */
}
Example no. 4
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#ifdef CONFIG_DM365_IPIPE
    struct imp_convert  rsz;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    UInt32               srcOffset;
    UInt32               dstOffset;
    
    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
    dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);

    rsz.in_buff.index     = -1;
    rsz.in_buff.buf_type  = IMP_BUF_IN;
    rsz.in_buff.offset    = (UInt32) Buffer_getUserPtr(hSrcBuf) + srcOffset;
    rsz.in_buff.size      = Buffer_getSize(hSrcBuf);

    rsz.out_buff1.index    = -1;
    rsz.out_buff1.buf_type = IMP_BUF_OUT1;
    rsz.out_buff1.offset   = (UInt32) Buffer_getUserPtr(hDstBuf) + dstOffset;
    rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
    
    /* 
     * The IPIPE requires that the memory offsets of the input and output
     * buffers start on 32-byte boundaries.
     */
    assert((rsz.in_buff.offset  & 0x1F) == 0);
    assert((rsz.out_buff1.offset & 0x1F) == 0);

    /* Start IPIPE operation */
    if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
        Dmai_err0("Failed RSZ_RESIZE\n");
        return Dmai_EFAIL;
    }

    Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));
    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif
}
Example no. 5
/******************************************************************************
 * writeFrame
 ******************************************************************************/
static Int writeFrame(Buffer_Handle hBuf, FILE *outFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);
    Int size;

    size = Buffer_getNumBytesUsed(hBuf);

    if (fwrite(ptr, size, 1, outFile) != 1) {
        fprintf(stderr,"Failed to write data to disk\n");
        return -1;
    }

    printf("Wrote audio frame size %d to disk\n", size);

    return 0;
}
Example no. 6
/******************************************************************************
 * Sound_alsa_write
 ******************************************************************************/
Int Sound_alsa_write(Sound_Handle hSound, Buffer_Handle hBuf)
{
    Int32 numSamples, writeSamples;
    Int8 *bufPtr;

    assert(hSound);
    assert(hBuf);

    writeSamples = Buffer_getNumBytesUsed(hBuf) / (2 * hSound->channels);
    bufPtr = Buffer_getUserPtr(hBuf);

    while (writeSamples > 0) {
        /* start by doing a blocking wait for free space. */
        snd_pcm_wait (hSound->rcOut, PCM_TIMEOUT);
        numSamples = snd_pcm_writei(hSound->rcOut, bufPtr, writeSamples);

        if (numSamples == -EAGAIN) 
            continue;

        if (numSamples < 0) {
            if (xrunRecovery(hSound->rcOut,numSamples) < 0) {
                Dmai_err2 ("Failed to write to %s (%s)\n",
                            AUDIO_DEVICE, strerror(-numSamples));
                return Dmai_EFAIL;
            }
        }
        else {
            bufPtr += numSamples * 2 * hSound->channels;
            writeSamples -= numSamples;
        }
    }

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return Dmai_EOK;
}
Example no. 7
/******************************************************************************
 * Idec1_process
 ******************************************************************************/
Int Idec1_process(Idec1_Handle hId, Buffer_Handle hInBuf,
                 Buffer_Handle hOutBuf)
{
    BufferGfx_Dimensions    dim;
    IMGDEC1_DynamicParams   dynParams;
    IMGDEC1_InArgs          inArgs;
    IMGDEC1_OutArgs         outArgs;
    IMGDEC1_Status          decStatus;
    XDM1_BufDesc            inBufDesc;
    XDM1_BufDesc            outBufDesc;
    XDAS_Int32              status;
    XDAS_Int8 *             inPtr;
    XDAS_Int8 *             outPtr;
    UInt32                  offset = 0;
    UInt32                  i;
    
    assert(hId);
    assert(hInBuf);
    assert(hOutBuf);
    assert(Buffer_getSize(hInBuf));   
    assert(Buffer_getUserPtr(hInBuf)); 
    assert(Buffer_getUserPtr(hOutBuf));
    assert(Buffer_getNumBytesUsed(hInBuf));
    assert(Buffer_getSize(hOutBuf));
    assert(Buffer_getType(hOutBuf) == Buffer_Type_GRAPHICS);

    BufferGfx_getDimensions(hOutBuf, &dim);
    
    inPtr  = Buffer_getUserPtr(hInBuf);
    outPtr = Buffer_getUserPtr(hOutBuf);

    inArgs.size             = sizeof(IMGDEC1_InArgs);
    inArgs.numBytes         = Buffer_getNumBytesUsed(hInBuf);

    outArgs.size            = sizeof(IMGDEC1_OutArgs);

    inBufDesc.numBufs       = 1;
    outBufDesc.numBufs      = hId->minNumOutBufs;
    
    inBufDesc.descs[0].buf = inPtr;
    inBufDesc.descs[0].bufSize = Buffer_getSize(hInBuf);

    for(i = 0; i < hId->minNumOutBufs; i++)
    {
        outBufDesc.descs[i].buf = (XDAS_Int8 *)((unsigned int)outPtr + offset);
        offset +=  hId->minOutBufSize[i];
        outBufDesc.descs[i].bufSize = hId->minOutBufSize[i];
    }   
        
    /* Decode image buffer */
    status = IMGDEC1_process(hId->hDecode, &inBufDesc, &outBufDesc, &inArgs,
                            &outArgs);

    Buffer_setNumBytesUsed(hInBuf, outArgs.bytesConsumed);

    if (status != IMGDEC1_EOK) {
        if (XDM_ISFATALERROR(outArgs.extendedError)) {
            Dmai_err2("IMGDEC1_process() failed with error (%d ext: 0x%x)\n",
                      (Int)status, (Uns) outArgs.extendedError);
            return Dmai_EFAIL;
        }
        else {
            Dmai_dbg1("IMGDEC1_process() non-fatal error 0x%x\n",
                      (Uns) outArgs.extendedError);
            return Dmai_EBITERROR;
        }
    }

    /* Get the dynamic codec status */
    decStatus.data.buf = NULL;
    decStatus.size = sizeof(IMGDEC1_Status);
    dynParams.size = sizeof(IMGDEC1_DynamicParams);
    status = IMGDEC1_control(hId->hDecode, XDM_GETSTATUS, &dynParams,
                            &decStatus);

    if (status != IMGDEC1_EOK) {
        Dmai_err1("XDM_GETSTATUS failed, status=%d\n", status);
        return Dmai_EFAIL;
    }

    /* Set output Color Format */
    switch (decStatus.outputChromaFormat) {
        case XDM_YUV_422ILE:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_UYVY); 
            break;
        case XDM_YUV_420P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV420P); 
            break;
        case XDM_YUV_422P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV422P); 
            break;
        case XDM_YUV_444P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV444P); 
            break;
        case XDM_GRAY:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_GRAY); 
            break;
        default:
            printf("Unsupported output color space.\n");
            return Dmai_EFAIL;        
    }
       
    dim.x = dim.y = 0;
    dim.width = decStatus.outputWidth;
    dim.height = decStatus.outputHeight;
    dim.lineLength = decStatus.outputWidth *
                      ColorSpace_getBpp(BufferGfx_getColorSpace(hOutBuf)) / 8;
    
    if (BufferGfx_setDimensions(hOutBuf, &dim) < 0) {
        Dmai_err0("Frame does not fit in allocated buffer\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}
Example no. 8
/******************************************************************************
 * speechThrFxn
 ******************************************************************************/
Void *speechThrFxn(Void *arg)
{
    SpeechEnv              *envp                = (SpeechEnv *) arg;
    SPHDEC1_Params          defaultParams       = Sdec1_Params_DEFAULT;
    SPHDEC1_DynamicParams   defaultDynParams    = Sdec1_DynamicParams_DEFAULT;
    Void                   *status              = THREAD_SUCCESS;
    Sound_Attrs             sAttrs              = Sound_Attrs_MONO_DEFAULT;
    Loader_Attrs            lAttrs              = Loader_Attrs_DEFAULT;
    Buffer_Attrs            bAttrs              = Buffer_Attrs_DEFAULT;
    Sdec1_Handle            hSd1                = NULL;
    Sound_Handle            hSound              = NULL;
    Loader_Handle           hLoader             = NULL;
    Engine_Handle           hEngine             = NULL;
    Buffer_Handle           hOutBuf             = NULL;
    SPHDEC1_Params         *params;
    SPHDEC1_DynamicParams  *dynParams;
    Buffer_Handle           hInBuf;

    /* Open the codec engine */
    hEngine = Engine_open(envp->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ERR("Failed to open codec engine %s\n", envp->engineName);
        cleanup(THREAD_FAILURE);
    }

    /* Create the sound device */
    sAttrs.sampleRate = 8000;
    sAttrs.mode = Sound_Mode_OUTPUT;
    sAttrs.leftGain  = 127;
    sAttrs.rightGain = 127;
    sAttrs.bufSize   = 128;
    hSound = Sound_create(&sAttrs);

    if (hSound == NULL) {
        ERR("Failed to create audio device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Set the sample rate for the user interface */
    gblSetSamplingFrequency(sAttrs.sampleRate);

    /* Use supplied params if any, otherwise use defaults */
    params = envp->params ? envp->params : &defaultParams;
    dynParams = envp->dynParams ? envp->dynParams : &defaultDynParams;

    /* Create the speech decoder */
    hSd1 = Sdec1_create(hEngine, envp->speechDecoder, params, dynParams);

    if (hSd1 == NULL) {
        ERR("Failed to create speech decoder: %s\n", envp->speechDecoder);
        cleanup(THREAD_FAILURE);
    }

    /*
     * Make the output buffer size twice the size of what the codec needs
     * as the codec needs mono and the Sound module converts the decoded
     * mono samples to stereo before writing to the device driver.
     */
    hOutBuf = Buffer_create(OUTBUFSIZE, &bAttrs);

    if (hOutBuf == NULL) {
        ERR("Failed to allocate output buffer\n");
        cleanup(THREAD_FAILURE);
    }

    /* How much encoded data to feed the codec each process call */
    lAttrs.readSize = INBUFSIZE;

    /* Make the total ring buffer larger */
    lAttrs.readBufSize = lAttrs.readSize * 512;

    /* Create the file loader for reading encoded data */
    hLoader = Loader_create(envp->speechFile, &lAttrs);

    if (hLoader == NULL) {
        ERR("Failed to create loader\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    /* Prime the file loader */
    Loader_prime(hLoader, &hInBuf);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Decode the audio buffer */
        if (Sdec1_process(hSd1, hInBuf, hOutBuf) < 0) {
            ERR("Failed to decode audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Increment statistics for user interface */
        gblIncSoundBytesProcessed(Buffer_getNumBytesUsed(hInBuf));

        /*
         * Force the output buffer size since we are forcing the size of the
         * output buffer allocated as opposed to asking the codec for a size.
         */
        Buffer_setNumBytesUsed(hOutBuf, OUTBUFSIZE);

        /* Write the decoded samples to the sound device */
        if (Sound_write(hSound, hOutBuf) < 0) {
            ERR("Failed to write audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Load a new frame from the file system */
        if (Loader_getFrame(hLoader, hInBuf) < 0) {
            ERR("Failed to read a frame of encoded data\n");
            cleanup(THREAD_FAILURE);
        }

        /* Check if the clip has ended */
        if (Buffer_getUserPtr(hInBuf) == NULL) {
            /* Wait for the video clip to finish, if applicable */
            Rendezvous_meet(envp->hRendezvousLoop);

            /* If we are to loop the clip, start over */
            if (envp->loop) {
                /* Recreate the speech codec */
                Sdec1_delete(hSd1);
                hSd1 = Sdec1_create(hEngine, envp->speechDecoder,
                                    params, dynParams);

                if (hSd1 == NULL) {
                    ERR("Failed to create speech decoder: %s\n",
                        envp->speechDecoder);
                    cleanup(THREAD_FAILURE);
                }

                /* Re-prime the file loader */
                Loader_prime(hLoader, &hInBuf);
            }
            else {
                printf("End of clip reached, exiting..\n");
                cleanup(THREAD_SUCCESS);
            }
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Rendezvous_force(envp->hRendezvousLoop);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hLoader) {
        Loader_delete(hLoader);
    }

    if (hSd1) {
        Sdec1_delete(hSd1);
    }

    if (hSound) {
        Sound_delete(hSound);
    }

    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    return status;
}
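
The thread functions in these examples (speechThrFxn here, writerThrFxn in Example no. 11) exit error paths with cleanup(THREAD_FAILURE) and then fall through to a cleanup: label. In the TI DVSDK demos this is a small convenience macro; the definition below is an assumed sketch of that pattern (the gblSetQuit() call and the ERR() wrapper are assumptions, named here only for illustration).

/*
 * Assumed sketch of the demo helper macros used by the thread functions.
 * The exact definitions live in the demo headers and may differ.
 */
#define ERR(fmt, ...) fprintf(stderr, "Error: " fmt, ##__VA_ARGS__)

/* Record the thread exit status, ask the other threads to quit
 * (gblSetQuit() is assumed to be the counterpart of the gblGetQuit()
 * used in the main loop) and jump to the cleanup: label. Call sites
 * are expected to be inside braces. */
#define cleanup(x)      \
    status = (x);       \
    gblSetQuit();       \
    goto cleanup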
Example no. 9
File: appMain.c Project: sv99/DVSDK
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    VIDENC1_Params         params    = Venc1_Params_DEFAULT;
    VIDENC1_DynamicParams  dynParams = Venc1_DynamicParams_DEFAULT;
    BufferGfx_Attrs        gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Buffer_Attrs           bAttrs    = Buffer_Attrs_DEFAULT;
    Time_Attrs             tAttrs    = Time_Attrs_DEFAULT;
    Venc1_Handle           hVe1      = NULL;
    FILE                  *outFile   = NULL;
    FILE                  *reconFile = NULL;
    FILE                  *inFile    = NULL;
    Engine_Handle          hEngine   = NULL;
    Time_Handle            hTime     = NULL;
    Bool                   flushed   = FALSE;
    Bool                   mustExit  = FALSE;
    BufTab_Handle          hBufTab   = NULL;
    Buffer_Handle          hOutBuf   = NULL;
    Buffer_Handle          hFreeBuf  = NULL;
    Buffer_Handle          hInBuf    = NULL;
    Buffer_Handle          hReconBuf = NULL;
    Int                    numFrame  = 0;
    Int                    flushCntr = 1;
    Int                    bufIdx;
    Int                    inBufSize, outBufSize;
    Cpu_Device             device;
    Int                    numBufs;
    ColorSpace_Type        colorSpace;
    UInt32                 time;
    Int                    ret = Dmai_EOK;

    printf("Starting application...\n");

    /* Initialize the codec engine run time */
    CERuntime_init();

    /* Initialize DMAI */
    Dmai_init();

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Open the input file with raw yuv data */
    inFile = fopen(args->inFile, "rb");

    if (inFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open input file %s\n", args->inFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(inFile, vbufferIn, _IOFBF, sizeof(vbufferIn)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to setvbuf on input file descriptor\n");
        goto cleanup;
    }

    /* Open the output file where to put encoded data */
    outFile = fopen(args->outFile, "wb");

    if (outFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open output file %s\n", args->outFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(outFile, vbufferOut, _IOFBF, sizeof(vbufferOut)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to setvbuf on output file descriptor\n");
        goto cleanup;
    }

    /* Open the output file where to put reconstructed frames */
    if (args->writeReconFrames) {
        reconFile = fopen(args->reconFile, "wb");

        if (reconFile == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to open output file %s\n", args->reconFile);
            goto cleanup;
        }

        /* Using a larger vbuf to enhance performance of file i/o */
        if (setvbuf(reconFile, vbufferRecon, _IOFBF,
                    sizeof(vbufferRecon)) != 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to setvbuf on output file descriptor\n");
            goto cleanup;
        }
    }

    /* Open the codec engine */
    hEngine = Engine_open(args->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open codec engine: %s\n", args->engineName);
        goto cleanup;
    }

    /* Set up codec parameters depending on bit rate */
    if (args->bitRate < 0) {
        /* Variable bit rate */
        params.rateControlPreset = IVIDEO_NONE;

        /*
         * If variable bit rate use a bogus bit rate value (> 0)
         * since it will be ignored.
         */
        params.maxBitRate        = 2000000;
    }
    else {
        /* Constant bit rate */
        params.rateControlPreset = IVIDEO_LOW_DELAY;
        params.maxBitRate        = args->bitRate;
    }

    /* Set up codec parameters depending on device */
    switch (device) {
    case Cpu_Device_DM6467:
        params.inputChromaFormat = XDM_YUV_420SP;
        params.reconChromaFormat = XDM_CHROMA_NA;
        break;
    case Cpu_Device_DM355:
        params.inputChromaFormat = XDM_YUV_422ILE;
        params.reconChromaFormat = XDM_YUV_420P;
        break;
    case Cpu_Device_DM365:
    case Cpu_Device_DM368:
        params.inputChromaFormat = XDM_YUV_420SP;
        params.reconChromaFormat = XDM_YUV_420SP;
        break;
    case Cpu_Device_DM3730:
        params.rateControlPreset = IVIDEO_STORAGE;
        params.inputChromaFormat = XDM_YUV_422ILE;
        break;
    default:
        params.inputChromaFormat = XDM_YUV_422ILE;
        break;
    }

    params.maxWidth              = args->width;
    params.maxHeight             = args->height;

    /* Workaround for SDOCM00068944: h264fhdvenc fails
       to create codec when params.dataEndianness is
       set as XDM_BYTE */
    if(device == Cpu_Device_DM6467) {
        if (!strcmp(args->codecName, "h264fhdvenc")) {
            params.dataEndianness        = XDM_LE_32;
        }
    }

    params.maxInterFrameInterval = 1;
    dynParams.targetBitRate      = params.maxBitRate;
    dynParams.inputWidth         = params.maxWidth;
    dynParams.inputHeight        = params.maxHeight;

    /* Create the video encoder */
    hVe1 = Venc1_create(hEngine, args->codecName, &params, &dynParams);

    if (hVe1 == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create video encoder: %s\n", args->codecName);
        goto cleanup;
    }

    /* Ask the codec how much input data it needs */
    inBufSize = Venc1_getInBufSize(hVe1);

    /* Ask the codec how much space it needs for output data */
    outBufSize = Venc1_getOutBufSize(hVe1);

    /* Which color space to use in the graphics buffers depends on the device */
    colorSpace = ((device == Cpu_Device_DM6467)||
                  (device == Cpu_Device_DM365) ||
                  (device == Cpu_Device_DM368)) ? ColorSpace_YUV420PSEMI :
                 ColorSpace_UYVY;

    /* Align buffers to cache line boundary */
    gfxAttrs.bAttrs.memParams.align = bAttrs.memParams.align = BUFSIZEALIGN;

    /* Use cached buffers if requested */
    if (args->cache) {
        gfxAttrs.bAttrs.memParams.flags = bAttrs.memParams.flags
                                          = Memory_CACHED;
    }

    gfxAttrs.dim.width      = args->width;
    gfxAttrs.dim.height     = args->height;
    if ((device == Cpu_Device_DM6467) || (device == Cpu_Device_DM365)
            || (device == Cpu_Device_DM368)) {
        gfxAttrs.dim.height = Dmai_roundUp(gfxAttrs.dim.height, CODECHEIGHTALIGN);
    }
    gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(args->width, colorSpace);
    gfxAttrs.colorSpace     = colorSpace;

    if (inBufSize < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to calculate buffer attributes\n");
        goto cleanup;
    }

    /* Number of input buffers required */
    if(params.maxInterFrameInterval>1) {
        /* B frame support */
        numBufs = params.maxInterFrameInterval;
    }
    else {
        numBufs = 1;
    }

    /* Create a table of input buffers of the size requested by the codec */
    hBufTab =
        BufTab_create(numBufs, Dmai_roundUp(inBufSize, BUFSIZEALIGN),
                      BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to allocate contiguous buffers\n");
        goto cleanup;
    }

    /* Set input buffer table */
    Venc1_setBufTab(hVe1, hBufTab);

    /* Create the reconstructed frame buffer for raw yuv data */
    if (args->writeReconFrames) {
        hReconBuf =
            Buffer_create(Dmai_roundUp(inBufSize, BUFSIZEALIGN),
                          BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hReconBuf == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffer\n");
            goto cleanup;
        }
    }

    /* Create the output buffer for encoded video data */
    hOutBuf = Buffer_create(Dmai_roundUp(outBufSize, BUFSIZEALIGN), &bAttrs);

    if (hOutBuf == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create contiguous buffer\n");
        goto cleanup;
    }

    while (1) {

        /* Get a buffer for input */
        hInBuf = BufTab_getFreeBuf(hBufTab);

        if (hInBuf == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get a free contiguous buffer from BufTab\n");
            BufTab_print(hBufTab);
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Read a yuv input frame */
        printf("\n Frame %d: ", numFrame);
        if ((device == Cpu_Device_DM6467)||
                (device == Cpu_Device_DM365) ||
                (device == Cpu_Device_DM368)) {
            if(args->sp) {
                if (readFrame420SP(hInBuf, inFile, args->height) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            } else {
                if (readFrame420P(hInBuf, inFile, args->height) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            }
        }
        else {
            if (readFrameUYVY(hInBuf, inFile) < 0) {
                ret = Dmai_EFAIL;
                mustExit = TRUE;
            }
        }

        if (++numFrame == args->numFrames||mustExit == TRUE) {
            if(!(params.maxInterFrameInterval>1)) {
                /* No B-frame support */
                printf("... exiting \n");
                goto cleanup;
            }

            /*
             * When encoding a stream with B-frames, ending the processing
             * requires to free the buffer held by the encoder. This is done by
             * flushing the encoder and performing a last process() call
             * with a dummy input buffer.
             */
            printf("\n... exiting with flush (B-frame stream) \n");
            flushCntr = params.maxInterFrameInterval-1;
            flushed = TRUE;
            Venc1_flush(hVe1);
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Read time: %uus\n", (Uns)time);
        }

        /*
         * Following flushing loop will iterate more than one time only
         * when the encoder completes processing by flushing the frames
         * held by the encoder. All flushed frames will be encoded as P
         * or I frames.
         */

        for(bufIdx = 0; bufIdx < flushCntr; bufIdx++) {

            if (args->cache) {
                /*
                 *  To meet xDAIS DMA Rule 7, when input buffers are cached, we
                 *  must writeback the cache into physical memory.  Also, per DMA
                 *  Rule 7, we must invalidate the output buffer from
                 *  cache before providing it to any xDAIS algorithm.
                 */
                Memory_cacheWbInv(Buffer_getUserPtr(hInBuf),
                                  Buffer_getSize(hInBuf));

                /* Per DMA Rule 7, the output buffer cache lines must be invalidated */
                Memory_cacheInv(Buffer_getUserPtr(hOutBuf),
                                Buffer_getSize(hOutBuf));

                if (args->benchmark) {
                    if (Time_delta(hTime, &time) < 0) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to get timer delta\n");
                        goto cleanup;
                    }

                    printf("Pre-process cache maintenance: %uus \n", (Uns) time);
                }
            }

            /* Make sure the whole buffer is used for input */
            BufferGfx_resetDimensions(hInBuf);

            /* Encode the video buffer */
            if (Venc1_process(hVe1, hInBuf, hOutBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to encode video buffer\n");
                goto cleanup;
            }

            /* if encoder generated output content, free released buffer */
            if (Buffer_getNumBytesUsed(hOutBuf)>0) {
                /* Get free buffer */
                hFreeBuf = Venc1_getFreeBuf(hVe1);
                /* Free buffer */
                BufTab_freeBuf(hFreeBuf);
            }
            /* if encoder did not generate output content */
            else {
                /* if non B frame sequence */
                /* encoder skipped frame probably exceeding target bitrate */
                if (params.maxInterFrameInterval<=1) {
                    /* free buffer */
                    printf(" Encoder generated 0 size frame\n");
                    BufTab_freeBuf(hInBuf);
                }
            }

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get encode time\n");
                    goto cleanup;
                }

                printf("[%d] Encode: %uus\n", numFrame, (Uns)time);
            }

            if (args->cache) {
                /* Writeback the outBuf. */
                Memory_cacheWb(Buffer_getUserPtr(hOutBuf),
                               Buffer_getSize(hOutBuf));

                if (args->benchmark) {
                    if (Time_delta(hTime, &time) < 0) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to get timer delta\n");
                        goto cleanup;
                    }

                    printf("Post-process cache write back: %uus \n", (Uns) time);
                }
            }

            /* Write the encoded frame to the file system */
            if (Buffer_getNumBytesUsed(hOutBuf)) {
                if (fwrite(Buffer_getUserPtr(hOutBuf),
                           Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to write encoded video data to file\n");
                    goto cleanup;
                }
            }

            /* Write the reconstructed frame to the file system */
            if (args->writeReconFrames) {
                processReconData(Venc1_getReconBufs(hVe1), hInBuf, hReconBuf);

                if (Buffer_getNumBytesUsed(hReconBuf)) {
                    if (fwrite(Buffer_getUserPtr(hReconBuf),
                               Buffer_getNumBytesUsed(hReconBuf), 1, reconFile) != 1) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to write reconstructed frame to file\n");
                        goto cleanup;
                    }
                }
            }

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }

                printf("File write time: %uus\n", (Uns)time);

                if (Time_total(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get timer total\n");
                    goto cleanup;
                }

                printf("Total: %uus\n", (Uns)time);
            }
        }

        /* If the codec flushing completed, exit main thread */
        if (flushed) {
            /* Free dummy input buffer used for flushing process() calls */
            printf("freeing dummy input buffer ... \n");
            BufTab_freeBuf(hInBuf);
            break;
        }
    }

cleanup:
    /* Clean up the application */
    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hReconBuf) {
        Buffer_delete(hReconBuf);
    }

    if (hVe1) {
        Venc1_delete(hVe1);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    if (inFile) {
        fclose(inFile);
    }

    if (outFile) {
        fclose(outFile);
    }

    if (reconFile) {
        fclose(reconFile);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    printf("End of application.\n");

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
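
Example no. 9 reads raw input through readFrameUYVY(), readFrame420P() and readFrame420SP() helpers that are not listed here. The sketch below illustrates what a UYVY reader of this kind typically does (read one interleaved frame line by line so the graphics buffer's lineLength pitch is respected); it is an assumed illustration, not the verbatim DVSDK helper.

/*
 * Hypothetical sketch of a readFrameUYVY()-style helper: reads one
 * interleaved UYVY frame (2 bytes per pixel) from a file into a DMAI
 * graphics buffer, one line at a time so the buffer pitch is honored.
 * The helper actually used by Example no. 9 may differ.
 */
static Int readFrameUYVY(Buffer_Handle hBuf, FILE *inFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);
    BufferGfx_Dimensions dim;
    Int y;

    BufferGfx_getDimensions(hBuf, &dim);

    for (y = 0; y < dim.height; y++) {
        if (fread(ptr, dim.width * 2, 1, inFile) != 1) {
            fprintf(stderr, "Failed to read a full UYVY frame from file\n");
            return -1;
        }
        ptr += dim.lineLength;
    }

    Buffer_setNumBytesUsed(hBuf, dim.width * dim.height * 2);

    return 0;
}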
Example no. 10
/******************************************************************************
 * appMain
 ******************************************************************************/
Void appMain(Args * args)
{
    Buffer_Attrs            bAttrs      = Buffer_Attrs_DEFAULT;
    Time_Attrs              tAttrs      = Time_Attrs_DEFAULT;
    SPHENC1_Params          params      = Senc1_Params_DEFAULT;
    SPHENC1_DynamicParams   dynParams   = Senc1_DynamicParams_DEFAULT;
    Senc1_Handle            hSe1        = NULL;
    Engine_Handle           hEngine     = NULL;
    Buffer_Handle           hOutBuf     = NULL;
    Buffer_Handle           hInBuf      = NULL;
    Time_Handle             hTime       = NULL;
    FILE                   *inFile      = NULL;
    FILE                   *outFile     = NULL;
    Int                     numFrame    = 0;
    Int32                   bytesRead;
    UInt32                  time;

   printf("Starting application...\n");

   if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            printf("Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Initialize the codec engine run time */
    CERuntime_init();

    /* Initialize DMAI */
    Dmai_init();

    /* Open the output file */
    outFile = fopen(args->outFile, "wb");

    if (outFile == NULL) {
        printf("Failed to create output file %s\n", args->outFile);
        goto cleanup;
    }

    /* Open the input file */
    inFile = fopen(args->inFile, "rb");

    if (inFile == NULL) {
        printf("Failed to open input file %s\n", args->inFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(outFile, vbuffer, _IOFBF, sizeof(vbuffer)) != 0) {
        printf("Failed to setvbuf on file descriptor\n");
        goto cleanup;
    }

    /* Open the codec engine */
    hEngine = Engine_open(args->engineName, NULL, NULL);

    if (hEngine == NULL) {
        printf("Failed to open codec engine %s\n", args->engineName);
        goto cleanup;
    }
    
    params.compandingLaw = args->compandingLaw;

    /* Create the SPHENC1 based speech encoder */
    hSe1 = Senc1_create(hEngine, args->codecName, &params, &dynParams);

    if (hSe1 == NULL) {
        printf("Failed to create %s\n", args->codecName);
        goto cleanup;
    }

    /* Align buffers to cache line boundary */    
    bAttrs.memParams.align = BUFSIZEALIGN; 
    
    /* Use cached buffers if requested */  
    if (args->cache) {
        bAttrs.memParams.flags = Memory_CACHED;
    } 
    
    /* Create an output buffer for encoded data */
    hOutBuf = Buffer_create(
        Dmai_roundUp(Senc1_getOutBufSize(hSe1), BUFSIZEALIGN), &bAttrs); 

    if (hOutBuf == NULL) {
        printf("Failed to create contiguous buffer\n");
        goto cleanup;
    }

    /* Create an input buffer for input data */
    hInBuf = Buffer_create(Dmai_roundUp(Senc1_getInBufSize(hSe1), BUFSIZEALIGN),
                           &bAttrs);

    if (hInBuf == NULL) {
        printf("Failed to create contiguous buffer\n");
        goto cleanup;
    }

    while (numFrame++ < args->numFrames) {
        printf("Frame %d: ", numFrame);
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                printf("Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Read raw PCM data from input file */
        bytesRead = fread(Buffer_getUserPtr(hInBuf), 1,
                          Senc1_getInBufSize(hSe1), inFile);

        if (bytesRead < Senc1_getInBufSize(hSe1)) {
            if (ferror(inFile)) {
                printf("Failed to read data from input file\n");
                goto cleanup;
            }
             printf("Failed to read full frame %ld bytes, read %ld bytes\n",
                                    Senc1_getInBufSize(hSe1),bytesRead);
             goto cleanup;
        }

        Buffer_setNumBytesUsed(hInBuf, bytesRead);

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                printf("Failed to get timer delta\n");
                goto cleanup;
            }
            printf("Read: %uus ", (Uns) time);
        }

        if (args->cache) {
            /*
             *  To meet xDAIS DMA Rule 7, when input buffers are cached, we
             *  must writeback the cache into physical memory.  Also, per DMA
             *  Rule 7, we must invalidate the output buffer from
             *  cache before providing it to any xDAIS algorithm.
             */
            Memory_cacheWbInv(Buffer_getUserPtr(hInBuf),
                              Buffer_getSize(hInBuf));

            /* Per DMA Rule 7, the output buffer cache lines must be invalidated */
            Memory_cacheInv(Buffer_getUserPtr(hOutBuf),
                            Buffer_getSize(hOutBuf));

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }
                printf("Pre-process cache maintenance: %uus ", (Uns) time);
            }
        }

        /* Encode the speech buffer */
        if (Senc1_process(hSe1, hInBuf, hOutBuf) < 0) {
            printf("Failed to encode speech buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                printf("Failed to get timer delta\n");
                goto cleanup;
            }
            printf("Encode: %uus ", (Uns) time);
        }

        if (args->cache) {
            /* Writeback the outBuf. */
            Memory_cacheWb(Buffer_getUserPtr(hOutBuf), Buffer_getSize(hOutBuf));

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }
                printf("Post-process cache write back: %uus ", (Uns) time);
            }
        }

        printf("Write encoded speech data to output file \n");

        /* Write the encoded frame to the file system */
        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (fwrite(Buffer_getUserPtr(hOutBuf),
                       Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                printf("Failed to write encoded speech data to file\n");
                goto cleanup;
            }
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                printf("Failed to get timer delta\n");
                goto cleanup;
            }
            printf("Write: %uus ", (Uns) time);

            if (Time_total(hTime, &time) < 0) {
                printf("Failed to get timer total\n");
                goto cleanup;
            }

            printf("Total: %uus\n", (unsigned int)time);
        }
    }
cleanup:
    /* Clean up the application */
    if (hSe1) {
        Senc1_delete(hSe1);
    }

    if (hInBuf) {
        Buffer_delete(hInBuf);
    }

    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    if (inFile) {
        fclose(inFile);
    }

    if (outFile) {
        fclose(outFile);
    }

    printf("End of application.\n");

    return;
}
Example no. 11
Void *writerThrFxn(Void *arg)
{
    WriterEnv *envp = (WriterEnv *) arg;
    Void *status = THREAD_SUCCESS;
    FILE *outFile = NULL;
    Buffer_Attrs bAttrs = Buffer_Attrs_DEFAULT;
    BufTab_Handle hBufTab = NULL;
    Buffer_Handle hOutBuf;
    Int bufIdx;

    /* Initialization */

    /* Open the output video file */
    outFile = fopen(envp->videoFile, "w");
    if (outFile == NULL) {
        ERR("Failed to open %s for writing\n", envp->videoFile);
        cleanup(THREAD_FAILURE);
    }

    /* Create buftab for video thread */
    hBufTab = BufTab_create(NUM_WRITER_BUFS, envp->outBufSize, &bAttrs);
    if (hBufTab == NULL) {
        ERR("Failed to allocate contiguous buffers\n");
        cleanup(THREAD_FAILURE);
    }

    /* Send all buffers to the video thread to be filled with encoded data */
    for (bufIdx = 0; bufIdx < NUM_WRITER_BUFS; bufIdx++) {
        if (Fifo_put(envp->hWriterOutFifo, BufTab_getBuf(hBufTab, bufIdx)) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while(1) {

        /* Get an encoded buffer from the video thread */
        if (Fifo_get(envp->hWriterInFifo, &hOutBuf) < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (fwrite(Buffer_getUserPtr(hOutBuf),
                       Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                ERR("Error writing the encoded data to video file\n");
                cleanup(THREAD_FAILURE);
            }
        }
        else {
            printf("Warning, writer received 0 byte encoded frame\n");
        }

        /* Return buffer to capture thread */
        if (Fifo_put(envp->hWriterOutFifo, hOutBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }

    }

cleanup:

    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (outFile) {
        fclose(outFile);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    
    return status;
}
Example no. 12
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf, Buffer_Handle hsDstBuf)
{
    struct imp_convert  rsz;
	struct rsz_channel_config rsz_chan_config;
	struct rsz_single_shot_config rsz_ss_config;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    BufferGfx_Dimensions sdstDim;
    UInt32               srcOffset;
    UInt32               dstOffset;
    UInt32               sdstOffset;
    UInt32               srcSize;
    UInt32               dstSize;
    
    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);
    if (hsDstBuf) {
        BufferGfx_getDimensions(hsDstBuf, &sdstDim);
    }
    srcSize = srcDim.width * srcDim.height;
    dstSize = dstDim.width * dstDim.height;

    /* the resize operation to ColorSpace_UYVY is different from ColorSpace_YUV420PSEMI*/
    if (BufferGfx_getColorSpace(hSrcBuf) == ColorSpace_UYVY) {
        srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        sdstOffset = hsDstBuf?(sdstDim.y * sdstDim.lineLength + (sdstDim.x << 1)):0;

        rsz.in_buff.index     = -1;
        rsz.in_buff.buf_type  = IMP_BUF_IN;
        rsz.in_buff.offset    = Buffer_getPhysicalPtr(hSrcBuf) + srcOffset;
        rsz.in_buff.size      = Buffer_getSize(hSrcBuf);

        rsz.out_buff1.index    = -1;
        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf) + dstOffset;
        rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
        
        rsz.out_buff2.index    = -1;
        rsz.out_buff2.buf_type = IMP_BUF_OUT2;
        rsz.out_buff2.offset   = hsDstBuf?(Buffer_getPhysicalPtr(hsDstBuf) + sdstOffset):0;
        rsz.out_buff2.size     = hsDstBuf?Buffer_getSize(hsDstBuf):0;
        
        /* 
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset  & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        assert((rsz.out_buff2.offset & 0x1F) == 0);

        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        Buffer_setNumBytesUsed(hDstBuf, Buffer_getSize(hDstBuf));
        if (hsDstBuf) {
            Buffer_setNumBytesUsed(hsDstBuf, Buffer_getSize(hsDstBuf));
        }
    } else {
        /* configure for the ColorSpace_YUV420PSEMI*/
	    Dmai_clear(rsz_ss_config);
	    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
	    rsz_chan_config.chain = 0;
	    rsz_chan_config.len = 0;
	    rsz_chan_config.config = NULL; /* to set defaults in driver */
	    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
	    	Dmai_err0("Error in setting default configuration for single shot mode\n");
            return Dmai_EFAIL;
	    }

        /* default configuration setting in Resizer successful */
	    Dmai_clear(rsz_ss_config);
	    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
	    rsz_chan_config.chain = 0;
	    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
	    rsz_chan_config.config = &rsz_ss_config;

	    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
	    	Dmai_err0("Error in getting configuration from driver\n");
            return Dmai_EFAIL;
	    }

	    /* input params are set at the resizer */
	    rsz_ss_config.input.image_width  = srcDim.width;
	    rsz_ss_config.input.image_height = srcDim.height;
	    rsz_ss_config.input.ppln = rsz_ss_config.input.image_width + 8;
	    rsz_ss_config.input.lpfr = rsz_ss_config.input.image_height + 10;
	    rsz_ss_config.input.pix_fmt = IPIPE_420SP_Y;
	    rsz_ss_config.output1.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
	    rsz_ss_config.output1.width = dstDim.width;
	    rsz_ss_config.output1.height = dstDim.height;
	    rsz_ss_config.output2.enable = 0;

	    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
	    rsz_chan_config.chain = 0;
	    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
	    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
	    	Dmai_err0("Error in setting default configuration for single shot mode\n");
            return Dmai_EFAIL;
	    }
	    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
	    rsz_chan_config.chain = 0;
	    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);

	    /* read again and verify */
	    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
	    	Dmai_err0("Error in getting configuration from driver\n");
            return Dmai_EFAIL;
	    }

        /* execute for ColorSpace_YUV420PSEMI*/
        Dmai_clear(rsz);
        //srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        //dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        rsz.in_buff.buf_type  = IMP_BUF_IN;
        rsz.in_buff.index     = -1;
        rsz.in_buff.offset    = Buffer_getPhysicalPtr(hSrcBuf);
        //rsz.in_buff.size      = Buffer_getSize(hSrcBuf);
        rsz.in_buff.size      = srcSize;

        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.index    = -1;
        rsz.out_buff1.offset    = Buffer_getPhysicalPtr(hDstBuf);
        //rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
        rsz.out_buff1.size     = dstSize;
        
        /* 
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset  & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

	    /* input params are set at the resizer */
	    rsz_ss_config.input.image_width  = srcDim.width;
	    rsz_ss_config.input.image_height = srcDim.height>>1;
	    rsz_ss_config.input.ppln = rsz_ss_config.input.image_width + 8;
	    rsz_ss_config.input.lpfr = rsz_ss_config.input.image_height + 10;
	    rsz_ss_config.input.pix_fmt = IPIPE_420SP_C;
        rsz_ss_config.output1.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
	    rsz_ss_config.output1.enable = 1;
	    rsz_ss_config.output1.width = dstDim.width;
	    rsz_ss_config.output1.height = dstDim.height;
        rsz_ss_config.output2.enable = 0;

	    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
	    rsz_chan_config.chain = 0;
	    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
	    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
	    	Dmai_err0("Error in setting default configuration for single shot mode\n");
            return Dmai_EFAIL;
	    }
	    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
	    rsz_chan_config.chain = 0;
	    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);

	    /* read again and verify */
	    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
	    	Dmai_err0("Error in getting configuration from driver\n");
            return Dmai_EFAIL;
	    }

        Dmai_clear(rsz);
        //srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        //dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        rsz.in_buff.buf_type  = IMP_BUF_IN;
        rsz.in_buff.index     = -1;
        rsz.in_buff.offset    = Buffer_getPhysicalPtr(hSrcBuf)  + srcSize;
        rsz.in_buff.size      = srcSize>>1;

        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.index    = -1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf)  + dstSize;
        rsz.out_buff1.size     = dstSize>>1;
        
        /* 
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset  & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));

    }
    return Dmai_EOK;
}
Example no. 13
/******************************************************************************
 * main
 ******************************************************************************/
Int appMain(Args * args)
{
    Buffer_Attrs            bAttrs      = Buffer_Attrs_DEFAULT;
    Loader_Attrs            lAttrs      = Loader_Attrs_DEFAULT;
    AUDDEC1_Params          params      = Adec1_Params_DEFAULT;
    AUDDEC1_DynamicParams   dynParams   = Adec1_DynamicParams_DEFAULT;
    Time_Attrs              tAttrs      = Time_Attrs_DEFAULT;
    Adec1_Handle            hAd1        = NULL;
    Loader_Handle           hLoader     = NULL;
    Engine_Handle           hEngine     = NULL;
    Buffer_Handle           hOutBuf     = NULL;
    Time_Handle             hTime       = NULL;
    Buffer_Handle           hInBuf      = NULL;
    FILE                   *outFile     = NULL;
    Int                     numFrame    = 0;
    UInt32                  time;
    Int                     ret         = Dmai_EOK;
    Cpu_Device              device;

    printf("Starting application...\n");
        
    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr, "Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Initialize the codec engine run time */
    CERuntime_init();

    /* Initialize DMAI */
    Dmai_init();

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr, "Failed to determine target board\n");
        goto cleanup;
    }

    /* Open the output file */
    outFile = fopen(args->outFile, "wb");

    if (outFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create output file %s\n", args->outFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(outFile, vbuffer, _IOFBF, sizeof(vbuffer)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr, "Failed to setvbuf on file descriptor\n");
        goto cleanup;
    }

    /* Open the codec engine */
    hEngine = Engine_open(args->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr, "Failed to open codec engine %s\n", args->engineName);
        goto cleanup;
    }

    if (device == Cpu_Device_DM365 || device == Cpu_Device_OMAP3530 ||
        device == Cpu_Device_DM368 || device == Cpu_Device_DM3730) {
        params.dataEndianness = XDM_LE_16;
    }


    /* Create the AUDDEC1 based audio decoder */
    hAd1 = Adec1_create(hEngine, args->codecName, &params, &dynParams);

    if (hAd1 == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr, "Failed to create audio decoder\n");
        goto cleanup;
    }

    /* Align buffers to cache line boundary */    
    bAttrs.memParams.align = lAttrs.mParams.align = BUFSIZEALIGN; 
    
    /* Use cached buffers if requested */    
    if (args->cache) {
        bAttrs.memParams.flags = lAttrs.mParams.flags = Memory_CACHED;
    } 
    
    /* Ask the codec how much input data it needs */
    lAttrs.readSize = Adec1_getInBufSize(hAd1);

    /* Make the total ring buffer larger */
    lAttrs.readBufSize = Dmai_roundUp(lAttrs.readSize * 10, BUFSIZEALIGN);

    /* Increase the loader's stdio buffer size for better file I/O performance */
    lAttrs.vBufSize = VBUFSIZE;

    /* Create the file loader */
    hLoader = Loader_create(args->inFile, &lAttrs);

    if (hLoader == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr, "Failed to create loader\n");
        goto cleanup;
    }

    /* Create an output buffer for decoded data */
    hOutBuf = Buffer_create(
        Dmai_roundUp(Adec1_getOutBufSize(hAd1), BUFSIZEALIGN), &bAttrs);

    if (hOutBuf == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr, "Failed to create contiguous buffers\n");
        goto cleanup;
    }

    /* Prime the file loader */
    Loader_prime(hLoader, &hInBuf);

    while (numFrame++ < args->numFrames) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr, "Failed to reset timer\n");
                goto cleanup;
            }
        }

        if (args->cache) {
            /*
             *  To meet xDAIS DMA Rule 7, when input buffers are cached, we
             *  must writeback the cache into physical memory.  Also, per DMA
             *  Rule 7, we must invalidate the output buffer from
             *  cache before providing it to any xDAIS algorithm.
             */
            Memory_cacheWbInv(Buffer_getUserPtr(hInBuf),Buffer_getSize(hInBuf));

            /* Per DMA Rule 7, the output buffer cache lines must be invalidated */
            Memory_cacheInv(Buffer_getUserPtr(hOutBuf),Buffer_getSize(hOutBuf));

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get timer delta\n");
                    goto cleanup;
                }

                printf("Pre-process cache maintenance: %uus ", (Uns) time);
            }
        }

        /* Decode the audio buffer */
        ret = Adec1_process(hAd1, hInBuf, hOutBuf);

        if ((ret == Dmai_EFAIL)||
            (ret == Dmai_EBITERROR && Buffer_getNumBytesUsed(hInBuf) == 0)) {
            ret = Dmai_EFAIL;
            fprintf(stderr, "Failed to decode audio buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr, "Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Decode: %uus ", (Uns) time);
        }

        if (args->cache) {
            /* Writeback the outBuf. */
            Memory_cacheWb(Buffer_getUserPtr(hOutBuf), Buffer_getSize(hOutBuf));

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr, "Failed to get timer delta\n");
                    goto cleanup;
                }

                printf("Post-process cache write back: %uus ", (Uns) time);
            }
        }

        /* Load a new frame from the file system */
        Loader_getFrame(hLoader, hInBuf);

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Loader: %uus\n", (Uns) time);
        }

        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (numFrame >= args->startFrame) {
                printf("Frame %d: ", numFrame);
                if (writeFrame(hOutBuf, outFile) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            }
        }

        if (Buffer_getUserPtr(hInBuf) == NULL) {
            printf("Loader returned null, clip finished\n");
            break;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("Total: %uus\n", (unsigned int)time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hLoader) {
        Loader_delete(hLoader);
    }

    if (hAd1) {
        Adec1_delete(hAd1);
    }

    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    if (outFile) {
        fclose(outFile);
    }

    printf("End of application.\n");

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
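The writeFrame helper called in the loop above is not included in this listing. A minimal sketch of what such a helper might look like, assuming it simply writes the used portion of the buffer to the already open output file and reports the byte count (the name and exact behavior are assumptions):

static Int writeFrame(Buffer_Handle hBuf, FILE *outFile)
{
    Int numBytes = Buffer_getNumBytesUsed(hBuf);

    /* Write only the bytes the decoder actually produced */
    if (fwrite(Buffer_getUserPtr(hBuf), numBytes, 1, outFile) != 1) {
        fprintf(stderr, "Failed to write %d bytes to the output file\n", numBytes);
        return -1;
    }

    printf("Wrote %d bytes\n", numBytes);

    return numBytes;
}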
Example n. 14
/******************************************************************************
 * Sdec_process
 ******************************************************************************/
Int Sdec_process(Sdec_Handle hSd, Buffer_Handle hInBuf, Buffer_Handle hOutBuf)
{
    SPHDEC_DynamicParams    dynamicParams;
    SPHDEC_Status           decStatus;
    XDM_BufDesc             inBufDesc;
    XDM_BufDesc             outBufDesc;
    XDAS_Int32              inBufSizeArray[1];
    XDAS_Int32              outBufSizeArray[1];
    XDAS_Int32              status;
    SPHDEC_InArgs           inArgs;
    SPHDEC_OutArgs          outArgs;
    XDAS_Int8              *inPtr;
    XDAS_Int8              *outPtr;

    assert(hSd);
    assert(hInBuf);
    assert(hOutBuf);
    assert(Buffer_getUserPtr(hInBuf));
    assert(Buffer_getUserPtr(hOutBuf));
    assert(Buffer_getNumBytesUsed(hInBuf));
    assert(Buffer_getSize(hOutBuf));

    inPtr = Buffer_getUserPtr(hInBuf);
    outPtr = Buffer_getUserPtr(hOutBuf);

    inBufSizeArray[0]          = Buffer_getNumBytesUsed(hInBuf);
    outBufSizeArray[0]         = Buffer_getSize(hOutBuf);

    inBufDesc.bufSizes         = inBufSizeArray;
    inBufDesc.bufs             = &inPtr;
    inBufDesc.numBufs          = 1;

    outBufDesc.bufSizes        = outBufSizeArray;
    outBufDesc.bufs            = &outPtr;
    outBufDesc.numBufs         = 1;

    inArgs.size                = sizeof(SPHDEC_InArgs);
    inArgs.inBufferSize        = Buffer_getNumBytesUsed(hInBuf);

    outArgs.size               = sizeof(SPHDEC_OutArgs);

    /* Decode the speech buffer */
    status = SPHDEC_process(hSd->hDecode, &inBufDesc, &outBufDesc, &inArgs,
                            &outArgs);

    if (status != SPHDEC_EOK) {
        /* Preserve the process() status; the control() call below overwrites it */
        XDAS_Int32 ctrlStatus;

        decStatus.size = sizeof(SPHDEC_Status);
        dynamicParams.size = sizeof(SPHDEC_DynamicParams);
        ctrlStatus = SPHDEC_control(hSd->hDecode, XDM_GETSTATUS, &dynamicParams,
                                    &decStatus);
        if (ctrlStatus != SPHDEC_EOK) {
            Dmai_err1("XDM_GETSTATUS failed, status=%d\n", ctrlStatus);
            return Dmai_EFAIL;
        }

        if (status == SPHDEC_ERUNTIME ||
            XDM_ISFATALERROR(decStatus.extendedError)) {

            Dmai_err2("SPHDEC_process() failed with error (%d ext: 0x%x)\n",
                      (Int)status, (Uns) decStatus.extendedError);
            return Dmai_EFAIL;
        }
        else {
            Dmai_dbg1("SPHDEC_process() non-fatal error 0x%x\n",
                      (Uns) decStatus.extendedError);
            return Dmai_EBITERROR;
        }
    }

    /* G.711 expands each 8-bit companded sample to a 16-bit linear PCM sample,
       so the decoded data is exactly twice the input size. This only holds for G.711. */
    Buffer_setNumBytesUsed(hOutBuf, Buffer_getNumBytesUsed(hInBuf) * 2);

    return Dmai_EOK;
}
Example n. 15
/******************************************************************************
 * appMain
 ******************************************************************************/
Void appMain(Args * args)
{
    VIDENC_Params           params      = Venc_Params_DEFAULT;
    VIDENC_DynamicParams    dynParams   = Venc_DynamicParams_DEFAULT;
    BufferGfx_Attrs         gfxAttrs    = BufferGfx_Attrs_DEFAULT;
    Buffer_Attrs            bAttrs      = Buffer_Attrs_DEFAULT;
    Time_Attrs              tAttrs      = Time_Attrs_DEFAULT;
    Venc_Handle             hVe         = NULL;
    FILE                   *outFile     = NULL;
    FILE                   *inFile      = NULL;
    Engine_Handle           hEngine     = NULL;
    Time_Handle             hTime       = NULL;
    Int                     numFrame    = 0;
    Buffer_Handle           hOutBuf     = NULL;
    Buffer_Handle           hInBuf      = NULL;
    ColorSpace_Type         colorSpace;
    UInt32                  time;

    printf("Starting application...\n");
    
    /* Initialize the codec engine run time */
    CERuntime_init();
    
    /* Initialize DMAI */
    Dmai_init();
    
    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            printf("Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Open input file */
    inFile = fopen(args->inFile, "rb");
    
    if (inFile == NULL) {
        printf("Failed to open input file %s\n", args->inFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(inFile, vbufferIn, _IOFBF, sizeof(vbufferIn)) != 0) {
        printf("Failed to setvbuf on input file descriptor\n");
        goto cleanup;   
    }   

    
    /* Open output file */
    outFile = fopen(args->outFile, "wb");

    if (outFile == NULL) {
        printf("Failed to open output file %s\n", args->outFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(outFile, vbufferOut, _IOFBF, sizeof(vbufferOut)) != 0) {
        printf("Failed to setvbuf on output file descriptor\n");
        goto cleanup;   
    }
    
    /* Open the codec engine */
    hEngine = Engine_open(args->engineName, NULL, NULL);

    if (hEngine == NULL) {
        printf("Failed to open codec engine: %s\n", args->engineName);
        goto cleanup;
    }

    params.maxWidth = args->width;
    params.maxHeight = args->height;

    /* Set up codec parameters depending on bit rate */
    if (args->bitRate < 0) {
        /* Variable bit rate */
        params.rateControlPreset = IVIDEO_NONE;

        /*
         * For variable bit rate, use a placeholder bit rate value (> 0),
         * since the codec ignores it in this mode.
         */
        params.maxBitRate        = 2000000;
    }
    else {
        /* Constant bit rate */
        params.rateControlPreset = IVIDEO_LOW_DELAY;
        params.maxBitRate        = args->bitRate;
    }

    params.inputChromaFormat = XDM_YUV_422ILE;
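    /* XDM_YUV_422ILE is interleaved 4:2:2, i.e. the UYVY format this app reads */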

    dynParams.targetBitRate = params.maxBitRate;
    dynParams.inputWidth = params.maxWidth;
    dynParams.inputHeight = params.maxHeight;
    
    /* Create the video encoder */
    hVe = Venc_create(hEngine, args->codecName, &params, &dynParams);

    if (hVe == NULL) {
        printf("Failed to create video encoder: %s\n", args->codecName);
        goto cleanup;
    }

    /* Only the UYVY colorspace is supported in this application */
    colorSpace = ColorSpace_UYVY;

    /* Align buffers to cache line boundary */    
    gfxAttrs.bAttrs.memParams.align = bAttrs.memParams.align = BUFSIZEALIGN; 
    
    /* Use cached buffers if requested */    
    if (args->cache) {
        gfxAttrs.bAttrs.memParams.flags = bAttrs.memParams.flags 
            = Memory_CACHED;
    } 
    
    /* Calculate the buffer attributes */
    gfxAttrs.dim.width = args->width;
    gfxAttrs.dim.height = args->height;
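    /* UYVY uses 2 bytes per pixel, so the line length is at least width * 2 bytes */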
    gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(args->width, colorSpace);
    gfxAttrs.colorSpace = colorSpace;

    /* Create input buffer */
    hInBuf = Buffer_create(Dmai_roundUp(Venc_getInBufSize(hVe), BUFSIZEALIGN),
                           BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hInBuf == NULL) {
        printf("Failed to allocate contiguous buffer\n");
        goto cleanup;
    }

    /* Create output buffer */
    hOutBuf = Buffer_create(Dmai_roundUp(Venc_getOutBufSize(hVe), BUFSIZEALIGN),
        &bAttrs);

    if (hOutBuf == NULL) {
        printf("Failed to create contiguous buffer\n");
        goto cleanup;
    }

    while (numFrame++ < args->numFrames) {
        /* Read a yuv input frame */
        printf("Frame %d: ", numFrame);
        if (readFrameUYVY(hInBuf, inFile) < 0) {
            goto cleanup;        
        }
        
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                printf("Failed to reset timer\n");
                goto cleanup;
            }
        }

        if (args->cache) {
            /*  
             *  To meet xDAIS DMA Rule 7, when input buffers are cached, we 
             *  must writeback the cache into physical memory.  Also, per DMA 
             *  Rule 7, we must invalidate the output buffer from
             *  cache before providing it to any xDAIS algorithm.
             */
            Memory_cacheWbInv(Buffer_getUserPtr(hInBuf), Buffer_getSize(hInBuf));
    
            /* Per DMA Rule 7, the output buffer cache lines must be invalidated */
            Memory_cacheInv(Buffer_getUserPtr(hOutBuf), Buffer_getSize(hOutBuf));
    
            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }
    
                printf("Pre-process cache maintenance: %uus \n", (Uns) time);
            }
        }

        /* Encode the video buffer */
        if (Venc_process(hVe, hInBuf, hOutBuf) < 0) {
            printf("Failed to encode video buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                printf("Failed to get encode time\n");
                goto cleanup;
            }

            printf("[%d] Encode: %uus\n", numFrame, (Uns)time);
        }

        if (args->cache) {
            /* Writeback the outBuf. */
            Memory_cacheWb(Buffer_getUserPtr(hOutBuf), Buffer_getSize(hOutBuf));
    
            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }
    
                printf("Post-process cache write back: %uus \n", (Uns) time);
            }
        }
        
        /* Write the encoded frame to the file system */
        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (fwrite(Buffer_getUserPtr(hOutBuf),
                       Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                printf("Failed to write encoded video data to file\n");
                goto cleanup;
            }
        }
        
        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                printf("Failed to get timer total\n");
                goto cleanup;
            }

            printf("Total: %uus\n", (unsigned int)time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hInBuf) {
        Buffer_delete(hInBuf);
    }

    if (hVe) {
        Venc_delete(hVe);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    if (inFile) {
        fclose(inFile);
    }
   
    if (outFile) {
        fclose(outFile);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    printf("End of application.\n");
    
    return;
}
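The readFrameUYVY helper used above is likewise not included in this listing. A minimal sketch, assuming a headerless raw UYVY input file (2 bytes per pixel) that is read one line at a time so the graphics buffer's line pitch is respected (the name and behavior are assumptions):

static Int readFrameUYVY(Buffer_Handle hBuf, FILE *inFile)
{
    BufferGfx_Dimensions dim;
    Int8 *ptr;
    Int   line;

    BufferGfx_getDimensions(hBuf, &dim);

    /* Start at the active area of the graphics buffer */
    ptr = Buffer_getUserPtr(hBuf) + dim.y * dim.lineLength + dim.x * 2;

    /* Copy one line at a time in case lineLength exceeds width * 2 */
    for (line = 0; line < dim.height; line++) {
        if (fread(ptr, dim.width * 2, 1, inFile) != 1) {
            printf("Failed to read a full frame from the input file\n");
            return -1;
        }

        ptr += dim.lineLength;
    }

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return 0;
}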
Example n. 16
/******************************************************************************
 * gst_tiaudenc1_encode_thread
 *     Call the audio codec to process a full input buffer
 ******************************************************************************/
static void* gst_tiaudenc1_encode_thread(void *arg)
{
    GstTIAudenc1   *audenc1    = GST_TIAUDENC1(gst_object_ref(arg));
    void          *threadRet = GstTIThreadSuccess;
    Buffer_Handle  hDstBuf;
    Int32          encDataConsumed;
    GstBuffer     *encDataWindow = NULL;
    GstClockTime   encDataTime;
    Buffer_Handle  hEncDataWindow;
    GstBuffer     *outBuf;
    GstClockTime   sampleDuration;
    guint          sampleRate;
    guint          numSamples;
    Int            bufIdx;
    Int            ret;

    GST_LOG("starting audenc encode thread\n");

    /* Start the codec */
    ret = gst_tiaudenc1_codec_start(audenc1);

    /* Notify main thread that it is ok to continue initialization */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    if (ret == FALSE) {
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
        ("Failed to start codec\n"), (NULL));
        goto thread_exit;
    }

    while (TRUE) {

        /* Obtain a window of raw audio data to encode */
        encDataWindow  = gst_ticircbuffer_get_data(audenc1->circBuf);
        encDataTime    = GST_BUFFER_TIMESTAMP(encDataWindow);
        hEncDataWindow = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encDataWindow);

        /* Check whether there is enough raw audio data to send to the codec.
         * The last window of data may not be large enough to meet the codec's
         * minimum input size.  If so, simply discard the remaining data
         * rather than padding it with bogus data.
         */
        if (GST_BUFFER_SIZE(encDataWindow) <
            Aenc1_getInBufSize(audenc1->hAe)) {
            GST_LOG("Not enough audio data remains\n");
            if (!audenc1->drainingEOS) {
                goto thread_failure;
            }
            goto thread_exit;
        }

        /* Obtain a free output buffer for the encoded data */
        if (!(hDstBuf = gst_tidmaibuftab_get_buf(audenc1->hOutBufTab))) {
            GST_ELEMENT_ERROR(audenc1, RESOURCE, READ,
                ("Failed to get a free contiguous buffer from BufTab\n"),
                (NULL));
            goto thread_exit;
        }

        /* Invoke the audio encoder */
        GST_LOG("Invoking the audio encoder at 0x%08lx with %u bytes\n",
            (unsigned long)Buffer_getUserPtr(hEncDataWindow),
            GST_BUFFER_SIZE(encDataWindow));
        ret             = Aenc1_process(audenc1->hAe, hEncDataWindow, hDstBuf);
        encDataConsumed = Buffer_getNumBytesUsed(hEncDataWindow);

        if (ret < 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
            ("Failed to encode audio buffer\n"), (NULL));
            goto thread_failure;
        }

        /* If the codec consumed no input data we cannot advance to the next frame */
        if (ret == Dmai_EBITERROR && encDataConsumed == 0) {
            GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE,
            ("Fatal bit error\n"), (NULL));
            goto thread_failure;
        }

        if (ret > 0) {
            GST_LOG("Aenc1_process returned success code %d\n", ret); 
        }

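        /* The input is 16-bit (2-byte) PCM per channel */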
        sampleRate     = audenc1->samplefreq;
        numSamples     = encDataConsumed / (2 * audenc1->channels) ;
        sampleDuration = GST_FRAMES_TO_CLOCK_TIME(numSamples, sampleRate);

        /* Release the reference buffer, and tell the circular buffer how much
         * data was consumed.
         */
        ret = gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow,
                  encDataConsumed);
        encDataWindow = NULL;

        if (!ret) {
            goto thread_failure;
        }

        /* Set the source pad capabilities based on the encoded frame
         * properties.
         */
        gst_tiaudenc1_set_source_caps(audenc1);

        /* Create a DMAI transport buffer object to carry a DMAI buffer to
         * the source pad.  The transport buffer knows how to release the
         * buffer for re-use in this element when the source pad calls
         * gst_buffer_unref().
         */
        outBuf = gst_tidmaibuffertransport_new(hDstBuf, audenc1->hOutBufTab, NULL, NULL);
        gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf),
            Buffer_getNumBytesUsed(hDstBuf));
        gst_buffer_set_caps(outBuf, GST_PAD_CAPS(audenc1->srcpad));

        /* Set timestamp on output buffer */
        if (audenc1->genTimeStamps) {
            GST_BUFFER_DURATION(outBuf)     = sampleDuration;
            GST_BUFFER_TIMESTAMP(outBuf)    = encDataTime;
        }
        else {
            GST_BUFFER_TIMESTAMP(outBuf)    = GST_CLOCK_TIME_NONE;
        }

        /* Tell circular buffer how much time we consumed */
        gst_ticircbuffer_time_consumed(audenc1->circBuf, sampleDuration);

        /* Push the transport buffer to the source pad */
        GST_LOG("pushing buffer to source pad with timestamp : %"
                GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT,
                GST_TIME_ARGS (GST_BUFFER_TIMESTAMP(outBuf)),
                GST_TIME_ARGS (GST_BUFFER_DURATION(outBuf)));

        if (gst_pad_push(audenc1->srcpad, outBuf) != GST_FLOW_OK) {
            GST_DEBUG("push to source pad failed\n");
            goto thread_failure;
        }

        /* Release buffers no longer in use by the codec */
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffer_CODEC_FREE);
    }

thread_failure:

    gst_tithread_set_status(audenc1, TIThread_CODEC_ABORTED);
    gst_ticircbuffer_consumer_aborted(audenc1->circBuf);
    threadRet = GstTIThreadFailure;

thread_exit:

    /* Re-claim any buffers owned by the codec */
    bufIdx = BufTab_getNumBufs(GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab));

    while (bufIdx-- > 0) {
        Buffer_Handle hBuf = BufTab_getBuf(
            GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab), bufIdx);
        Buffer_freeUseMask(hBuf, gst_tidmaibuffer_CODEC_FREE);
    }

    /* Release the last buffer we retrieved from the circular buffer */
    if (encDataWindow) {
        gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow, 0);
    }

    /* We have to wait to shut down this thread until we can guarantee that
     * no more input buffers will be queued into the circular buffer
     * (we're about to delete it).  
     */
    Rendezvous_meet(audenc1->waitOnEncodeThread);
    Rendezvous_reset(audenc1->waitOnEncodeThread);

    /* Notify main thread that we are done draining before we shutdown the
     * codec, or we will hang.  We proceed in this order so the EOS event gets
     * propagated downstream before we attempt to shut down the codec.  The
     * codec-shutdown process will block until all BufTab buffers have been
     * released, and downstream-elements may hang on to buffers until
     * they get the EOS.
     */
    Rendezvous_force(audenc1->waitOnEncodeDrain);

    /* Shut down the codec */
    if (gst_tiaudenc1_codec_stop(audenc1) < 0) {
        GST_ERROR("failed to stop codec\n");
        GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED,
        ("Failed to stop codec\n"), (NULL));
    }

    gst_object_unref(audenc1);

    GST_LOG("exit audio encode_thread (%d)\n", (int)threadRet);
    return threadRet;
}