Code Example #1
File: Resize.c  Project: Soorma07/dm368-dvsdk-reprise
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#ifdef CONFIG_DM365_IPIPE
    struct imp_convert  rsz;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    UInt32               srcOffset;
    UInt32               dstOffset;
    
    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
    dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);

    rsz.in_buff.index     = -1;
    rsz.in_buff.buf_type  = IMP_BUF_IN;
    rsz.in_buff.offset    = (UInt32) Buffer_getUserPtr(hSrcBuf) + srcOffset;
    rsz.in_buff.size      = Buffer_getSize(hSrcBuf);

    rsz.out_buff1.index    = -1;
    rsz.out_buff1.buf_type = IMP_BUF_OUT1;
    rsz.out_buff1.offset   = (UInt32) Buffer_getUserPtr(hDstBuf) + dstOffset;
    rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
    
    /* 
     * The IPIPE requires that the memory offsets of the input and output
     * buffers start on 32-byte boundaries.
     */
    assert((rsz.in_buff.offset  & 0x1F) == 0);
    assert((rsz.out_buff1.offset & 0x1F) == 0);

    /* Start IPIPE operation */
    if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
        Dmai_err0("Failed RSZ_RESIZE\n");
        return Dmai_EFAIL;
    }

    Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));
    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif
}
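
A minimal caller sketch for the function above. It assumes hResize comes from Resize_create() (not shown in this collection) and that both buffers already meet the 32-byte pitch and offset requirements; Resize_config is the companion routine listed later, while resizeOneFrame and its error handling are purely illustrative.

/* Hypothetical usage sketch: configure the resizer for this source/destination
 * geometry, then run one IPIPE resize pass. */
Int resizeOneFrame(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
    if (Resize_config(hResize, hSrcBuf, hDstBuf) < 0) {
        Dmai_err0("Resize_config failed\n");
        return Dmai_EFAIL;
    }

    if (Resize_execute(hResize, hSrcBuf, hDstBuf) < 0) {
        Dmai_err0("Resize_execute failed\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}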
Code Example #2
File: appMain.c  Project: sv99/DVSDK
/******************************************************************************
 * readFrameUYVY
 ******************************************************************************/
Int readFrameUYVY(Buffer_Handle hBuf, FILE *outFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);
    Int y;

    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);

    for (y = 0; y < dim.height; y++) {
        if (fread(ptr, dim.width * 2, 1, outFile) != 1) {
            fprintf(stderr,"Failed to read data from file\n");
            return -1;
        }

        ptr += dim.lineLength;
    }

    printf("Read UYVY frame size %d (%dx%d) from file\n",
           (Int) (dim.width * 2 * dim.height),
           (Int) dim.width, (Int) dim.height);

    Buffer_setNumBytesUsed(hBuf, dim.width * 2 * dim.height);

    return 0;
}
Code Example #3
File: appMain.c  Project: sv99/DVSDK
/******************************************************************************
 * readFrame420P
 ******************************************************************************/
Int readFrame420P(Buffer_Handle hBuf, FILE *outFile, Int imageHeight)
{
    Int8 *yPtr = Buffer_getUserPtr(hBuf);
    Int8 *cbcrPtr;
    Int y, x;

    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);

    /* Read Y plane */
    for (y = 0; y < imageHeight; y++) {
        if (fread(yPtr, dim.width, 1, outFile) != 1) {
            fprintf(stderr,"Failed to read data from file\n");
            return -1;
        }

        yPtr += dim.lineLength;
    }

    /* Read Cb samples into the even offsets of the interleaved CbCr plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + Buffer_getSize(hBuf) * 2 / 3;
    for (y = 0; y < imageHeight / 2; y++) {
        for (x = 0; x < dim.width; x += 2) {
            if (fread(&cbcrPtr[x], 1, 1, outFile) != 1) {
                fprintf(stderr,"Failed to read data from file\n");
                return -1;
            }
        }
        cbcrPtr += dim.lineLength;
    }

    /* Read Cr samples into the odd offsets of the interleaved CbCr plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + Buffer_getSize(hBuf) * 2 / 3;
    for (y = 0; y < imageHeight / 2; y++) {
        for (x = 1; x < dim.width; x += 2) {
            if (fread(&cbcrPtr[x], 1, 1, outFile) != 1) {
                fprintf(stderr,"Failed to read data from file\n");
                return -1;
            }
        }
        cbcrPtr += dim.lineLength;
    }

    printf("Read 420P frame size %d (%dx%d) from file\n",
           (Int) (dim.width * 3 / 2 * imageHeight),
           (Int) dim.width, (Int) imageHeight);

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return 0;
}
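
Each planar 4:2:0 frame consumed by readFrame420P occupies width * imageHeight * 3 / 2 bytes in the file. A small sketch of that arithmetic, assuming only standard C stdio; the helper name and its use for seeking to a frame index are hypothetical.

/* Illustrative helper: position the file at frame 'frameNum' before calling
 * readFrame420P.  The 3/2 factor matches the planar 4:2:0 layout. */
Int seekToFrame420P(FILE *inFile, Int width, Int imageHeight, Int frameNum)
{
    long frameSize = (long) width * imageHeight * 3 / 2;

    if (fseek(inFile, frameSize * frameNum, SEEK_SET) != 0) {
        fprintf(stderr, "Failed to seek to frame %d\n", frameNum);
        return -1;
    }

    return 0;
}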
Code Example #4
File: appMain.c  Project: Soorma07/dvsdk-dmai
/******************************************************************************
 * writeFrameUYVY
 ******************************************************************************/
static Int writeFrameUYVY(Buffer_Handle hBuf, FILE *outFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);

    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);

    if (fwrite(ptr, 1, dim.width * dim.height * 2, outFile) != 
        dim.width * dim.height * 2) {
      fprintf(stderr,"Failed to write data to disk\n");
      return -1;
    }

    printf("Wrote UYVY frame size %d (%dx%d) to disk\n",
           (Int) (dim.width * 2 * dim.height),
           (Int) dim.width, (Int) dim.height);

    return 0;
}
Code Example #5
File: appMain.c  Project: sv99/DVSDK
/******************************************************************************
 * processReconData
 *    Transform reconstructed buffer data into a UYVY frame.  The format of
 *    the reconstructed buffer data is expected to conform to the specification
 *    found in "MPEG4 Simple Profile Encoder Codec on DM355 User's Guide"
 *    (SPRUFE4C), and may not work for other codecs.
 ******************************************************************************/
Void processReconData(IVIDEO1_BufDesc* reconBufs, Buffer_Handle hSrcBuf,
                      Buffer_Handle hDstBuf)
{
    Int16                 mbSizeY;
    Int16                 mbSizeX;
    Uint32                lumaColLength;
    Uint32                chromaColSize;
    Uint32                UYVYRowLength;
    Uint32                UYVYMbSize;
    UInt8                *lum_buf;
    UInt8                *chr_buf;
    UInt8                *curr_mb;
    UInt8                *curr_lum_mb;
    UInt8                *curr_chroma_mb;
    Int16                 i, j, k, l;
    BufferGfx_Dimensions  dim;

    /*
     * A single Master Block is 16x16 pixels.  Get our master block dimensions
 * by dividing the pixel height and width by 16.
     */
    BufferGfx_getDimensions(hSrcBuf, &dim);

    mbSizeY = dim.height >> 4;
    mbSizeX = dim.width  >> 4;

    /*
     * Our luma buffer is a series of 16x16 byte blocks, and our chroma buffer
     * is a series of 16x8 byte blocks.  Each block corresponds to information
     * for one master block.  The first block in each buffer contains header
     * information.  Set lum_buf and chr_buf to the first block after the
     * header.
     */
    lum_buf = (UInt8*) (reconBufs->bufDesc[0].buf + 16 * 16);
    chr_buf = (UInt8*) (reconBufs->bufDesc[1].buf + 16 * 8);

    /*
     * The luma and chroma buffers are constructed in column-major order.
     * The blocks for a single column are followed by two padding blocks
     * before the next column starts.  Set lumaColLength and chromaColSize
     * to the number of bytes that must be skipped over to get to the next
     * column in the corresponding buffer.
     */
    lumaColLength = (16*16) * (mbSizeY + 2);
    chromaColSize = (16*8)  * (mbSizeY + 2);

    /*
     * Calculate the number of bytes that must be skipped over to go to the
     * next row in the reconstructed UYVY frame.  Also calculate how many
     * bytes in the UYVY file are needed to represent a single master block.
     */
    UYVYRowLength = 32 * mbSizeX;
    UYVYMbSize    = 32 * 16;

    /*
     * Copy the reconstructed buffer information into a UYVY frame.
     */
    for (i = 0; i < mbSizeX; i++) {
        for (j = 0; j < mbSizeY; j++) {

            /* Calculate input and output buffer offsets for the current */
            /* master block                                              */
            curr_lum_mb    = lum_buf + (lumaColLength * i) + (256 * j);
            curr_chroma_mb = chr_buf + (chromaColSize * i) + (128 * j);
            curr_mb        = (UInt8 *) Buffer_getUserPtr(hDstBuf) +
                             (j * (UYVYMbSize * mbSizeX)) + (i * 32);

            /* Copy Luma information */
            for (k = 0; k < 16; k++) {
                for (l = 0; l < 16; l++) {
                    curr_mb[(k * UYVYRowLength) + (l * 2) + 1] =
                        curr_lum_mb[k * 16 + l];
                }
            }

            /* Copy Chroma information */
            for (k = 0; k < 8; k++) {
                for (l = 0; l < 16; l++) {
                    curr_mb[((k * 2) * UYVYRowLength) + (l * 2)] =
                        curr_chroma_mb[k * 16 + l];
                    curr_mb[((k * 2 + 1) * UYVYRowLength) + (l * 2)] =
                        curr_chroma_mb[k * 16 + l];
                }
            }
        }
    }

    Buffer_setNumBytesUsed(hDstBuf, dim.width * dim.height * 2);
}
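
A worked example of the layout arithmetic above, using an assumed 720x480 source frame (the numbers are illustrative, not from the source):

/* Assumed 720x480 input:
 *   mbSizeY       = 480 >> 4           = 30
 *   mbSizeX       = 720 >> 4           = 45
 *   lumaColLength = (16*16) * (30 + 2) = 8192 bytes per luma column
 *   chromaColSize = (16*8)  * (30 + 2) = 4096 bytes per chroma column
 *   UYVYRowLength = 32 * 45            = 1440 bytes per UYVY row
 *   UYVYMbSize    = 32 * 16            = 512  bytes of UYVY data per master block
 */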
Code Example #6
File: capture.c  Project: ChrisX34/stuff
Void CapBuf_blackFill(Buffer_Handle hBuf)
{
    switch (BufferGfx_getColorSpace(hBuf)) {
        case ColorSpace_YUV422PSEMI:
        {
            Int8  *yPtr     = Buffer_getUserPtr(hBuf);
            Int32  ySize    = Buffer_getSize(hBuf) / 2;
            Int8  *cbcrPtr  = yPtr + ySize;
            Int    bpp = ColorSpace_getBpp(ColorSpace_YUV422PSEMI);
            Int    y;
            BufferGfx_Dimensions dim;
            UInt32 offset;

            BufferGfx_getDimensions(hBuf, &dim);
            offset = dim.y * dim.lineLength + dim.x * bpp / 8;
            for (y = 0; y < dim.height; y++) {
                memset(yPtr + offset, 0x0, dim.width * bpp / 8);
                yPtr += dim.lineLength;
            }

            for (y = 0; y < dim.height; y++) {
                memset(cbcrPtr + offset, 0x80, dim.width * bpp / 8);
                cbcrPtr += dim.lineLength;
            }

            break;
        }

        case ColorSpace_YUV420PSEMI:
        {
            Int8  *yPtr     = Buffer_getUserPtr(hBuf);
            Int32  ySize    = Buffer_getSize(hBuf) * 2 / 3;
            Int8  *cbcrPtr  = yPtr + ySize;
            Int    bpp = ColorSpace_getBpp(ColorSpace_YUV420PSEMI);
            Int    y;
            BufferGfx_Dimensions dim;

            BufferGfx_getDimensions(hBuf, &dim);
            yPtr += dim.y * dim.lineLength + dim.x * bpp / 8;
            for (y = 0; y < dim.height; y++) {
                memset(yPtr, 0x0, dim.width * bpp / 8);
                yPtr += dim.lineLength;
            }

            cbcrPtr += dim.y * dim.lineLength / 2 + dim.x * bpp / 8;
            for (y = 0; y < dim.height / 2; y++) {
                memset(cbcrPtr, 0x80, dim.width * bpp / 8);
                cbcrPtr += dim.lineLength;
            }
            
            break;
        }

        case ColorSpace_UYVY:
        {
            Int32 *bufPtr = (Int32*)Buffer_getUserPtr(hBuf);
            Int    bpp    = ColorSpace_getBpp(ColorSpace_UYVY);
            Int    i, j;
            BufferGfx_Dimensions dim;

            BufferGfx_getDimensions(hBuf, &dim);
            bufPtr += (dim.y * dim.lineLength + dim.x * bpp / 8) / sizeof(Int32);

            /* Make sure display buffer is 4-byte aligned */
            assert((((UInt32) bufPtr) & 0x3) == 0);

            for (i = 0; i < dim.height; i++) {
                for (j = 0; j < dim.width / 2; j++) {
                    bufPtr[j] = UYVY_BLACK;
                }
                bufPtr += dim.lineLength / sizeof(Int32);
            }

            break;
        }

        case ColorSpace_RGB565:
        {
            memset(Buffer_getUserPtr(hBuf), 0, Buffer_getSize(hBuf));
            break;
        }

        default:
        {
                ERR("Unsupported color space (%d) for _Dmai_blackFill\n",
                BufferGfx_getColorSpace(hBuf));
            break;
        }
    }
}
Code Example #7
/******************************************************************************
 * Framecopy_accel_config
 ******************************************************************************/
Int Framecopy_accel_config(Framecopy_Handle hFc,
                           Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
    vdce_params_t params;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    Int width, height;

    /* Buffer needs to be graphics buffers */
    if (Buffer_getType(hSrcBuf) != Buffer_Type_GRAPHICS ||
        Buffer_getType(hDstBuf) != Buffer_Type_GRAPHICS) {

        Dmai_err0("Src and dst buffers need to be graphics buffers\n");
        return Dmai_EINVAL;
    }

    /* Sanity check buffer color space */
    if (BufferGfx_getColorSpace(hSrcBuf) !=
        BufferGfx_getColorSpace(hDstBuf)) {

        Dmai_err0("Src and dst buffers need to have same colorspace\n");
        return Dmai_EINVAL;
    }

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    /* Source and destination width must be even */
    if (dstDim.width % 2 || srcDim.width % 2) {
        Dmai_err2("Output (%d) and input width (%d) must be even\n",
                  dstDim.width, srcDim.width);
        return Dmai_EINVAL;
    }

    /* Pitches must be a multiple of 8 */
    if (dstDim.lineLength % 8 || srcDim.lineLength % 8) {
        Dmai_err2("Dst (%ld) and src (%ld) pitch must be a multiple of 8\n",
                  dstDim.lineLength, srcDim.lineLength);
        return Dmai_EINVAL;
    }

    /* Select the smallest resolution */
    width = srcDim.width < dstDim.width ? srcDim.width : dstDim.width;
    height = srcDim.height < dstDim.height ? srcDim.height : dstDim.height;

    Dmai_dbg2("Configuring resizer to copy image resolution %dx%d\n",
              width, height);

    params.vdce_mode = VDCE_OPERATION_RESIZING;

    if (ioctl(hFc->fd, VDCE_GET_DEFAULT, &params) < 0) {
        Dmai_err0("default params failed error.\n");
        return Dmai_EFAIL;
    }

    switch (BufferGfx_getColorSpace(hSrcBuf)) {
        case ColorSpace_YUV422PSEMI:
            params.vdce_mode_params.rsz_params.rsz_mode = VDCE_MODE_422;
            /* Speed up the conversion, note that this value hangs the resizer
             * for 420P color format 
             */
            params.common_params.prcs_unit_value = 256;
            break;
        case ColorSpace_YUV420PSEMI:
            params.vdce_mode_params.rsz_params.rsz_mode = VDCE_MODE_420;
            break;
        default:
            Dmai_err0("Colorspace format not supported\n");
            return Dmai_ENOTIMPL;
    }
 
    params.common_params.src_hsz_luminance = width;
    params.common_params.src_vsz_luminance = height;

    params.common_params.dst_hsz_luminance = width;
    params.common_params.dst_vsz_luminance = height;

    params.common_params.src_processing_mode = VDCE_PROGRESSIVE;
    params.common_params.src_mode = VDCE_FRAME_MODE;
    params.common_params.res_mode = VDCE_FRAME_MODE;

    /* call ioctl to set parameters */
    if (ioctl(hFc->fd, VDCE_SET_PARAMS, &params) < 0) {
        Dmai_err0("VDCE_SET_PARAMS failed \n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}
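
The configure-then-execute pattern here matches the Framecopy_config/Framecopy_execute calls that appear elsewhere in this collection. A minimal sketch, assuming hFc was obtained from Framecopy_create() with the desired accel attribute; copyOneFrame itself is illustrative only.

/* Hypothetical usage sketch: configure the copy job, then copy one frame. */
Int copyOneFrame(Framecopy_Handle hFc,
                 Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
    if (Framecopy_config(hFc, hSrcBuf, hDstBuf) < 0) {
        Dmai_err0("Framecopy_config failed\n");
        return Dmai_EFAIL;
    }

    if (Framecopy_execute(hFc, hSrcBuf, hDstBuf) < 0) {
        Dmai_err0("Framecopy_execute failed\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}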
Code Example #8
File: gsttiprepencbuf.c  Project: xieran1988/parents
/*****************************************************************************
 * gst_tiprepencbuf_copy_input
 *  Make the input data in src available in the physically contiguous memory
 *  in dst in the best way possible.  Preferably an accelerated copy or
 *  color conversion.
 ****************************************************************************/
static Int
gst_tiprepencbuf_copy_input(GstTIPrepEncBuf * prepencbuf,
    Buffer_Handle hDstBuf, GstBuffer * src)
{
    Framecopy_Attrs  fcAttrs = Framecopy_Attrs_DEFAULT;
    gboolean         accel   = FALSE;
    Buffer_Handle    hInBuf  = NULL;
    Int              ret     = -1;

#if defined(Platform_dm365) || defined(Platform_dm368)
    BufferGfx_Dimensions dim;
#endif

    /* Check to see if we need to execute ccv on dm6467 */
    if (prepencbuf->device == Cpu_Device_DM6467 &&
        prepencbuf->srcColorSpace == ColorSpace_YUV422PSEMI) {
        return gst_tiprepencbuf_422psemi_420psemi(hDstBuf, src, prepencbuf);
    }

    GST_LOG("gst_tiphyscontig_copy_input - begin\n");

    if (prepencbuf->hFc == NULL) {
        /* Enable the accelerated framecopy based on contiguousInputFrame.
         * If accel is FALSE, DMAI falls back to a regular memcpy; otherwise
         * it uses the HW-accelerated framecopy.
         */

        /* If we received a DMAI transport buffer, enable HW acceleration */
        if (GST_IS_TIDMAIBUFFERTRANSPORT(src)) {
            accel = TRUE;
        }
        else {
            accel = prepencbuf->contiguousInputFrame;
        }

        fcAttrs.accel = accel;

        prepencbuf->hFc = Framecopy_create(&fcAttrs);
        if (prepencbuf->hFc == NULL) {
            GST_ERROR("failed to create framecopy handle\n");
            goto exit;
        }

        GST_INFO("HW accel framecopy: %s\n", accel ? "enabled" : "disabled");
    }

    /* Prepare input buffer */
    hInBuf = gst_tiprepencbuf_convert_gst_to_dmai(prepencbuf, src, TRUE);
    if (hInBuf == NULL) {
        GST_ERROR("failed to get dmai buffer\n");
        goto exit;
    }

#if defined(Platform_dm365) || defined(Platform_dm368)
    /* Handle resizer 32-byte issue on DM365 platform */
    if (prepencbuf->device == Cpu_Device_DM365 ||
        prepencbuf->device == Cpu_Device_DM368) {
        if (prepencbuf->srcColorSpace == ColorSpace_YUV420PSEMI) {
            BufferGfx_getDimensions(hInBuf, &dim);
            dim.lineLength = Dmai_roundUp(dim.lineLength, 32);
            BufferGfx_setDimensions(hInBuf, &dim);
        }
    }
#endif

    /* Prepare output buffer */
    if (Framecopy_config(prepencbuf->hFc, hInBuf, hDstBuf) < 0) {
        GST_ERROR("failed to configure framecopy\n");
        goto exit;
    }

    if (Framecopy_execute(prepencbuf->hFc, hInBuf, hDstBuf) < 0) {
        GST_ERROR("failed to execute framecopy\n");
        goto exit;
    }

    ret = GST_BUFFER_SIZE(src);

exit:
    if (hInBuf) {
        Buffer_delete(hInBuf);
    }

    GST_LOG("gst_tiprepencbuf_copy_input - end\n");
    return ret;
}
Code Example #9
File: gsttidmaivideosink.c  Project: sv99/gst-ti-dmai
/*******************************************************************************
 * gst_tidmaivideosink_render
 *******************************************************************************/
static GstFlowReturn gst_tidmaivideosink_render(GstBaseSink * bsink,
                         GstBuffer * buf)
{
    BufferGfx_Attrs       gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Buffer_Handle         hDispBuf  = NULL;
    Buffer_Handle         inBuf     = NULL;
    BufferGfx_Dimensions  inDimSave;
    GstTIDmaiVideoSink   *sink      = GST_TIDMAIVIDEOSINK_CAST(bsink);

    GST_DEBUG("Begin, buffer %p",buf);

    /* The base sink sends us the first buffer twice, so avoid processing
     * it again; Display_put may fail in that case when using
     * pad allocation.
     */
    if (sink->prerolledBuffer == buf){
        GST_DEBUG("Not displaying previously pre-rolled buffer");
        sink->prerolledBuffer = NULL;
        return GST_FLOW_OK;
    }
    sink->prerolledBuffer = NULL;

    /* If the input buffer is not a DMAI buffer, allocate a DMAI buffer and
     * copy the input data into it with memcpy.
     */
    if (GST_IS_TIDMAIBUFFERTRANSPORT(buf)) {
        inBuf = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(buf);
    } else {
        /* allocate DMAI buffer */
        if (sink->tempDmaiBuf == NULL) {

            GST_DEBUG("Input buffer is non-dmai, allocating new buffer");
            gfxAttrs.dim.width          = sink->width;
            gfxAttrs.dim.height         = sink->height;
            gfxAttrs.dim.lineLength     = BufferGfx_calcLineLength(sink->width,
                                            sink->colorSpace);
            gfxAttrs.colorSpace         = sink->colorSpace;
            sink->tempDmaiBuf           = Buffer_create(GST_BUFFER_SIZE(buf),
                                           BufferGfx_getBufferAttrs(&gfxAttrs));

            if (sink->tempDmaiBuf == NULL) {
                GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                    ("Failed to allocate memory for the input buffer"));
                return GST_FLOW_UNEXPECTED;
            }
        }
        inBuf = sink->tempDmaiBuf;

        memcpy(Buffer_getUserPtr(inBuf), buf->data, buf->size);
    }

    if (Buffer_getBufTab(inBuf) == Display_getBufTab(sink->hDisplay)) {
        GST_DEBUG("Flipping pad allocated buffer");
        /* We got a buffer that is already on video memory, just flip it */
        hDispBuf = inBuf;
        if (sink->numAllocatedBuffers)
            sink->numAllocatedBuffers--;
        sink->allocatedBuffers[Buffer_getId(inBuf)] = NULL;
        if (buf == sink->lastAllocatedBuffer){
            sink->lastAllocatedBuffer = NULL;
        }
    } else {
        /* Check if we can allocate a new buffer, otherwise we may need 
         * to drop the buffer
         */
        BufferGfx_getDimensions(inBuf, &inDimSave);
        if ((sink->numAllocatedBuffers >= 
             (BufTab_getNumBufs(Display_getBufTab(sink->hDisplay)) - 1)) &&
             (sink->numUnusedBuffers == 0)){
            GST_ELEMENT_WARNING(sink,RESOURCE,NO_SPACE_LEFT,(NULL),
                ("Dropping incoming buffers because no display buffer"
                    " available"));
            return GST_FLOW_OK;
        } else {
            GST_DEBUG("Obtaining display buffer");
            hDispBuf = gst_tidmaivideosink_get_display_buffer(sink,inBuf);
            if (!hDispBuf){
                return GST_FLOW_UNEXPECTED;
            }
        }

        if (Framecopy_config(sink->hFc, inBuf, hDispBuf) < 0) {
            GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                ("Failed to configure the frame copy"));
            return GST_FLOW_UNEXPECTED;
        }

        if (Framecopy_execute(sink->hFc, inBuf, hDispBuf) < 0) {
            GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                ("Failed to execute the frame copy"));
            return GST_FLOW_UNEXPECTED;
        }

        BufferGfx_resetDimensions(hDispBuf);
        BufferGfx_setDimensions(inBuf, &inDimSave);
    }

    /* Send filled buffer to display device driver to be displayed */
    if (Display_put(sink->hDisplay, hDispBuf) < 0) {
        GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
            ("Failed to put the buffer on display"));
        return GST_FLOW_UNEXPECTED;
    }

    GST_DEBUG("Finish");

    return GST_FLOW_OK;
}
Code Example #10
File: Idec1.c  Project: aCayF/dmai_1_21_00_10
/******************************************************************************
 * Idec1_process
 ******************************************************************************/
Int Idec1_process(Idec1_Handle hId, Buffer_Handle hInBuf,
                 Buffer_Handle hOutBuf)
{
    BufferGfx_Dimensions    dim;
    IMGDEC1_DynamicParams   dynParams;
    IMGDEC1_InArgs          inArgs;
    IMGDEC1_OutArgs         outArgs;
    IMGDEC1_Status          decStatus;
    XDM1_BufDesc            inBufDesc;
    XDM1_BufDesc            outBufDesc;
    XDAS_Int32              status;
    XDAS_Int8 *             inPtr;
    XDAS_Int8 *             outPtr;
    UInt32                  offset = 0;
    UInt32                  i;
    
    assert(hId);
    assert(hInBuf);
    assert(hOutBuf);
    assert(Buffer_getSize(hInBuf));   
    assert(Buffer_getUserPtr(hInBuf)); 
    assert(Buffer_getUserPtr(hOutBuf));
    assert(Buffer_getNumBytesUsed(hInBuf));
    assert(Buffer_getSize(hOutBuf));
    assert(Buffer_getType(hOutBuf) == Buffer_Type_GRAPHICS);

    BufferGfx_getDimensions(hOutBuf, &dim);
    
    inPtr  = Buffer_getUserPtr(hInBuf);
    outPtr = Buffer_getUserPtr(hOutBuf);

    inArgs.size             = sizeof(IMGDEC1_InArgs);
    inArgs.numBytes         = Buffer_getNumBytesUsed(hInBuf);

    outArgs.size            = sizeof(IMGDEC1_OutArgs);

    inBufDesc.numBufs       = 1;
    outBufDesc.numBufs      = hId->minNumOutBufs;
    
    inBufDesc.descs[0].buf = inPtr;
    inBufDesc.descs[0].bufSize = Buffer_getSize(hInBuf);

    for(i = 0; i < hId->minNumOutBufs; i++)
    {
        outBufDesc.descs[i].buf = (XDAS_Int8 *)((unsigned int)outPtr + offset);
        offset +=  hId->minOutBufSize[i];
        outBufDesc.descs[i].bufSize = hId->minOutBufSize[i];
    }   
        
    /* Decode image buffer */
    status = IMGDEC1_process(hId->hDecode, &inBufDesc, &outBufDesc, &inArgs,
                            &outArgs);

    Buffer_setNumBytesUsed(hInBuf, outArgs.bytesConsumed);

    if (status != IMGDEC1_EOK) {
        if (XDM_ISFATALERROR(outArgs.extendedError)) {
            Dmai_err2("IMGDEC1_process() failed with error (%d ext: 0x%x)\n",
                      (Int)status, (Uns) outArgs.extendedError);
            return Dmai_EFAIL;
        }
        else {
            Dmai_dbg1("IMGDEC1_process() non-fatal error 0x%x\n",
                      (Uns) outArgs.extendedError);
            return Dmai_EBITERROR;
        }
    }

    /* Get the dynamic codec status */
    decStatus.data.buf = NULL;
    decStatus.size = sizeof(IMGDEC1_Status);
    dynParams.size = sizeof(IMGDEC1_DynamicParams);
    status = IMGDEC1_control(hId->hDecode, XDM_GETSTATUS, &dynParams,
                            &decStatus);

    if (status != IMGDEC1_EOK) {
        Dmai_err1("XDM_GETSTATUS failed, status=%d\n", status);
        return Dmai_EFAIL;
    }

    /* Set output Color Format */
    switch (decStatus.outputChromaFormat) {
        case XDM_YUV_422ILE:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_UYVY); 
            break;
        case XDM_YUV_420P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV420P); 
            break;
        case XDM_YUV_422P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV422P); 
            break;
        case XDM_YUV_444P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV444P); 
            break;
        case XDM_GRAY:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_GRAY); 
            break;
        default:
            printf("Unsupported output color space.\n");
            return Dmai_EFAIL;        
    }
       
    dim.x = dim.y = 0;
    dim.width = decStatus.outputWidth;
    dim.height = decStatus.outputHeight;
    dim.lineLength = decStatus.outputWidth *
                      ColorSpace_getBpp(BufferGfx_getColorSpace(hOutBuf)) / 8;
    
    if (BufferGfx_setDimensions(hOutBuf, &dim) < 0) {
        Dmai_err0("Frame does not fit in allocated buffer\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}
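
A minimal decode-call sketch, assuming hId was created elsewhere (Idec1_create is not shown in this collection) and that hInBuf already holds one encoded image with its numBytesUsed set; decodeOneImage is illustrative only.

/* Hypothetical caller: decode one image, treating bit errors as recoverable. */
Int decodeOneImage(Idec1_Handle hId, Buffer_Handle hInBuf, Buffer_Handle hOutBuf)
{
    Int ret = Idec1_process(hId, hInBuf, hOutBuf);

    if (ret == Dmai_EBITERROR) {
        printf("Decoded image contained recoverable bit errors\n");
    }
    else if (ret != Dmai_EOK) {
        fprintf(stderr, "Idec1_process failed\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}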
Code Example #11
File: Resize.c  Project: aCayF/dmai_1_21_00_10
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf, Buffer_Handle hsDstBuf)
{
    struct imp_convert  rsz;
    struct rsz_channel_config rsz_chan_config;
    struct rsz_single_shot_config rsz_ss_config;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    BufferGfx_Dimensions sdstDim;
    UInt32               srcOffset;
    UInt32               dstOffset;
    UInt32               sdstOffset;
    UInt32               srcSize;
    UInt32               dstSize;
    
    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);
    if (hsDstBuf) {
        BufferGfx_getDimensions(hsDstBuf, &sdstDim);
    }
    srcSize = srcDim.width * srcDim.height;
    dstSize = dstDim.width * dstDim.height;

    /* The resize operation for ColorSpace_UYVY differs from ColorSpace_YUV420PSEMI */
    if (BufferGfx_getColorSpace(hSrcBuf) == ColorSpace_UYVY) {
        srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        sdstOffset = hsDstBuf?(sdstDim.y * sdstDim.lineLength + (sdstDim.x << 1)):0;

        rsz.in_buff.index     = -1;
        rsz.in_buff.buf_type  = IMP_BUF_IN;
        rsz.in_buff.offset    = Buffer_getPhysicalPtr(hSrcBuf) + srcOffset;
        rsz.in_buff.size      = Buffer_getSize(hSrcBuf);

        rsz.out_buff1.index    = -1;
        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf) + dstOffset;
        rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
        
        rsz.out_buff2.index    = -1;
        rsz.out_buff2.buf_type = IMP_BUF_OUT2;
        rsz.out_buff2.offset   = hsDstBuf?(Buffer_getPhysicalPtr(hsDstBuf) + sdstOffset):0;
        rsz.out_buff2.size     = hsDstBuf?Buffer_getSize(hsDstBuf):0;
        
        /* 
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset  & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        assert((rsz.out_buff2.offset & 0x1F) == 0);

        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        Buffer_setNumBytesUsed(hDstBuf, Buffer_getSize(hDstBuf));
        if (hsDstBuf) {
            Buffer_setNumBytesUsed(hsDstBuf, Buffer_getSize(hsDstBuf));
        }
    } else {
        /* Configure for ColorSpace_YUV420PSEMI */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = 0;
        rsz_chan_config.config = NULL; /* to set defaults in driver */
        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in setting default configuration for single shot mode\n");
            return Dmai_EFAIL;
        }

        /* Default configuration setting in the resizer was successful */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
        rsz_chan_config.config = &rsz_ss_config;

        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in getting configuration from driver\n");
            return Dmai_EFAIL;
        }

        /* Input parameters for the luma (Y) plane pass */
        rsz_ss_config.input.image_width  = srcDim.width;
        rsz_ss_config.input.image_height = srcDim.height;
        rsz_ss_config.input.ppln = rsz_ss_config.input.image_width + 8;
        rsz_ss_config.input.lpfr = rsz_ss_config.input.image_height + 10;
        rsz_ss_config.input.pix_fmt = IPIPE_420SP_Y;
        rsz_ss_config.output1.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
        rsz_ss_config.output1.width = dstDim.width;
        rsz_ss_config.output1.height = dstDim.height;
        rsz_ss_config.output2.enable = 0;

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in setting default configuration for single shot mode\n");
            return Dmai_EFAIL;
        }
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);

        /* Read back and verify */
        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in getting configuration from driver\n");
            return Dmai_EFAIL;
        }

        /* execute for ColorSpace_YUV420PSEMI*/
        Dmai_clear(rsz);
        //srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        //dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        rsz.in_buff.buf_type  = IMP_BUF_IN;
        rsz.in_buff.index     = -1;
        rsz.in_buff.offset    = Buffer_getPhysicalPtr(hSrcBuf);
        //rsz.in_buff.size      = Buffer_getSize(hSrcBuf);
        rsz.in_buff.size      = srcSize;

        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.index    = -1;
        rsz.out_buff1.offset    = Buffer_getPhysicalPtr(hDstBuf);
        //rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
        rsz.out_buff1.size     = dstSize;
        
        /* 
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset  & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        /* Input parameters for the chroma (CbCr) plane pass */
        rsz_ss_config.input.image_width  = srcDim.width;
        rsz_ss_config.input.image_height = srcDim.height >> 1;
        rsz_ss_config.input.ppln = rsz_ss_config.input.image_width + 8;
        rsz_ss_config.input.lpfr = rsz_ss_config.input.image_height + 10;
        rsz_ss_config.input.pix_fmt = IPIPE_420SP_C;
        rsz_ss_config.output1.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
        rsz_ss_config.output1.width = dstDim.width;
        rsz_ss_config.output1.height = dstDim.height;
        rsz_ss_config.output2.enable = 0;

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in setting default configuration for single shot mode\n");
            return Dmai_EFAIL;
        }
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);

        /* Read back and verify */
        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in getting configuration from driver\n");
            return Dmai_EFAIL;
        }

        Dmai_clear(rsz);
        //srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        //dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        rsz.in_buff.buf_type  = IMP_BUF_IN;
        rsz.in_buff.index     = -1;
        rsz.in_buff.offset    = Buffer_getPhysicalPtr(hSrcBuf)  + srcSize;
        rsz.in_buff.size      = srcSize>>1;

        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.index    = -1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf)  + dstSize;
        rsz.out_buff1.size     = dstSize>>1;
        
        /* 
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset  & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));

    }
    return Dmai_EOK;
}
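
A worked example of the YUV420PSEMI (NV12) two-pass path above, using assumed 1280x720 source and destination buffers (numbers are illustrative, not from the source):

/* Assumed 1280x720 in/out:
 *   luma pass   (IPIPE_420SP_Y): offset = base,          size = 1280*720    = 921600 bytes
 *   chroma pass (IPIPE_420SP_C): offset = base + 921600, size = 921600 >> 1 = 460800 bytes,
 *                                input.image_height = 720 >> 1 = 360 lines
 */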
Code Example #12
File: Resize.c  Project: aCayF/dmai_1_21_00_10
/******************************************************************************
 * Resize_config
 ******************************************************************************/
Int Resize_config(Resize_Handle hResize,
                  Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf, Buffer_Handle hsDstBuf)
{
    BufferGfx_Dimensions srcDim, dstDim, sdstDim;
    UInt                 hDif;
    UInt                 vDif;    
    UInt                 shDif;
    UInt                 svDif;    
    struct rsz_channel_config rsz_chan_config;
    struct rsz_single_shot_config rsz_ss_config;

    /* Make sure our input parameters are valid */
    if (!hResize) {
        Dmai_err0("Resize_Handle parameter must not be NULL\n");
        return Dmai_EINVAL;
    }   
    
    if (!hSrcBuf) {
        Dmai_err0("Source buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }   

    if (!hDstBuf) {
        Dmai_err0("Destination buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }
    
    if (!hsDstBuf) {
        Dmai_err0("Second destination buffer parameter is NULL\n");
    }
    
    /* Buffer needs to be graphics buffers */
    if (Buffer_getType(hSrcBuf) != Buffer_Type_GRAPHICS ||
        Buffer_getType(hDstBuf) != Buffer_Type_GRAPHICS ||
        (hsDstBuf && Buffer_getType(hsDstBuf) != Buffer_Type_GRAPHICS)) {
        Dmai_err0("Src and dst buffers need to be graphics buffers\n");
        return Dmai_EINVAL;
    }

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);
    if (hsDstBuf) {
        BufferGfx_getDimensions(hsDstBuf, &sdstDim);
    }

    if (dstDim.width <= 0) {
        Dmai_err0("Destination buffer width must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (dstDim.height <= 0) {
        Dmai_err0("Destination buffer height must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (hsDstBuf && sdstDim.width <= 0) {
        Dmai_err0("Second destination buffer width must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (hsDstBuf && sdstDim.height <= 0) {
        Dmai_err0("Second destination buffer height must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if ((srcDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Source buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }

    if ((dstDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Destination buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }

    if (hsDstBuf && (sdstDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Second destination buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }


    /* Check for valid buffer scaling */
    hDif = srcDim.width  * 256 / dstDim.width;
    vDif = srcDim.height * 256 / dstDim.height;

    if (hDif < 32) {
        Dmai_err0("Horizontal up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (hDif > 4096) {
        Dmai_err0("Horizontal down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    if (vDif < 32) {
        Dmai_err0("Vertical up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (vDif > 4096) {
        Dmai_err0("Vertical down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }    

    shDif = hsDstBuf?(srcDim.width  * 256 / sdstDim.width):0;
    svDif = hsDstBuf?(srcDim.height * 256 / sdstDim.height):0;

    if (shDif && shDif < 32) {
        Dmai_err0("Second horizontal up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (shDif && shDif > 4096) {
        Dmai_err0("Second horizontal down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    if (svDif && svDif < 32) {
        Dmai_err0("Second vertical up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (svDif && svDif > 4096) {
        Dmai_err0("Second vertical down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }    

    /* Configure for ColorSpace_UYVY */
    if (BufferGfx_getColorSpace(hSrcBuf) == ColorSpace_UYVY) {
        /* Setting default configuration in Resizer */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = 0;
        rsz_chan_config.config = NULL; /* to set defaults in driver */
        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in setting default configuration for single shot mode\n");
            goto faco_error;
        }

        /* Default configuration setting in the resizer was successful */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
        rsz_chan_config.config = &rsz_ss_config;

        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in getting resizer channel configuration from driver\n");
            goto faco_error;
        }

        /* Input parameters are set at the resizer */
        rsz_ss_config.input.image_width  = srcDim.width;
        rsz_ss_config.input.image_height = srcDim.height;
        rsz_ss_config.input.ppln = rsz_ss_config.input.image_width + 8;
        rsz_ss_config.input.lpfr = rsz_ss_config.input.image_height + 10;
        rsz_ss_config.input.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hSrcBuf));
        rsz_ss_config.output1.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
        rsz_ss_config.output1.width = dstDim.width;
        rsz_ss_config.output1.height = dstDim.height;
        rsz_ss_config.output2.pix_fmt = hsDstBuf ? pixFormatConversion(BufferGfx_getColorSpace(hsDstBuf)) : 0;
        rsz_ss_config.output2.enable = hsDstBuf ? 1 : 0;
        rsz_ss_config.output2.width = hsDstBuf ? sdstDim.width : 0;
        rsz_ss_config.output2.height = hsDstBuf ? sdstDim.height : 0;

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in setting default configuration for single shot mode\n");
            goto faco_error;
        }
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain = 0;
        rsz_chan_config.len = sizeof(struct rsz_single_shot_config);

        /* Read back and verify */
        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error in getting configuration from driver\n");
            goto faco_error;
        }
    }

    return Dmai_EOK;
    
faco_error:
	close(hResize->fd);
    hResize->fd = 0;    
    return Dmai_EFAIL;
    
}
Code Example #13
File: Resize.c  Project: Soorma07/dm368-dvsdk-reprise
/******************************************************************************
 * Resize_config
 ******************************************************************************/
Int Resize_config(Resize_Handle hResize,
                  Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#ifdef CONFIG_DM365_IPIPE
    BufferGfx_Dimensions srcDim, dstDim;
    UInt                 hDif;
    UInt                 vDif;    
    struct rsz_channel_config rsz_chan_config;
    struct rsz_single_shot_config rsz_ss_config;

    /* Make sure our input parameters are valid */
    if (!hResize) {
        Dmai_err0("Resize_Handle parameter must not be NULL\n");
        return Dmai_EINVAL;
    }   
    
    if (!hSrcBuf) {
        Dmai_err0("Source buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }   

    if (!hDstBuf) {
        Dmai_err0("Destination buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }
    
    /* Buffer needs to be graphics buffers */
    if (Buffer_getType(hSrcBuf) != Buffer_Type_GRAPHICS ||
        Buffer_getType(hDstBuf) != Buffer_Type_GRAPHICS) {

        Dmai_err0("Src and dst buffers need to be graphics buffers\n");
        return Dmai_EINVAL;
    }

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    if (dstDim.width <= 0) {
        Dmai_err0("Destination buffer width must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (dstDim.height <= 0) {
        Dmai_err0("Destination buffer height must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if ((srcDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Source buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }

    if ((dstDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Destination buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }


    /* Check for valid buffer scaling */
    hDif = srcDim.width  * 256 / dstDim.width;
    vDif = srcDim.height * 256 / dstDim.height;

    if (hDif < 32) {
        Dmai_err0("Horizontal up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (hDif > 4096) {
        Dmai_err0("Horizontal down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    if (vDif < 32) {
        Dmai_err0("Vertical up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (vDif > 4096) {
        Dmai_err0("Vertical down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    /* Set the driver default parameters and retrieve what was set */
    Dmai_clear(rsz_ss_config);
    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain = 0;
    rsz_chan_config.len = 0;
    rsz_chan_config.config = NULL; /* to set defaults in driver */
    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error in setting default configuration for single shot mode\n");
        goto cleanup;
    }

    /* Default configuration setting in the resizer was successful */
    Dmai_clear(rsz_ss_config);
    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain = 0;
    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
    rsz_chan_config.config = &rsz_ss_config;

    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error in getting resizer channel configuration from driver\n");
        goto cleanup;
    }

    /* input params are set at the resizer */
    rsz_ss_config.input.image_width  = srcDim.width;
    rsz_ss_config.input.image_height = srcDim.height;
    rsz_ss_config.input.ppln = rsz_ss_config.input.image_width + 8;
    rsz_ss_config.input.lpfr = rsz_ss_config.input.image_height + 10;
    rsz_ss_config.input.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hSrcBuf));
    rsz_ss_config.output1.pix_fmt = pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
    rsz_ss_config.output1.enable = 1;
    rsz_ss_config.output1.width = dstDim.width;
    rsz_ss_config.output1.height = dstDim.height;
    rsz_ss_config.output2.enable = 0;

    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain = 0;
    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);
    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error in setting default configuration for single shot mode\n");
        goto cleanup;
    }
    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain = 0;
    rsz_chan_config.len = sizeof(struct rsz_single_shot_config);

    /* read again and verify */
    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error in getting configuration from driver\n");
        goto cleanup;
    }

    return Dmai_EOK;
    
cleanup:
    close(hResize->fd);
    hResize->fd = 0;    
    return Dmai_EFAIL;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif
}
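
hDif and vDif are fixed-point scale ratios in units of 1/256, so the accepted range of 32..4096 corresponds to 8x up-scaling through 1/16x down-scaling. A worked example with assumed widths (not from the source):

/* Assumed horizontal cases:
 *   1280 -> 160 : hDif = 1280 * 256 / 160  = 2048  (1/8x down, accepted)
 *    160 -> 1280: hDif =  160 * 256 / 1280 =   32  (8x up, exactly the limit)
 *    160 -> 1440: hDif =  160 * 256 / 1440 =   28  (more than 8x up, rejected)
 */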
Code Example #14
File: appMain.c  Project: Soorma07/dvsdk-dmai
/******************************************************************************
 * writeFrameSemiPlanar
 ******************************************************************************/
Int writeFrameSemiPlanar(Buffer_Handle hBuf, FILE *outFile)
{
    Int8   *yPtr, *cbcrPtr;        
    UInt32  ySize;                /* Size of Luma data */
    Uint32  cbcrSize;             /* Size of Chroma (cb and cr) data */
    Char   *colorFmt;
    Int     y, x;

    BufferGfx_Dimensions dim;
    BufferGfx_getDimensions(hBuf, &dim);    

    /*  The buffer holds a Y plane followed by an interleaved CbCr plane.
     *  For YUV420PSEMI the Y plane occupies the first two thirds of the
     *  buffer and the CbCr plane the final third.
     */
    ySize = dim.width * dim.height;
    
    switch(BufferGfx_getColorSpace(hBuf)){
       case ColorSpace_YUV420PSEMI:
           cbcrSize = dim.width * dim.height / 2;
           colorFmt = "420P";
           break;
       default:
           printf("Unsupported output color space %d.\n", 
            BufferGfx_getColorSpace(hBuf));
           return -1;        
    }    

    /* Write Y plane */
    yPtr = Buffer_getUserPtr(hBuf);
    if (fwrite(yPtr, 1, ySize, outFile) != ySize ) {
        fprintf(stderr,"Failed to write y data to disk\n");
        return -1;
    }

    /* Separate Cb from CbCr interleaved and save Cb plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + Buffer_getSize(hBuf) * 2/3;
    for (y = 0; y < dim.height / 2; y++) {
      for (x = 0; x < dim.width; x += 2) {
        if (fwrite(&cbcrPtr[x], 1, 1, outFile) != 1) {
            fprintf(stderr,"Failed to write data to disk\n");
            return -1;
        }
      }
      cbcrPtr += dim.lineLength;
    }

    /* Separate Cr from CbCr interleaved and save Cr plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + Buffer_getSize(hBuf) * 2 / 3;
    for (y = 0; y < dim.height / 2; y++) {
      for (x = 1; x < dim.width; x += 2) {
        if (fwrite(&cbcrPtr[x], 1, 1, outFile) != 1) {
            fprintf(stderr,"Failed to write data to disk\n");
            return -1;
        }
      }
      cbcrPtr += dim.lineLength;
    }

    printf("Wrote %s image size %d (%dx%d) to disk\n",
           colorFmt, 
           (Int) (ySize + cbcrSize),
           (Int) dim.width, (Int) dim.height);

    return 0;
}
Code Example #15
File: appMain.c  Project: Soorma07/dvsdk-dmai
/******************************************************************************
 * writeFramePlanar
 ******************************************************************************/
Int writeFramePlanar(Buffer_Handle hBuf, FILE *outFile)
{
    Int8   *yPtr, *cbcrPtr;        
    UInt32  ySize;                /* Size of Luma data */
    Uint32  cbcrSize;             /* Size of Chroma (cb and cr) data */
    Uint32  cbOffset, crOffSet;   
    Char   *colorFmt;
 
    BufferGfx_Dimensions dim;
    BufferGfx_getDimensions(hBuf, &dim);    

    /*  The three color planes are evenly spaced in the output buffer. Each 
     *  plane has been allocated a size equal to the input creation parameter 
     *  maxWidth x maxHeight.              
     */     
    cbOffset = Buffer_getSize(hBuf) * 1/3;
    crOffSet = Buffer_getSize(hBuf) * 2/3;
    
    ySize = dim.width * dim.height;
    
    switch(BufferGfx_getColorSpace(hBuf)){
       case ColorSpace_YUV444P:
           cbcrSize = dim.width * dim.height * 2;
           colorFmt = "444P";
           break;
       case ColorSpace_YUV422P:
           cbcrSize = dim.width * dim.height;
           colorFmt = "422P";
           break;
       case ColorSpace_YUV420P:
           cbcrSize = dim.width * dim.height / 2;
           colorFmt = "420P";
           break;
       case ColorSpace_GRAY:
           cbcrSize = 0;
           colorFmt = "GRAY";
           break;
       default:
           printf("Unsupported output color space %d.\n", 
            BufferGfx_getColorSpace(hBuf));
           return -1;        
    }    

    /* Write Y plane */
    yPtr = Buffer_getUserPtr(hBuf);
    if (fwrite(yPtr, 1, ySize, outFile) != ySize ) {
        fprintf(stderr,"Failed to write y data to disk\n");
        return -1;
    }

    /* Write Cb plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + cbOffset;   
    if (fwrite(cbcrPtr, 1, cbcrSize / 2 , outFile) != cbcrSize / 2 ) {
        fprintf(stderr,"Failed to write y data to disk\n");
        return -1;
    }

    /* Write Cr plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + crOffSet;
    if (fwrite(cbcrPtr, 1, cbcrSize / 2  , outFile) != cbcrSize / 2 ) {
        fprintf(stderr,"Failed to write y data to disk\n");
        return -1;
    }

    printf("Wrote %s image size %d (%dx%d) to disk\n",
           colorFmt, 
           (Int) (ySize + cbcrSize),
           (Int) dim.width, (Int) dim.height);

    return 0;
}
Code Example #16
File: appMain.c  Project: Soorma07/dvsdk-dmai
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    Framecopy_Attrs         fcAttrs      = Framecopy_Attrs_DEFAULT;
    BufferGfx_Attrs         gfxAttrs     = BufferGfx_Attrs_DEFAULT;
    Smooth_Attrs            smAttrs      = Smooth_Attrs_DEFAULT;
    Time_Attrs              tAttrs       = Time_Attrs_DEFAULT;
    BufTab_Handle           hCapBufTab   = NULL;
    BufTab_Handle           hDisBufTab   = NULL;
    Display_Handle          hDisplay     = NULL;
    Capture_Handle          hCapture     = NULL;
    Framecopy_Handle        hFc          = NULL;
    Smooth_Handle           hSmooth      = NULL;
    Time_Handle             hTime        = NULL;
    Int                     numFrame     = 0;
    Display_Attrs           dAttrs;
    Capture_Attrs           cAttrs;
    Buffer_Handle           cBuf, dBuf;
    Cpu_Device              device;
    Int                     bufIdx;
    UInt32                  time;
    BufferGfx_Dimensions    dim;
    Int32                   bufSize;
    Int                     ret = Dmai_EOK;

    /* Initialize DMAI */
    Dmai_init();

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    /* Set the display and capture attributes depending on device */
    switch (device) {
        case Cpu_Device_DM6467:
            dAttrs = Display_Attrs_DM6467_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6467_DEFAULT;
            break;
        case Cpu_Device_DM365:
        case Cpu_Device_DM368:
            dAttrs = Display_Attrs_DM365_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM365_DEFAULT;
            dAttrs.colorSpace = ColorSpace_YUV420PSEMI;
            cAttrs.colorSpace = dAttrs.colorSpace;
            break;
        case Cpu_Device_OMAPL138:
            dAttrs = Display_Attrs_OMAPL138_VID_DEFAULT;
            cAttrs = Capture_Attrs_OMAPL138_DEFAULT;
            break;
        case Cpu_Device_OMAP3530:
        case Cpu_Device_DM3730:
            dAttrs     = Display_Attrs_O3530_VID_DEFAULT;
            cAttrs     = Capture_Attrs_OMAP3530_DEFAULT;
            dAttrs.colorSpace = cAttrs.colorSpace = ColorSpace_UYVY;
            dAttrs.rotation = 270;
            break;
        default:
            dAttrs = Display_Attrs_DM6446_DM355_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6446_DM355_DEFAULT;
            break;
    }

    if (args->displayStd != -1) {
        dAttrs.displayStd = args->displayStd;
    }

    if (args->displayDevice) {
        dAttrs.displayDevice = args->displayDevice;
    }

    if (args->displayNumBufs != -1) {
        dAttrs.numBufs = args->displayNumBufs;
    }

    /* Enable cropping in capture driver if selected */
    if (args->width != -1 && args->height != -1 && args->crop) {
        cAttrs.cropX = args->xIn;
        cAttrs.cropY = args->yIn;
        cAttrs.cropWidth = args->width;
        cAttrs.cropHeight = args->height;
    }

    cAttrs.videoInput = args->videoInput;

    if (Capture_detectVideoStd(NULL, &cAttrs.videoStd, &cAttrs) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to detect capture video standard\n");
        goto cleanup;
    }

    /* The color space of the capture buffers depend on the device */
    gfxAttrs.colorSpace = cAttrs.colorSpace;

    if (VideoStd_getResolution(cAttrs.videoStd, &gfxAttrs.dim.width, 
                                &gfxAttrs.dim.height) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to get resolution for video standard\n");
        goto cleanup;
    }

    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, 
                                              gfxAttrs.colorSpace), 32); 
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;

    if (gfxAttrs.colorSpace == ColorSpace_YUV422PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }
    else if (gfxAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    }
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height;
    }

    if (args->captureUalloc) {
        /* Create a table of video buffers to use with the capture device */
        hCapBufTab = BufTab_create(cAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));
        if (hCapBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    /* Create the capture device driver instance */
    hCapture = Capture_create(hCapBufTab, &cAttrs);

    if (hCapture == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create capture device\n");
        goto cleanup;
    }

    /* Create the display device driver instance */
    dAttrs.videoStd = Capture_getVideoStd(hCapture);
    dAttrs.videoOutput = args->videoOutput;

    if (args->displayUalloc) {
        /* Create a table of video buffers to use with the display device */
        hDisBufTab = BufTab_create(dAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hDisBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    hDisplay = Display_create(hDisBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create display device\n");
        goto cleanup;
    }

    if (args->smooth) {
        /* Create the smooth job */
        hSmooth = Smooth_create(&smAttrs);

        if (hSmooth == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create smooth job\n");
        }
    }
    else {
        /* Create the frame copy job */
        fcAttrs.accel = args->accel;
        hFc = Framecopy_create(&fcAttrs);

        if (hFc == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create frame copy job\n");
            goto cleanup;
        }
    }

    /*
     * If cropping is not used, alter the dimensions of the captured
     * buffers and position the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1 && !args->crop) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Capture_getBufTab(hCapture));
             bufIdx++) {

            cBuf = BufTab_getBuf(Capture_getBufTab(hCapture), bufIdx);
            BufferGfx_getDimensions(cBuf, &dim);

            dim.width   = args->width;
            dim.height  = args->height;
            dim.x       = args->xIn;
            dim.y       = args->yIn;

            if (BufferGfx_setDimensions(cBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Input resolution does not fit in capture frame\n");
                goto cleanup;
            }
        }
    }

    /*
     * Alter the dimensions of the display buffers and position
     * the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Display_getBufTab(hDisplay));
             bufIdx++) {

            dBuf = BufTab_getBuf(Display_getBufTab(hDisplay), bufIdx);
            BufferGfx_getDimensions(dBuf, &dim);

            dim.width   = args->width;
            dim.height  = args->height;
            dim.x       = args->xOut;
            dim.y       = args->yOut;

            if (BufferGfx_setDimensions(dBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Output resolution does not fit in display frame\n");
                goto cleanup;
            }
        }
    }

    if (args->smooth) {
        if (Smooth_config(hSmooth,
                          BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure smooth job\n");
            goto cleanup;
        }
    }
    else {
        /* Configure the frame copy job */
        if (Framecopy_config(hFc, BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure frame copy job\n");
            goto cleanup;
        }
    }

    while (numFrame++ < args->numFrames || args->numFrames == 0) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Get a captured frame from the capture device */
        if (Capture_get(hCapture, &cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get capture buffer\n");
            goto cleanup;
        }

        /* Get a frame from the display device to be filled with data */
        if (Display_get(hDisplay, &dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }
        }

        if (args->smooth) {
            /*
             * Remove interlacing artifacts from the captured buffer and
             * store the result in the display buffer.
             */
            if (Smooth_execute(hSmooth, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute smooth job\n");
                goto cleanup;
            }
        }
        else {
            /* Copy the captured buffer to the display buffer */
            if (Framecopy_execute(hFc, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute frame copy job\n");
                goto cleanup;
            }
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Smooth / Framecopy: %uus ", (Uns) time);
        }

        /* Give captured buffer back to the capture device driver */
        if (Capture_put(hCapture, cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put capture buffer\n");
            goto cleanup;
        }

        /* Send filled buffer to display device driver to be displayed */
        if (Display_put(hDisplay, dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("Frame time: %uus\n", (Uns) time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hSmooth) {
        Smooth_delete(hSmooth);
    }

    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    if (hCapBufTab) {
        BufTab_delete(hCapBufTab);
    }

    if (hDisBufTab) {
        BufTab_delete(hDisBufTab);
    }

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
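Note: the frame-buffer size used above for both the capture and display BufTab allocations depends only on the color space and the 32-byte-aligned line length. A minimal sketch of the same arithmetic as a standalone helper (calcFrameBufSize is a hypothetical name, not a DMAI API):

/* Hypothetical helper: frame buffer size for the color spaces handled above. */
static Int32 calcFrameBufSize(ColorSpace_Type colorSpace,
                              BufferGfx_Dimensions *dim)
{
    switch (colorSpace) {
        case ColorSpace_YUV422PSEMI:
            return dim->lineLength * dim->height * 2;       /* Y + CbCr */
        case ColorSpace_YUV420PSEMI:
            return dim->lineLength * dim->height * 3 / 2;   /* Y + CbCr/2 */
        default:
            /* Packed formats: lineLength already includes bytes per pixel */
            return dim->lineLength * dim->height;
    }
}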
Code Example #17
0
File: display.c Project: ChrisX34/stuff
/******************************************************************************
 * displayThrFxn
 ******************************************************************************/
Void *displayThrFxn(Void *arg)
{
    DisplayEnv             *envp       = (DisplayEnv *) arg;
    Display_Attrs           dAttrs     = Display_Attrs_DM365_VID_DEFAULT;
    Display_Handle          hDisplay   = NULL;
    Framecopy_Handle        hFc        = NULL;
    Void                   *status     = THREAD_SUCCESS;
    Uns                     frameCnt   = 0;
    BufferGfx_Dimensions    srcDim;
    Buffer_Handle           hSrcBuf, hDstBuf;
    Int                     fifoRet;
    ColorSpace_Type         colorSpace = ColorSpace_YUV420PSEMI;
    BufferGfx_Attrs         gfxAttrs = BufferGfx_Attrs_DEFAULT;
    BufTab_Handle           hBufTab  = NULL;
    Int32                   bufSize;
    Time_Attrs              tAttrs   = Time_Attrs_DEFAULT;
    Time_Handle             hTime    = NULL;
    Int32                   time, waitTime;
    Int                     bufCnt = 1;

    hTime = Time_create(&tAttrs);

    if (hTime == NULL) {
        ERR("Failed to create Time object\n");
        cleanup(THREAD_FAILURE);
    }

    if(Time_reset(hTime) != Dmai_EOK) {
        ERR("Failed to reset timer\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Pause for priming? */
        Pause_test(envp->hPausePrime);

        /* Get decoded video frame */
        fifoRet = Fifo_get(envp->hInFifo, &hSrcBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }
        
        BufferGfx_getDimensions(hSrcBuf, &srcDim);

        /* Prime the display driver with the first NUM_DISPLAY_BUFS buffers */
        if (bufCnt <= NUM_DISPLAY_BUFS) { 
            if (bufCnt == 1) {  /* Create the Display at the first frame */
                gfxAttrs.dim.width = srcDim.width;
                gfxAttrs.dim.height = srcDim.height;
                gfxAttrs.dim.lineLength = srcDim.lineLength;
                gfxAttrs.dim.x = srcDim.x;
                gfxAttrs.dim.y = srcDim.y;
                if (colorSpace ==  ColorSpace_YUV420PSEMI) {
                    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 
                        3 / 2;
                } else {
                    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
                }

                /* Create a table of buffers to use with the device drivers */
                gfxAttrs.colorSpace = colorSpace;
                hBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                    BufferGfx_getBufferAttrs(&gfxAttrs));
                if (hBufTab == NULL) {
                    ERR("Failed to create buftab\n");
                    cleanup(THREAD_FAILURE);
                }
	
                /* Create the display device instance */
                dAttrs.delayStreamon = TRUE;
                dAttrs.numBufs = NUM_DISPLAY_BUFS;
                dAttrs.videoStd = envp->videoStd;
                /* 
                 * Round down the width to a multiple of 32 as required by 
                 * display driver. Otherwise, the driver would internally round
                 * up the width, resulting in the codec padding showing up
                 * on the display when the image width is not a multiple of 32.
                 */
                dAttrs.width = ((gfxAttrs.dim.width & 0x1f) ?
                    (gfxAttrs.dim.width & ~(0x1f)) : gfxAttrs.dim.width);
                dAttrs.height = gfxAttrs.dim.height;
                dAttrs.videoOutput = envp->displayOutput;
                dAttrs.colorSpace  = colorSpace;
                hDisplay = Display_create(hBufTab, &dAttrs);

                if (hDisplay == NULL) {
                    ERR("Failed to create display device\n");
                    cleanup(THREAD_FAILURE);
                }
            }

            bufCnt++;
        }
        else {
            /* Get a buffer from the display device driver */
            if (Display_get(hDisplay, &hDstBuf) < 0) {
                ERR("Failed to get display buffer\n");
                cleanup(THREAD_FAILURE);
            }

            /* Send buffer back to the video thread */
            if (Fifo_put(envp->hOutFifo, hDstBuf) < 0) {
                ERR("Failed to send buffer to video thread\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->videoStd == VideoStd_720P_60) {
            if (Time_delta(hTime, (UInt32*)&time) < 0) {
                ERR("Failed to get timer delta\n");
                cleanup(THREAD_FAILURE);
            }
            waitTime = DISPLAYLOOPLATENCY - time;
            if(waitTime > 0) {
                usleep(waitTime);
            }
            if(Time_reset(hTime) != Dmai_EOK) {
                ERR("Failed to reset timer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();

        /* Give a filled buffer back to the display device driver */
        if (Display_put(hDisplay, hSrcBuf) < 0) {
            ERR("Failed to put display buffer\n");
            cleanup(THREAD_FAILURE);
        }

        frameCnt++;
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Pause_off(envp->hPausePrime);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    if(hTime) {
        Time_delete(hTime);
    }

    return status;
}
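The display width handed to Display_create() above is rounded down to a multiple of 32 with a bit mask, as the in-line comment explains. A sketch of the same operation as a hypothetical helper (roundDownTo32 is not a DMAI function):

/* Hypothetical helper: round a width down to the nearest multiple of 32,
 * mirroring the masking done before Display_create() above. */
static Int32 roundDownTo32(Int32 width)
{
    return width & ~0x1F;
}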
Code Example #18
0
File: _VideoBuf.c Project: NearZhxiAo/3730
/******************************************************************************
 * _Dmai_blackFill
 ******************************************************************************/
Void _Dmai_blackFill(Buffer_Handle hBuf)
{
    switch (BufferGfx_getColorSpace(hBuf)) {
        case ColorSpace_YUV422PSEMI:
        {
            Int8  *yPtr     = Buffer_getUserPtr(hBuf);
            Int32  ySize    = Buffer_getSize(hBuf) / 2;
            Int8  *cbcrPtr  = yPtr + ySize;
            Int32  cbCrSize = Buffer_getSize(hBuf) - ySize;
            Int    i;

            /* Fill the Y plane */
            for (i = 0; i < ySize; i++) {
                yPtr[i] = 0x0;
            }

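            /* Fill the interleaved CbCr plane with neutral chroma */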
            for (i = 0; i < cbCrSize; i++) {
                cbcrPtr[i] = 0x80;
            }
            break;
        }

        case ColorSpace_YUV420PSEMI:
        {
            Int8  *bufPtr = Buffer_getUserPtr(hBuf);
            Int    y;
            Int    bpp = ColorSpace_getBpp(ColorSpace_YUV420PSEMI);
            BufferGfx_Dimensions dim;
            
            BufferGfx_getDimensions(hBuf, &dim);

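            /* Fill the Y plane with black */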
            for (y = 0; y < dim.height; y++) {
                memset(bufPtr, 0x0, dim.width * bpp / 8);
                bufPtr += dim.lineLength;
            }

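            /* Fill the interleaved CbCr plane with neutral chroma */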
            for (y = 0; y < (dim.height / 2); y++) {
                memset(bufPtr, 0x80, dim.width * bpp / 8);
                bufPtr += dim.lineLength;
            }
            
            break;
        }

        case ColorSpace_UYVY:
        {
            Int32 *bufPtr  = (Int32*)Buffer_getUserPtr(hBuf);
            Int32  bufSize = Buffer_getSize(hBuf) / sizeof(Int32);
            Int    i;

            /* Make sure display buffer is 4-byte aligned */
            assert((((UInt32) bufPtr) & 0x3) == 0);

            for (i = 0; i < bufSize; i++) {
                bufPtr[i] = UYVY_BLACK;
            }
            break;
        }

        case ColorSpace_RGB565:
        {
            memset(Buffer_getUserPtr(hBuf), 0, Buffer_getSize(hBuf));
            break;
        }

        default:
            Dmai_err0("Unsupported color space for _Dmai_blackFill\n");
            break;
    }
}
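A plausible use of _Dmai_blackFill() is to clear every buffer of a display BufTab before streaming starts, so no stale data flashes on screen; a minimal sketch under that assumption (hDisplay is assumed to be a Display_Handle created as in the earlier examples):

/* Sketch: blacken all display buffers before the first frame is shown. */
BufTab_Handle hBufTab = Display_getBufTab(hDisplay);
Int           i;

for (i = 0; i < BufTab_getNumBufs(hBufTab); i++) {
    _Dmai_blackFill(BufTab_getBuf(hBufTab, i));
}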
Code Example #19
0
File: gsttidmaivideosink.c Project: sv99/gst-ti-dmai
/******************************************************************************
 * gst_tidmaivideosink_get_display_buffer
 *
 * Function used to obtain a display buffer with the right properties
 ******************************************************************************/
static Buffer_Handle gst_tidmaivideosink_get_display_buffer(
    GstTIDmaiVideoSink *sink, Buffer_Handle inBuf)
{
    Buffer_Handle hDispBuf = NULL;
    BufferGfx_Dimensions dim, inDim;
    int i;

    if (sink->numUnusedBuffers > 0){
        /* Recycle an unused buffer */
        for (i = 0; i < sink->numBuffers; i++) {
            if (sink->unusedBuffers[i] != NULL){
                hDispBuf = sink->unusedBuffers[i];
                sink->unusedBuffers[i] = NULL;
                sink->allocatedBuffers[i] = NULL;
                sink->numUnusedBuffers--;
                sink->numAllocatedBuffers--;
                GST_DEBUG("Re-using dropped buffer");
                break;
            }
        }
    } else {
        /* Get a buffer from the display driver */
        if (Display_get(sink->hDisplay, &hDispBuf) < 0) {
            GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                ("Failed to get display buffer"));
            return NULL;
        }
    }

    if (!hDispBuf) {
        GST_ELEMENT_ERROR(sink, STREAM, FAILED, (NULL),
            ("Failed to get display buffer"));
        return NULL;
    }

    GST_LOG("Cleaning Display buffers: buffer %d cleaned",
            (int)Buffer_getId(hDispBuf));

    /*Removing garbage on display buffer*/
    if (sink->cleanBufCtrl[Buffer_getId(hDispBuf)] == DIRTY ){
        if (!gst_ti_blackFill(hDispBuf)) {
            GST_ELEMENT_WARNING(sink, RESOURCE, SETTINGS, (NULL),
                ("Unsupported color space, buffers not painted\n"));
        }
        sink->cleanBufCtrl[Buffer_getId(hDispBuf)] = CLEAN;
        GST_DEBUG("Cleaning Display buffers: buffer %d cleaned",
            (int)Buffer_getId(hDispBuf));
    }

    /* Retrieve the dimensions of the display buffer */
    BufferGfx_resetDimensions(hDispBuf);
    BufferGfx_getDimensions(hDispBuf, &dim);
    GST_LOG("Display size %dx%d pitch %d\n",
            (Int) dim.width, (Int) dim.height, (Int) dim.lineLength);

    /*
     * Only the portion of the video that fits on the screen is displayed.
     * If the video is smaller than the display, it is either centered or
     * placed at the configured position on the screen.
     */
    /* WIDTH */
    if (inBuf) {
        BufferGfx_getDimensions(inBuf, &inDim);
    }

    if(!sink->xCentering){
       if(sink->xPosition > 0){
          dim.x = sink->xPosition;
          inDim.width = dim.width - sink->xPosition;
          if(inDim.width > sink->width){
             inDim.width = sink->width;
          }else if(inDim.width < 0){
             inDim.width = 0;
          }
       }else{
          if (inBuf) {
            dim.x = 0;
            inDim.width = sink->xPosition + sink->width > 0 ? 
                sink->xPosition + sink->width : 0;
            inDim.x = -sink->xPosition;
          } else {
              /* For pad allocated buffers we want to leave it up to the 
               * upstream element
               */
              dim.x = sink->xPosition;
          }
       }
    }else{
       dim.x = (dim.width-sink->width)/2;
    }
    /* HEIGHT */
    if(!sink->yCentering){
       if(sink->yPosition > 0){
          dim.y = sink->yPosition;
          inDim.height = dim.height - sink->yPosition;
          if(inDim.height > sink->height){
             inDim.height = sink->height;
          }else if(inDim.height < 0){
             inDim.height = 0;
          }
       }else{
          if (inBuf) {
            dim.y = 0;
            inDim.height = sink->yPosition + sink->height > 0 ? 
                sink->yPosition + sink->height : 0;
            inDim.y = -sink->yPosition;
          } else {
              /* For pad allocated buffers we want to leave it up to the 
               * upstream element
               */
              dim.y = sink->yPosition;
          }
       }
    }else{
       dim.y = (dim.height-sink->height)/2;
    }

    if (inBuf)
        BufferGfx_setDimensions(inBuf, &inDim);
    BufferGfx_setDimensions(hDispBuf, &dim);

    return hDispBuf;
}
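When centering is enabled, the offset written into dim.x or dim.y above is simply half of the leftover space between the display and the video; a hypothetical one-line helper expressing that (centeredOffset is not part of the plugin):

/* Hypothetical helper: offset that centers an image along one display axis. */
static int centeredOffset(int displaySize, int imageSize)
{
    return (displaySize - imageSize) / 2;
}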
Code Example #20
0
/******************************************************************************
 * Framecopy_accel_execute
 ******************************************************************************/
Int Framecopy_accel_execute(Framecopy_Handle hFc,
                             Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
    vdce_address_start_t params;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    UInt32 srcOffset, dstOffset;

    assert(hFc);
    assert(hSrcBuf);
    assert(hDstBuf);

    /* VDCE addresses must be a multiple of 8 */
    assert((((UInt32) Buffer_getUserPtr(hDstBuf)) & 0x7) == 0);
    assert((((UInt32) Buffer_getUserPtr(hSrcBuf)) & 0x7) == 0);

    /* Buffer sizes / 4 must be a multiple of 8 */
    assert(((Buffer_getSize(hSrcBuf) / 4) & 0x7) == 0);
    assert(((Buffer_getSize(hDstBuf) / 4) & 0x7) == 0);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    assert(srcDim.x >= 0);
    assert(srcDim.y >= 0);
    assert(dstDim.x >= 0);
    assert(dstDim.y >= 0);
    assert((srcDim.x & 0x7) == 0);
    assert((dstDim.x & 0x7) == 0);

    srcOffset = srcDim.y * srcDim.lineLength + srcDim.x;
    dstOffset = dstDim.y * dstDim.lineLength + dstDim.x;

    params.buffers[VDCE_BUF_IN].index     = -1;
    params.buffers[VDCE_BUF_IN].buf_type  = VDCE_BUF_IN;
    params.buffers[VDCE_BUF_IN].virt_ptr  = ((Int32) Buffer_getUserPtr(hSrcBuf)+
                                                     srcOffset);
    params.buffers[VDCE_BUF_IN].size      = Buffer_getSize(hSrcBuf);

    params.buffers[VDCE_BUF_OUT].index    = -1;
    params.buffers[VDCE_BUF_OUT].buf_type = VDCE_BUF_OUT;
    params.buffers[VDCE_BUF_OUT].virt_ptr = ((Int32) Buffer_getUserPtr(hDstBuf)+
                                                     dstOffset);
    params.buffers[VDCE_BUF_OUT].size     = Buffer_getSize(hDstBuf);

    params.src_horz_pitch = srcDim.lineLength;
    params.res_horz_pitch = dstDim.lineLength;

    if (ioctl(hFc->fd, VDCE_START, &params) < 0) {
        Dmai_err0("Failed VDCE_START\n");
        return Dmai_EFAIL;
    }

    if (BufferGfx_getColorSpace(hDstBuf) == ColorSpace_YUV420PSEMI) {
        Buffer_setNumBytesUsed(hDstBuf, dstDim.width * dstDim.height * 3 / 2);
    }
    else {
        Buffer_setNumBytesUsed(hDstBuf, dstDim.width * dstDim.height * 2);
    }

    return Dmai_EOK;
}
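A caller could verify the VDCE alignment constraints asserted above before submitting a copy job; isVdceBufferAligned below is a hypothetical pre-flight check, not a DMAI API:

/* Hypothetical check mirroring the asserts above: the buffer address must be
 * 8-byte aligned and the buffer size divided by 4 must be a multiple of 8. */
static Bool isVdceBufferAligned(Buffer_Handle hBuf)
{
    return ((((UInt32) Buffer_getUserPtr(hBuf)) & 0x7) == 0) &&
           (((Buffer_getSize(hBuf) / 4) & 0x7) == 0);
}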
Code Example #21
0
File: appMain.c Project: Soorma07/dvsdk-dmai
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    Time_Attrs           tAttrs    = Time_Attrs_DEFAULT;
    BufferGfx_Attrs      gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Display_Handle       hDisplay  = NULL;
    Time_Handle          hTime     = NULL;
    BufTab_Handle        hDisBufTab= NULL;
    Int                  numFrame  = 0;
    Display_Attrs        dAttrs;
    Buffer_Handle        hDispBuf;
    Int                  y, x, pos, color;
    Cpu_Device           device;
    UInt32               time;
    Int32                bufSize;
    BufferGfx_Dimensions dim;
    Int                  ret = Dmai_EOK;

    /* Initialize DMAI */
    Dmai_init();

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr, "Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    switch (device) {
        case Cpu_Device_DM6467:
            dAttrs = Display_Attrs_DM6467_VID_DEFAULT;
            break;
        case Cpu_Device_OMAP3530:
        case Cpu_Device_DM3730:
            dAttrs = Display_Attrs_O3530_VID_DEFAULT;
            break;
        case Cpu_Device_OMAPL138:
            dAttrs = Display_Attrs_OMAPL138_OSD_DEFAULT;
            break;
        case Cpu_Device_DM365:
        case Cpu_Device_DM368:
            dAttrs = Display_Attrs_DM365_VID_DEFAULT;
            break;            
        case Cpu_Device_OMAPL137:
            dAttrs = Display_Attrs_OMAPL137_OSD_DEFAULT;
            break;        
        default:
            dAttrs = Display_Attrs_DM6446_DM355_VID_DEFAULT;
            break;
    }
 
    if (args->displayUalloc) {
        gfxAttrs.colorSpace = dAttrs.colorSpace;

        if (VideoStd_getResolution(args->videoStd, &gfxAttrs.dim.width,
                                   &gfxAttrs.dim.height) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get resolution of video standard\n");
            goto cleanup;
        }

        gfxAttrs.dim.lineLength =
            Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, 
                                                  gfxAttrs.colorSpace), 32); 

        gfxAttrs.dim.x = 0;
        gfxAttrs.dim.y = 0;

        if (gfxAttrs.colorSpace == ColorSpace_YUV422PSEMI) {
            bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
        }
        else if (gfxAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
            bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
        }
        else {
            bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height;
        }

        if (bufSize < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to calculated size for display buffers\n");
            goto cleanup;
        }

        /* Create a table of video buffers to use with the display device */
        hDisBufTab = BufTab_create(dAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hDisBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    /* Create the video display */
    dAttrs.videoStd = args->videoStd;
    dAttrs.videoOutput = args->videoOutput;
    hDisplay = Display_create(hDisBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open display device\n");
        goto cleanup;
    }

    x = color = 0;

    while (numFrame++ < args->numFrames) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Get a buffer from the display driver */
        if (Display_get(hDisplay, &hDispBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get display buffer\n");
            goto cleanup;
        }

        /* Retrieve the dimensions of the display buffer */
        BufferGfx_getDimensions(hDispBuf, &dim);

        printf("Display size %dx%d pitch %d x = %d color %d\n", (Int) dim.width,
               (Int) dim.height, (Int) dim.lineLength, x, color);

        /* Draw a vertical bar of a color */
        for (y = 0; y < dim.height; y++) {
            pos = y * dim.lineLength + x * 2;
            memset(Buffer_getUserPtr(hDispBuf) + pos, color, 2);
        }

        x = (x + 1) % dim.width;
        color = (color + 1) % 0xff;

        /* Give the display buffer back to be displayed */
        if (Display_put(hDisplay, hDispBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("[%d] Frame time: %uus\n", numFrame, (Uns) time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hDisBufTab) {
        BufTab_delete(hDisBufTab);
    }
    
    if (hTime) {
        Time_delete(hTime);
    }

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
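The drawing loop above writes two bytes per pixel, so it assumes a packed 16-bit pixel format such as UYVY or RGB565. The same bar drawing, factored into a hypothetical helper for clarity (drawVerticalBar is not a DMAI API):

/* Hypothetical helper: paint one vertical bar in a packed 16-bit frame. */
static Void drawVerticalBar(Buffer_Handle hBuf, Int x, Int color)
{
    BufferGfx_Dimensions dim;
    Int                  y;

    BufferGfx_getDimensions(hBuf, &dim);

    for (y = 0; y < dim.height; y++) {
        memset(Buffer_getUserPtr(hBuf) + y * dim.lineLength + x * 2, color, 2);
    }
}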
Code Example #22
0
/******************************************************************************
 * Framecopy_resizer_accel_config
 ******************************************************************************/
Int Framecopy_resizer_accel_config(Framecopy_Handle hFc,
                           Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#if defined(CONFIG_VIDEO_OMAP34XX_ISP_RESIZER)
    struct v4l2_requestbuffers reqbuf;
    Int rszRate;
    BufferGfx_Dimensions srcDim, dstDim;
    struct rsz_params  params = {
        0,                              /* in_hsize (set at run time) */
        0,                              /* in_vsize (set at run time) */
        0,                              /* in_pitch (set at run time) */
        RSZ_INTYPE_YCBCR422_16BIT,      /* inptyp */
        0,                              /* vert_starting_pixel */
        0,                              /* horz_starting_pixel */
        0,                              /* cbilin */
        RSZ_PIX_FMT_UYVY,               /* pix_fmt */
        0,                              /* out_hsize (set at run time) */
        0,                              /* out_vsize (set at run time) */
        0,                              /* out_pitch (set at run time) */
        0,                              /* hstph */
        0,                              /* vstph */
        {                               /* hfilt_coeffs */
            256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0,
            256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0
        },
        {                               /* vfilt_coeffs */
            256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0,
            256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0, 256, 0, 0, 0
        },
        {                               /* yenh_params */
            0,                              /* type */
            0,                              /* gain */
            0,                              /* slop */
            0                               /* core */
        }
    };

    /* Pointers must be a multiple of 4096 bytes */
    assert((Buffer_getPhysicalPtr(hDstBuf) & 0xFFF) == 0);
    assert((Buffer_getPhysicalPtr(hSrcBuf) & 0xFFF) == 0);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    Dmai_dbg2("Configuring resizer to copy image of resolution %dx%d\n",
              srcDim.width, srcDim.height);

    /* Source and destination pitch must be 32 bytes aligned */
    if (srcDim.lineLength % 32 || dstDim.lineLength % 32) {
        Dmai_err2("Src (%ld) and dst (%ld) must be aligned on 32 bytes\n",
                  srcDim.lineLength, dstDim.lineLength);
        return Dmai_EINVAL;
    }

    /* Set up the copy job */
    params.in_hsize            = srcDim.width;
    params.in_vsize            = srcDim.height;
    params.in_pitch            = srcDim.lineLength;
    params.out_hsize           = dstDim.width;
    params.out_vsize           = dstDim.height;
    params.out_pitch           = dstDim.lineLength;
    params.cbilin              = 0;
    params.pix_fmt             = RSZ_PIX_FMT_UYVY;
    params.vert_starting_pixel = 0;
    params.horz_starting_pixel = 0;
    params.inptyp              = RSZ_INTYPE_YCBCR422_16BIT;
    params.hstph               = 0;
    params.vstph               = 0;
    params.yenh_params.type    = 0;
    params.yenh_params.gain    = 0;
    params.yenh_params.slop    = 0;
    params.yenh_params.core    = 0;

    hFc->inSize  = srcDim.lineLength * params.in_vsize;
    hFc->outSize = dstDim.lineLength * params.out_vsize;

    if (ioctl(hFc->fd, RSZ_S_PARAM, &params) == -1) {
        Dmai_err0("Framecopy setting parameters failed.\n");
        return Dmai_EFAIL;
    }

    rszRate = 0x0;

    if (ioctl(hFc->fd, RSZ_S_EXP, &rszRate) == -1) {
        Dmai_err0("Framecopy setting read cycle failed.\n");
        return Dmai_EFAIL;
    }

    reqbuf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory   = V4L2_MEMORY_USERPTR;
    reqbuf.count    = 2;

    if (ioctl(hFc->fd, RSZ_REQBUF, &reqbuf) == -1) {
        Dmai_err0("Request buffer failed.\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif /* end CONFIG_VIDEO_OMAP34XX_ISP_RESIZER */
}
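Because the resizer rejects pitches that are not 32-byte aligned, a caller can check the line length of both buffers before configuring the job; isResizerPitchOk below is a hypothetical helper, not part of DMAI:

/* Hypothetical check mirroring the 32-byte pitch requirement enforced above. */
static Bool isResizerPitchOk(Buffer_Handle hBuf)
{
    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);
    return (dim.lineLength % 32) == 0;
}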