Example 1
/******************************************************************************
 * _Dmai_v4l2DriverAlloc
 ******************************************************************************/
Int _Dmai_v4l2DriverAlloc(Int fd, Int numBufs, enum v4l2_buf_type type,
                          struct _VideoBufDesc **bufDescsPtr,
                          BufTab_Handle *hBufTabPtr, Int topOffset, 
                          ColorSpace_Type colorSpace)
{
    BufferGfx_Attrs             gfxAttrs = BufferGfx_Attrs_DEFAULT;
    struct v4l2_requestbuffers  req;
    struct v4l2_format          fmt;
    _VideoBufDesc              *bufDesc;
    Buffer_Handle               hBuf;
    Int                         bufIdx;
    Int8                       *virtPtr;

    Dmai_clear(fmt);
    fmt.type = type;

    if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
        Dmai_err1("VIDIOC_G_FMT failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    Dmai_clear(req);
    req.count  = numBufs;
    req.type   = type;
    req.memory = V4L2_MEMORY_MMAP;

    /* Allocate buffers in the capture device driver */
    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
        Dmai_err1("VIDIOC_REQBUFS failed (%s)\n", strerror(errno));
        return Dmai_ENOMEM;
    }

    if (req.count < numBufs || !req.count) {
        Dmai_err0("Insufficient device driver buffer memory\n");
        return Dmai_ENOMEM;
    }

    /* Allocate space for buffer descriptors */
    *bufDescsPtr = calloc(numBufs, sizeof(_VideoBufDesc));

    if (*bufDescsPtr == NULL) {
        Dmai_err0("Failed to allocate space for buffer descriptors\n");
        return Dmai_ENOMEM;
    }

    gfxAttrs.dim.width          = fmt.fmt.pix.width;
    gfxAttrs.dim.height         = fmt.fmt.pix.height;
    gfxAttrs.dim.lineLength     = fmt.fmt.pix.bytesperline;
    gfxAttrs.colorSpace         = colorSpace;
    gfxAttrs.bAttrs.reference   = TRUE;

    *hBufTabPtr = BufTab_create(numBufs, fmt.fmt.pix.sizeimage,
                                BufferGfx_getBufferAttrs(&gfxAttrs));

    if (*hBufTabPtr == NULL) {
        return Dmai_ENOMEM;
    }

    for (bufIdx = 0; bufIdx < numBufs; bufIdx++) {
        bufDesc = &(*bufDescsPtr)[bufIdx];

        /* Ask for information about the driver buffer */
        Dmai_clear(bufDesc->v4l2buf);
        bufDesc->v4l2buf.type   = type;
        bufDesc->v4l2buf.memory = V4L2_MEMORY_MMAP;
        bufDesc->v4l2buf.index  = bufIdx;

        if (ioctl(fd, VIDIOC_QUERYBUF, &bufDesc->v4l2buf) == -1) {
            Dmai_err1("Failed VIDIOC_QUERYBUF (%s)\n", strerror(errno));
            return Dmai_EFAIL;
        }

        /* Map the driver buffer to user space */
        virtPtr = (Int8 *) mmap(NULL,
                                bufDesc->v4l2buf.length,
                                PROT_READ | PROT_WRITE,
                                MAP_SHARED,
                                fd,
                                bufDesc->v4l2buf.m.offset);

        if (virtPtr == MAP_FAILED) {
            Dmai_err1("Failed to mmap buffer (%s)\n", strerror(errno));
            return Dmai_EFAIL;
        }

        /* Apply the requested top offset only after the mapping succeeded */
        virtPtr += topOffset;

        /* Initialize the Buffer with driver buffer information */
        hBuf = BufTab_getBuf(*hBufTabPtr, bufIdx);

        Buffer_setNumBytesUsed(hBuf, fmt.fmt.pix.bytesperline *
                                     fmt.fmt.pix.height);
        Buffer_setUseMask(hBuf, gfxAttrs.bAttrs.useMask);
        Buffer_setUserPtr(hBuf, virtPtr);

        /* Initialize buffer to black */
        _Dmai_blackFill(hBuf);

        Dmai_dbg3("Driver buffer %d mapped to %#x has physical address "
                  "%#lx\n", bufIdx, (Int) virtPtr, Buffer_getPhysicalPtr(hBuf));

        bufDesc->hBuf = hBuf;

        /* Queue buffer in device driver */
        if (ioctl(fd, VIDIOC_QBUF, &bufDesc->v4l2buf) == -1) {
            Dmai_err1("VIODIOC_QBUF failed (%s)\n", strerror(errno));
            return Dmai_EFAIL;
        }
    }

    return Dmai_EOK;
}
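
A minimal usage sketch (not from the original sources) showing how this helper
might be called once a V4L2 device has been opened and its capture format
configured; the wrapper name allocCaptureBufs, the device path, and the buffer
count are illustrative assumptions:

static Int allocCaptureBufs(const Char *devName, ColorSpace_Type colorSpace)
{
    _VideoBufDesc *bufDescs = NULL;
    BufTab_Handle  hBufTab  = NULL;
    Int            fd;

    /* Hypothetical: open the capture device, e.g. "/dev/video0" */
    fd = open(devName, O_RDWR, 0);

    if (fd == -1) {
        Dmai_err2("Failed to open %s (%s)\n", devName, strerror(errno));
        return Dmai_EFAIL;
    }

    /* The capture format (VIDIOC_S_FMT) is assumed to be negotiated before
     * this point, since _Dmai_v4l2DriverAlloc sizes its buffers from the
     * result of VIDIOC_G_FMT */
    if (_Dmai_v4l2DriverAlloc(fd, 3, V4L2_BUF_TYPE_VIDEO_CAPTURE,
                              &bufDescs, &hBufTab, 0, colorSpace) < 0) {
        Dmai_err0("Failed to allocate and map driver buffers\n");
        close(fd);
        return Dmai_EFAIL;
    }

    /* The buffers are now mapped, initialized to black and queued; the
     * caller can start streaming (VIDIOC_STREAMON) and dequeue frames */
    return Dmai_EOK;
}
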
Example 2
/******************************************************************************
 * Display_fbdev_create
 ******************************************************************************/
Display_Handle Display_fbdev_create(BufTab_Handle hBufTab, Display_Attrs *attrs)
{
    BufferGfx_Attrs         gfxAttrs       = BufferGfx_Attrs_DEFAULT;
    struct fb_var_screeninfo varInfo;
    struct fb_fix_screeninfo fixInfo;
    Int                      displaySize;
    Int                      bufIdx;
    Int8                    *virtPtr;
    Int32                    height, width;
    Display_Handle           hDisplay;
    Buffer_Handle            hBuf;

    if (attrs == NULL) {
        Dmai_err0("Must supply valid attrs\n");
        return NULL;
    }

    if (hBufTab != NULL) {
        Dmai_err0("FBdev display does not accept user allocated buffers\n");
        return NULL;
    }

    hDisplay = calloc(1, sizeof(Display_Object));

    if (hDisplay == NULL) {
        Dmai_err0("Failed to allocate space for Display Object\n");
        return NULL;
    }

    /* Open video display device */
    hDisplay->fd = open(attrs->displayDevice, O_RDWR);

    if (hDisplay->fd == -1) {
        Dmai_err2("Failed to open fb device %s (%s)\n", attrs->displayDevice,
                                                        strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    /* Get fixed screen info */
    if (ioctl(hDisplay->fd, FBIOGET_FSCREENINFO, &fixInfo) == -1) {
        Dmai_err2("Failed FBIOGET_FSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    /* Get virtual screen info */
    if (ioctl(hDisplay->fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {
        Dmai_err2("Failed FBIOGET_VSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    Dmai_dbg5("Found width=%d height=%d, yres_virtual=%d,xres_virtual=%d,"
              " line_length=%d\n", varInfo.xres, varInfo.yres, 
                varInfo.yres_virtual,varInfo.xres_virtual, fixInfo.line_length);

    /* Save current virtual screen info. */
    memcpy(&hDisplay->origVarInfo, &varInfo, sizeof(struct fb_var_screeninfo));

    /* If video standard is set to auto then use current height and width */
    if (attrs->videoStd == VideoStd_AUTO) {
        width = varInfo.xres;
        height = varInfo.yres;
    }
    /* If no video standard is set, use the width/height passed in the
     * attributes.
     */
    else if (attrs->videoStd == -1) {
        width = attrs->width;
        height = attrs->height; 
    }
    /* Calculate width/height from the video standard */
    else {
        VideoStd_getResolution(attrs->videoStd, &width, &height);
    }

    if (attrs->width > 0) {
        width = attrs->width;
    }

    if (attrs->height > 0) {
        height = attrs->height;
    }

    if (width > varInfo.xres) { 
        width = varInfo.xres;
    }

    if (height > varInfo.yres) {
        height = varInfo.yres;
    }

    varInfo.xoffset         = 0;
    varInfo.yoffset         = 0;
    varInfo.xres            = width;
    varInfo.yres            = height;
    varInfo.xres_virtual    = width;
    varInfo.yres_virtual    = varInfo.yres * attrs->numBufs;

    /* Set video display format */
    Dmai_dbg4("Setting width=%d height=%d, yres_virtual=%d,"
              " xres_virtual=%d\n", varInfo.xres, varInfo.yres, 
              varInfo.yres_virtual, varInfo.xres_virtual);

    if (ioctl(hDisplay->fd, FBIOPUT_VSCREENINFO, &varInfo) == -1) {
        Dmai_err2("Failed FBIOPUT_VSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    if (ioctl(hDisplay->fd, FBIOGET_FSCREENINFO, &fixInfo) == -1) {
        Dmai_err2("Failed FBIOGET_FSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    Dmai_dbg5("New width=%d, height=%d, yres_virtual=%d,xres_virtual=%d, "
              "line_length=%d\n", varInfo.xres, varInfo.yres, 
              varInfo.yres_virtual, varInfo.xres_virtual, fixInfo.line_length);

    gfxAttrs.dim.width          = varInfo.xres;
    gfxAttrs.colorSpace         = attrs->colorSpace;
    gfxAttrs.dim.height         = varInfo.yres;
    gfxAttrs.dim.lineLength     = fixInfo.line_length;
    gfxAttrs.bAttrs.reference   = TRUE;
    displaySize                 = fixInfo.line_length * varInfo.yres;

    hBufTab = BufTab_create(attrs->numBufs, displaySize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        Dmai_err0("Failed to allocate BufTab for display buffers\n");
        cleanup(hDisplay);
        return NULL;
    }

    hBuf = BufTab_getBuf(hBufTab, 0);

    Buffer_setNumBytesUsed(hBuf, varInfo.xres * varInfo.yres *
                                 varInfo.bits_per_pixel / 8);
    virtPtr = (Int8 *) mmap (NULL,
                             displaySize * attrs->numBufs,
                             PROT_READ | PROT_WRITE,
                             MAP_SHARED,
                             hDisplay->fd, 0);

    if (virtPtr == MAP_FAILED) {
        Dmai_err2("Failed mmap on %s (%s)\n", attrs->displayDevice,
                                              strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    if (Buffer_setUserPtr(hBuf, virtPtr) < 0) {
        cleanup(hDisplay);
        return NULL;
    }

    _Dmai_blackFill(hBuf);

    Dmai_dbg3("Display buffer %d mapped to %#lx has physical address %#lx\n", 0,
              (Int32) virtPtr, physPtr);

    for (bufIdx=1; bufIdx < attrs->numBufs; bufIdx++) {
        hBuf = BufTab_getBuf(hBufTab, bufIdx);
        Buffer_setNumBytesUsed(hBuf, varInfo.xres * varInfo.yres *
                                     varInfo.bits_per_pixel / 8);

        virtPtr = virtPtr + displaySize;
        Buffer_setUserPtr(hBuf, virtPtr);
        _Dmai_blackFill(hBuf);

        Dmai_dbg3("Display buffer %d mapped to %#lx, physical address %#lx\n",
                  bufIdx, (unsigned long) virtPtr, physPtr);
    }

    hDisplay->hBufTab = hBufTab;
    hDisplay->displayIdx = 0;
    hDisplay->workingIdx = attrs->numBufs > 1 ? 1 : 0;
    hDisplay->displayStd = Display_Std_FBDEV;

    if (setDisplayBuffer(hDisplay, hDisplay->displayIdx) < 0) {
        cleanup(hDisplay);
        return NULL;
    }

    return hDisplay;
}
Example 3
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    Framecopy_Attrs         fcAttrs      = Framecopy_Attrs_DEFAULT;
    BufferGfx_Attrs         gfxAttrs     = BufferGfx_Attrs_DEFAULT;
    Smooth_Attrs            smAttrs      = Smooth_Attrs_DEFAULT;
    Time_Attrs              tAttrs       = Time_Attrs_DEFAULT;
    BufTab_Handle           hCapBufTab   = NULL;
    BufTab_Handle           hDisBufTab   = NULL;
    Display_Handle          hDisplay     = NULL;
    Capture_Handle          hCapture     = NULL;
    Framecopy_Handle        hFc          = NULL;
    Smooth_Handle           hSmooth      = NULL;
    Time_Handle             hTime        = NULL;
    Int                     numFrame     = 0;
    Display_Attrs           dAttrs;
    Capture_Attrs           cAttrs;
    Buffer_Handle           cBuf, dBuf;
    Cpu_Device              device;
    Int                     bufIdx;
    UInt32                  time;
    BufferGfx_Dimensions    dim;
    Int32                   bufSize;
    Int                     ret = Dmai_EOK;

    /* Initialize DMAI */
    Dmai_init();

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    /* Set the display and capture attributes depending on device */
    switch (device) {
        case Cpu_Device_DM6467:
            dAttrs = Display_Attrs_DM6467_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6467_DEFAULT;
            break;
        case Cpu_Device_DM365:
        case Cpu_Device_DM368:
            dAttrs = Display_Attrs_DM365_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM365_DEFAULT;
            dAttrs.colorSpace = ColorSpace_YUV420PSEMI;
            cAttrs.colorSpace = dAttrs.colorSpace;
            break;
        case Cpu_Device_OMAPL138:
            dAttrs = Display_Attrs_OMAPL138_VID_DEFAULT;
            cAttrs = Capture_Attrs_OMAPL138_DEFAULT;
            break;
        case Cpu_Device_OMAP3530:
        case Cpu_Device_DM3730:
            dAttrs     = Display_Attrs_O3530_VID_DEFAULT;
            cAttrs     = Capture_Attrs_OMAP3530_DEFAULT;
            dAttrs.colorSpace = cAttrs.colorSpace = ColorSpace_UYVY;
            dAttrs.rotation = 270;
            break;
        default:
            dAttrs = Display_Attrs_DM6446_DM355_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6446_DM355_DEFAULT;
            break;
    }

    if (args->displayStd != -1) {
        dAttrs.displayStd = args->displayStd;
    }

    if (args->displayDevice) {
        dAttrs.displayDevice = args->displayDevice;
    }

    if (args->displayNumBufs != -1) {
        dAttrs.numBufs = args->displayNumBufs;
    }

    /* Enable cropping in capture driver if selected */
    if (args->width != -1 && args->height != -1 && args->crop) {
        cAttrs.cropX = args->xIn;
        cAttrs.cropY = args->yIn;
        cAttrs.cropWidth = args->width;
        cAttrs.cropHeight = args->height;
    }

    cAttrs.videoInput = args->videoInput;

    if (Capture_detectVideoStd(NULL, &cAttrs.videoStd, &cAttrs) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to detect capture video standard\n");
        goto cleanup;
    }

    /* The color space of the capture buffers depend on the device */
    gfxAttrs.colorSpace = cAttrs.colorSpace;

    if (VideoStd_getResolution(cAttrs.videoStd, &gfxAttrs.dim.width, 
                                &gfxAttrs.dim.height) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to get resolution of video standard\n");
        goto cleanup;
    }

    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, 
                                              gfxAttrs.colorSpace), 32); 
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;

    if (gfxAttrs.colorSpace == ColorSpace_YUV422PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }
    else if (gfxAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    }
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height;
    }

    if (args->captureUalloc) {
        /* Create a table of video buffers to use with the capture device */
        hCapBufTab = BufTab_create(cAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));
        if (hCapBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    /* Create the capture device driver instance */
    hCapture = Capture_create(hCapBufTab, &cAttrs);

    if (hCapture == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create capture device\n");
        goto cleanup;
    }

    /* Create the display device driver instance */
    dAttrs.videoStd = Capture_getVideoStd(hCapture);
    dAttrs.videoOutput = args->videoOutput;

    if (args->displayUalloc) {
        /* Create a table of video buffers to use with the display device */
        hDisBufTab = BufTab_create(dAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hDisBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    hDisplay = Display_create(hDisBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create display device\n");
        goto cleanup;
    }

    if (args->smooth) {
        /* Create the smooth job */
        hSmooth = Smooth_create(&smAttrs);

        if (hSmooth == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create smooth job\n");
            goto cleanup;
        }
    }
    else {
        /* Create the frame copy job */
        fcAttrs.accel = args->accel;
        hFc = Framecopy_create(&fcAttrs);

        if (hFc == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create frame copy job\n");
            goto cleanup;
        }
    }

    /*
     * If cropping is not used, alter the dimensions of the captured
     * buffers and position the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1 && !args->crop) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Capture_getBufTab(hCapture));
             bufIdx++) {

            cBuf = BufTab_getBuf(Capture_getBufTab(hCapture), bufIdx);
            BufferGfx_getDimensions(cBuf, &dim);

            dim.width   = args->width;
            dim.height  = args->height;
            dim.x       = args->xIn;
            dim.y       = args->yIn;

            if (BufferGfx_setDimensions(cBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Input resolution does not fit in capture frame\n");
                goto cleanup;
            }
        }
    }

    /*
     * Alter the dimensions of the display buffers and position
     * the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Display_getBufTab(hDisplay));
             bufIdx++) {

            dBuf = BufTab_getBuf(Display_getBufTab(hDisplay), bufIdx);
            BufferGfx_getDimensions(dBuf, &dim);

            dim.width   = args->width;
            dim.height  = args->height;
            dim.x       = args->xOut;
            dim.y       = args->yOut;

            if (BufferGfx_setDimensions(dBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Output resolution does not fit in display frame\n");
                goto cleanup;
            }
        }
    }

    if (args->smooth) {
        if (Smooth_config(hSmooth,
                          BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure smooth job\n");
            goto cleanup;
        }
    }
    else {
        /* Configure the frame copy job */
        if (Framecopy_config(hFc, BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure frame copy job\n");
            goto cleanup;
        }
    }

    while (numFrame++ < args->numFrames || args->numFrames == 0) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Get a captured frame from the capture device */
        if (Capture_get(hCapture, &cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get capture buffer\n");
            goto cleanup;
        }

        /* Get a frame from the display device to be filled with data */
        if (Display_get(hDisplay, &dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }
        }

        if (args->smooth) {
            /*
             * Remove interlacing artifacts from the captured buffer and
             * store the result in the display buffer.
             */
            if (Smooth_execute(hSmooth, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute smooth job\n");
                goto cleanup;
            }
        }
        else {
            /* Copy the captured buffer to the display buffer */
            if (Framecopy_execute(hFc, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute frame copy job\n");
                goto cleanup;
            }
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Smooth / Framecopy: %uus ", (Uns) time);
        }

        /* Give captured buffer back to the capture device driver */
        if (Capture_put(hCapture, cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put capture buffer\n");
            goto cleanup;
        }

        /* Send filled buffer to display device driver to be displayed */
        if (Display_put(hDisplay, dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("Frame time: %uus\n", (Uns) time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hSmooth) {
        Smooth_delete(hSmooth);
    }

    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    if (hCapBufTab) {
        BufTab_delete(hCapBufTab);
    }

    if (hDisBufTab) {
        BufTab_delete(hDisBufTab);
    }

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
Example 4
Void *writerThrFxn(Void *arg)
{
    WriterEnv *envp = (WriterEnv *) arg;
    Void *status = THREAD_SUCCESS;
    FILE *outFile = NULL;
    Buffer_Attrs bAttrs = Buffer_Attrs_DEFAULT;
    BufTab_Handle hBufTab = NULL;
    Buffer_Handle hOutBuf;
    Int bufIdx;

    /* Initialization */

    /* Open the output video file */
    outFile = fopen(envp->videoFile, "w");
    if (outFile == NULL) {
        ERR("Failed to open %s for writing\n", envp->videoFile);
        cleanup(THREAD_FAILURE);
    }

    /* Create buftab for video thread */
    hBufTab = BufTab_create(NUM_WRITER_BUFS, envp->outBufSize, &bAttrs);
    if (hBufTab == NULL) {
        ERR("Failed to allocate contiguous buffers\n");
        cleanup(THREAD_FAILURE);
    }

    /* Send all buffers to the video thread to be filled with encoded data */
    for (bufIdx = 0; bufIdx < NUM_WRITER_BUFS; bufIdx++) {
        if (Fifo_put(envp->hWriterOutFifo, BufTab_getBuf(hBufTab, bufIdx)) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while(1) {

        /* Get an encoded buffer from the video thread */
        if (Fifo_get(envp->hWriterInFifo, &hOutBuf) < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (fwrite(Buffer_getUserPtr(hOutBuf),
                       Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                ERR("Error writing the encoded data to video file\n");
                cleanup(THREAD_FAILURE);
            }
        }
        else {
            printf("Warning, writer received 0 byte encoded frame\n");
        }

        /* Return the buffer to the video thread to be filled again */
        if (Fifo_put(envp->hWriterOutFifo, hOutBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

    }

cleanup:

    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (outFile) {
        fclose(outFile);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    
    return status;
}
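
The cleanup(THREAD_FAILURE) calls in this and the following thread functions
rely on a helper macro defined in the demo headers, which is not part of these
listings. A plausible definition, assuming only the behavior visible here
(record the exit status and jump to the cleanup: label); the real demo macro
may additionally signal the other threads to quit:

/* Assumed helper: record the thread status and jump to the cleanup label */
#define cleanup(x)  \
    status = (x);   \
    goto cleanup
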
Example 5
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv           *envp     = (CaptureEnv *) arg;
    Void                 *status   = THREAD_SUCCESS;
    Capture_Attrs         cAttrs   = Capture_Attrs_DM365_DEFAULT;
    Display_Attrs         dAttrs   = Display_Attrs_DM365_VID_DEFAULT;
    BufferGfx_Attrs       gfxAttrs = BufferGfx_Attrs_DEFAULT;    
    Capture_Handle        hCapture = NULL;
    Display_Handle        hDisplay = NULL;
    BufTab_Handle         hBufTab  = NULL;
    BufTab_Handle         hDispBufTab = NULL;
    BufTab_Handle         hFifoBufTab = NULL;
    Buffer_Handle         hDstBuf, hCapBuf, hDisBuf, hBuf;
    BufferGfx_Dimensions  capDim;
    VideoStd_Type         videoStd;
    Int32                 width, height, bufSize;
    Int                   fifoRet;
    ColorSpace_Type       colorSpace = ColorSpace_YUV420PSEMI;
    Int                   bufIdx;
    Int                   numCapBufs;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;
    cAttrs.videoStd = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    videoStd = envp->videoStd;

    /* We only support D1, 720P and 1080I input */
    if (videoStd != VideoStd_D1_NTSC && videoStd != VideoStd_D1_PAL &&
        videoStd != VideoStd_720P_60 && videoStd != VideoStd_720P_50 &&
        videoStd != VideoStd_1080I_30) {
        ERR("Need D1/720P/1080I input to this demo\n");
        cleanup(THREAD_FAILURE);
    }
    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (VideoStd_getResolution(videoStd, &width, &height) < 0) {
            ERR("Failed to calculate resolution of video standard\n");
            cleanup(THREAD_FAILURE);
        }

        if (width < envp->imageWidth || height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(envp->imageWidth,32);
        capDim.x          = 0;
        capDim.y          = 0;
        capDim.height     = envp->imageHeight;
        capDim.width      = envp->imageWidth;
        capDim.lineLength = BufferGfx_calcLineLength(capDim.width, colorSpace);
    } 
    else {
        /* Calculate the dimensions of a video standard given a color space */
        if (BufferGfx_calcDimensions(videoStd, colorSpace, &capDim) < 0) {
            ERR("Failed to calculate Buffer dimensions\n");
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        capDim.width      = Dmai_roundUp(capDim.width,32);
        envp->imageWidth  = capDim.width;
        envp->imageHeight = capDim.height;
    }

    numCapBufs = NUM_CAPTURE_BUFS;

    gfxAttrs.dim.height = capDim.height;
    gfxAttrs.dim.width = capDim.width;
    gfxAttrs.dim.lineLength = 
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                     colorSpace), 32);
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    } 
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }

    /* Create a table of buffers to use with the capture driver */
    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(numCapBufs, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use to prime Fifo to video thread */
    hFifoBufTab = BufTab_create(VIDEO_PIPE_SIZE, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hFifoBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Update global data for user interface */
    gblSetImageWidth(envp->imageWidth);
    gblSetImageHeight(envp->imageHeight);

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    /* Capture at half frame rate if using COMPONENT input at 720P */
    if ((envp->videoStd == VideoStd_720P_60) 
        && (envp->videoInput == Capture_Input_COMPONENT)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }

    /* If it is component input and the video std is 1080I_30, make it 1080I_60. */
    if (cAttrs.videoStd == VideoStd_1080I_30 && cAttrs.videoInput 
                        == Capture_Input_COMPONENT) {
        cAttrs.videoStd = VideoStd_1080I_60;
    }

    cAttrs.numBufs    = NUM_CAPTURE_BUFS;    
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &gfxAttrs.dim;
    /* Create the capture device driver instance */
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device. Is video input connected?\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use with the display driver */
    hDispBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hDispBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create display device driver instance if preview is needed */
    if (!envp->previewDisabled) {
        dAttrs.videoStd = envp->videoStd;
        if ( (dAttrs.videoStd == VideoStd_CIF) ||
            (dAttrs.videoStd == VideoStd_SIF_NTSC) ||
            (dAttrs.videoStd == VideoStd_SIF_PAL) ||
            (dAttrs.videoStd == VideoStd_VGA) ||
            (dAttrs.videoStd == VideoStd_D1_NTSC) ||        
            (dAttrs.videoStd == VideoStd_D1_PAL) ) {
            dAttrs.videoOutput = Display_Output_COMPOSITE;
        } else {
            dAttrs.videoOutput = Display_Output_COMPONENT;
        }    
        dAttrs.numBufs    = NUM_DISPLAY_BUFS;
        dAttrs.colorSpace = colorSpace;
        dAttrs.width = capDim.width;
        dAttrs.height = capDim.height;
        hDisplay = Display_create(hDispBufTab, &dAttrs);

        if (hDisplay == NULL) {
            ERR("Failed to create display device\n");
            cleanup(THREAD_FAILURE);
        }
    }

    for (bufIdx = 0; bufIdx < VIDEO_PIPE_SIZE; bufIdx++) {
        /* Queue the video buffers for main thread processing */
        hBuf = BufTab_getFreeBuf(hFifoBufTab);
        if (hBuf == NULL) {
            ERR("Failed to fill video pipeline\n");
            cleanup(THREAD_FAILURE);            
        }

        /* Fill the buffer with black */
        CapBuf_blackFill(hBuf, 1);

        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }
    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Capture a frame */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the display device */
        if ((!envp->previewDisabled) && (Display_get(hDisplay, &hDisBuf) < 0)) {
            ERR("Failed to get display buffer\n");
            cleanup(THREAD_FAILURE);
        }
        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        if (!envp->previewDisabled) {
            /* Release buffer to the display device driver */
            if (Display_put(hDisplay, hDstBuf) < 0) {
                ERR("Failed to put display buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->previewDisabled) {
            /* Return the processed buffer to the capture driver */
            if (Capture_put(hCapture, hDstBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            } 
        }
        else {
            /* Return the displayed buffer to the capture driver */
            if (Capture_put(hCapture, hDisBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();

    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }
    
    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    if (hFifoBufTab) {
        BufTab_delete(hFifoBufTab);
    }
    if (hDispBufTab) {
        BufTab_delete(hDispBufTab);
    }

    return status;
}
Example 6
/*****************************************************************************
 * gst_tidmaiaccel_prepare_output_buffer
 *    Allocates the output buffer for the transform
 *****************************************************************************/
static GstFlowReturn gst_tidmaiaccel_prepare_output_buffer (GstBaseTransform
    *trans, GstBuffer *inBuf, gint size, GstCaps *caps, GstBuffer **outBuf)
{
    GstTIDmaiaccel *dmaiaccel = GST_TIDMAIACCEL(trans);
    Buffer_Handle   hOutBuf;
    Bool isContiguous = FALSE;
    UInt32 phys = 0;

    /* Always check if the buffer is contiguous */
    phys = Memory_getBufferPhysicalAddress(
                    GST_BUFFER_DATA(inBuf),
                    GST_BUFFER_SIZE(inBuf),
                    &isContiguous);

    if (isContiguous && dmaiaccel->width){
        GST_DEBUG("Is contiguous video buffer");

        Memory_registerContigBuf((UInt32)GST_BUFFER_DATA(inBuf),
            GST_BUFFER_SIZE(inBuf),phys);
        /* This is a contiguous buffer, create a dmai buffer transport */
        BufferGfx_Attrs gfxAttrs    = BufferGfx_Attrs_DEFAULT;

        gfxAttrs.bAttrs.reference   = TRUE;
        gfxAttrs.dim.width          = dmaiaccel->width;
        gfxAttrs.dim.height         = dmaiaccel->height;
        gfxAttrs.colorSpace         = dmaiaccel->colorSpace;
        gfxAttrs.dim.lineLength     = dmaiaccel->lineLength;

        hOutBuf = Buffer_create(GST_BUFFER_SIZE(inBuf), &gfxAttrs.bAttrs);
        if (hOutBuf == NULL) {
            GST_ELEMENT_ERROR(dmaiaccel,RESOURCE,NO_SPACE_LEFT,(NULL),
                ("failed to create buffer transport for contiguous buffer"));
            return GST_FLOW_ERROR;
        }
        BufferGfx_setDimensions(hOutBuf,&gfxAttrs.dim);
        BufferGfx_setColorSpace(hOutBuf,gfxAttrs.colorSpace);
        Buffer_setUserPtr(hOutBuf, (Int8*)GST_BUFFER_DATA(inBuf));
        Buffer_setNumBytesUsed(hOutBuf, GST_BUFFER_SIZE(inBuf));
        *outBuf = gst_tidmaibuffertransport_new(hOutBuf, NULL, NULL, FALSE);
        gst_buffer_set_data(*outBuf, (guint8*) Buffer_getUserPtr(hOutBuf),
            Buffer_getSize(hOutBuf));
        gst_buffer_copy_metadata(*outBuf,inBuf,GST_BUFFER_COPY_ALL);
        gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

        /* We need to grab a reference to the input buffer since we keep
         * a pointer to its data */
        gst_buffer_ref(inBuf);

        gst_tidmaibuffertransport_set_release_callback(
            (GstTIDmaiBufferTransport *)*outBuf,
            dmaiaccel_release_cb,inBuf);

        return GST_FLOW_OK;
    } else {
        GST_DEBUG("Copying into contiguous video buffer");
        /* The input buffer is not contiguous; copy it into a contiguous
         * DMAI buffer from our own buffer tab */
        if (!dmaiaccel->bufTabAllocated){
            /* Initialize our buffer tab */
            BufferGfx_Attrs gfxAttrs    = BufferGfx_Attrs_DEFAULT;

            gfxAttrs.dim.width          = dmaiaccel->width;
            gfxAttrs.dim.height         = dmaiaccel->height;
            gfxAttrs.colorSpace         = dmaiaccel->colorSpace;
            gfxAttrs.dim.lineLength     = dmaiaccel->lineLength;

            dmaiaccel->hOutBufTab =
                        BufTab_create(2, GST_BUFFER_SIZE(inBuf),
                            BufferGfx_getBufferAttrs(&gfxAttrs));
            pthread_mutex_init(&dmaiaccel->bufTabMutex, NULL);
            pthread_cond_init(&dmaiaccel->bufTabCond, NULL);
            if (dmaiaccel->hOutBufTab == NULL) {
                GST_ELEMENT_ERROR(dmaiaccel,RESOURCE,NO_SPACE_LEFT,(NULL),
                    ("failed to create output buffer tab"));
                return GST_FLOW_ERROR;
            }
            dmaiaccel->bufTabAllocated = TRUE;
        }

        pthread_mutex_lock(&dmaiaccel->bufTabMutex);
        hOutBuf = BufTab_getFreeBuf(dmaiaccel->hOutBufTab);
        if (hOutBuf == NULL) {
            GST_INFO("Failed to get free buffer, waiting on bufTab\n");
            pthread_cond_wait(&dmaiaccel->bufTabCond, &dmaiaccel->bufTabMutex);

            hOutBuf = BufTab_getFreeBuf(dmaiaccel->hOutBufTab);

            if (hOutBuf == NULL) {
                GST_ELEMENT_ERROR(dmaiaccel,RESOURCE,NO_SPACE_LEFT,(NULL),
                    ("failed to get a free contiguous buffer from BufTab"));
                pthread_mutex_unlock(&dmaiaccel->bufTabMutex);
                return GST_FLOW_ERROR;
            }
        }
        pthread_mutex_unlock(&dmaiaccel->bufTabMutex);

        memcpy(Buffer_getUserPtr(hOutBuf),GST_BUFFER_DATA(inBuf),
            GST_BUFFER_SIZE(inBuf));
        Buffer_setNumBytesUsed(hOutBuf, GST_BUFFER_SIZE(inBuf));
        *outBuf = gst_tidmaibuffertransport_new(hOutBuf, &dmaiaccel->bufTabMutex,
            &dmaiaccel->bufTabCond, FALSE);
        gst_buffer_set_data(*outBuf, (guint8*) Buffer_getUserPtr(hOutBuf),
            Buffer_getSize(hOutBuf));
        gst_buffer_copy_metadata(*outBuf,inBuf,GST_BUFFER_COPY_ALL);
        gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

        return GST_FLOW_OK;
    }
}
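
For context, a brief sketch (an assumption, not taken from the plugin sources)
of how such a prepare_output_buffer callback is typically registered on a
GstBaseTransform subclass in GStreamer 0.10:

static void gst_tidmaiaccel_class_init (GstTIDmaiaccelClass *klass)
{
    GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);

    /* Let the base transform call our allocator for each output buffer */
    trans_class->prepare_output_buffer =
        GST_DEBUG_FUNCPTR (gst_tidmaiaccel_prepare_output_buffer);

    /* Other vfuncs (transform, set_caps, ...) are omitted in this sketch */
}
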
Example 7
Void *videoThrFxn(Void *arg)
{
	VideoEnv *envp = (VideoEnv *) arg;

	Venc1_Handle hVe1 = NULL;
	VIDENC1_Params params = Venc1_Params_DEFAULT;
	VIDENC1_DynamicParams dynParams = Venc1_DynamicParams_DEFAULT;
	IH264VENC_Params h264Params = IH264VENC_PARAMS;
	IH264VENC_DynamicParams h264DynParams = H264VENC_TI_IH264VENC_DYNAMICPARAMS;
	VUIParamBuffer VUI_Buffer = H264VENC_TI_VUIPARAMBUFFER;

	BufTab_Handle hVidBufTab = NULL;
	Buffer_Handle hVInBuf, hWOutBuf;
	BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;

	ColorSpace_Type colorSpace = ColorSpace_YUV420PSEMI;

	Int bufSize = 0;

	Void *status = THREAD_SUCCESS;

	/* Initialization */
	params.maxWidth = envp->imageWidth;
	params.maxHeight = envp->imageHeight;
	params.inputChromaFormat = XDM_YUV_420SP;
	params.reconChromaFormat = XDM_YUV_420SP;
	params.maxFrameRate = envp->videoFrameRate;
	params.encodingPreset = XDM_USER_DEFINED;
	params.rateControlPreset = IVIDEO_USER_DEFINED;
	params.maxBitRate = 10000000;

	dynParams.targetBitRate = envp->videoBitRate*0.9;
	dynParams.inputWidth = envp->imageWidth;
	dynParams.captureWidth = Dmai_roundUp(BufferGfx_calcLineLength(envp->imageWidth, colorSpace), 32);
	dynParams.inputHeight = envp->imageHeight;
	dynParams.refFrameRate = params.maxFrameRate;
	dynParams.targetFrameRate = params.maxFrameRate;
	dynParams.intraFrameInterval = 0;
	dynParams.interFrameInterval = 0;

	h264Params.videncParams = params;
	h264Params.videncParams.size = sizeof(IH264VENC_Params);
	h264Params.encQuality = 1;
	h264Params.enableDDRbuff = 1; /* Uses DDR instead of VICP buffers */
	h264Params.enableARM926Tcm = 0;
	h264Params.enableVUIparams = (0x1 << 1);
	h264Params.videncParams.inputContentType = IVIDEO_PROGRESSIVE;

	h264DynParams.videncDynamicParams = dynParams;
	h264DynParams.videncDynamicParams.size = sizeof(IH264VENC_DynamicParams);

	h264DynParams.VUI_Buffer = &VUI_Buffer;
	h264DynParams.VUI_Buffer->aspectRatioInfoPresentFlag = 1;
	h264DynParams.VUI_Buffer->overscanInfoPresentFlag = 0;
	h264DynParams.VUI_Buffer->videoSignalTypePresentFlag = 0;
	h264DynParams.VUI_Buffer->timingInfoPresentFlag = 1;
	h264DynParams.VUI_Buffer->numUnitsInTicks = 1;
	h264DynParams.VUI_Buffer->timeScale = params.maxFrameRate / 1000;
	h264DynParams.VUI_Buffer->fixedFrameRateFlag = 1; 
	h264DynParams.VUI_Buffer->nalHrdParameterspresentFlag = 1;
	h264DynParams.VUI_Buffer->picStructPresentFlag = 1;

	h264DynParams.idrFrameInterval = 15;

	hVe1 = Venc1_create(envp->hEngine, envp->videoEncoder,
			(IVIDENC1_Params *) &h264Params,
			(IVIDENC1_DynamicParams *) &h264DynParams);
	if (hVe1 == NULL) {
		ERR("Failed to create video encoder: %s\n", envp->videoEncoder);
		cleanup(THREAD_FAILURE);
	}

	/* Store the output buffer size in the environment */
	envp->outBufSize = Venc1_getOutBufSize(hVe1);

	/* Signal that the codec is created and output buffer size available */
	Rendezvous_meet(envp->hRendezvousWriter);

	/* Video BufTab create */
	BufferGfx_calcDimensions(VideoStd_D1_PAL, colorSpace, &gfxAttrs.dim);
	gfxAttrs.dim.width = 704;
	gfxAttrs.dim.height = 576;
	gfxAttrs.dim.lineLength = Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, colorSpace), 32);
	gfxAttrs.colorSpace = colorSpace;
	bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
	hVidBufTab = BufTab_create(NUM_VIDEO_BUFS, bufSize, BufferGfx_getBufferAttrs(&gfxAttrs));
	if (hVidBufTab == NULL) {
		ERR("Failed to create video buftab\n");
		cleanup(THREAD_FAILURE);
	}

	/* Set input buffer table */
	Venc1_setBufTab(hVe1, hVidBufTab);

	/* Send video buffers to DEI */
	Int nBufId = 0;
	for (nBufId = 0; nBufId < NUM_VIDEO_BUFS; nBufId++) {
		hVInBuf = BufTab_getBuf(hVidBufTab, nBufId);
		if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}
	}

	/* Signal that initialization is done and wait for other threads */
	Rendezvous_meet(envp->hRendezvousInit);

	while(1) {

		/* Get buffer from DEI thread */
		if(Fifo_get(envp->hVideoInFifo, &hVInBuf) < 0) {
			ERR("Failed to get buffer from dei thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Get buffer from Writer thread */
		if(Fifo_get(envp->hWriterOutFifo, &hWOutBuf) < 0) {
			ERR("Failed to get buffer from writer thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Make sure the whole buffer is used for input */
		BufferGfx_resetDimensions(hVInBuf);

		/* Encode */
		if (Venc1_process(hVe1, hVInBuf, hWOutBuf) < 0) {
			ERR("Failed to encode video buffer\n");
			cleanup(THREAD_FAILURE);
		}

		/* Put buffer to dei thread */
		if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Put buffer to writer thread */
		if (Fifo_put(envp->hWriterInFifo, hWOutBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}

	}

cleanup:

	/* Make sure the other threads aren't waiting for us */
	Rendezvous_force(envp->hRendezvousInit);
	Rendezvous_force(envp->hRendezvousWriter);

	/* Make sure the other threads aren't waiting for init to complete */
	Rendezvous_meet(envp->hRendezvousCleanup);

	if (hVidBufTab) {
		BufTab_delete(hVidBufTab);
	}

	if (hVe1) {
		Venc1_delete(hVe1);
	}

	return status;
}