/******************************************************************************
 * gst_ticircbuffer_shift_data
 *    Look for uncopied data in the last window and move it to the first one.
 ******************************************************************************/
static gboolean gst_ticircbuffer_shift_data(GstTICircBuffer *circBuf)
{
    Int8*     firstWindow   = Buffer_getUserPtr(circBuf->hBuf);
    Int32     lastWinOffset = Buffer_getSize(circBuf->hBuf) -
                              (circBuf->windowSize + circBuf->readAheadSize);
    Int8*     lastWindow    = firstWindow + lastWinOffset;
    Int32     bytesToCopy   = 0;
    gboolean  writePtrReset = FALSE;

    /* In fixedBlockSize mode, just wait until the write pointer reaches the
     * end of the buffer and then reset it to the beginning (no copying).
     */
    if (circBuf->fixedBlockSize && circBuf->writePtr >= circBuf->readPtr) {
        if (circBuf->writePtr == firstWindow + Buffer_getSize(circBuf->hBuf)) {
            GST_LOG("resetting write pointer (%lu->0)\n",
                (UInt32)(circBuf->writePtr - firstWindow));
            circBuf->writePtr       = Buffer_getUserPtr(circBuf->hBuf);
            circBuf->contiguousData = FALSE;
        }
        return TRUE;
    }

    /* Otherwise copy unconsumed data from the last window to the first one
     * and reset the write pointer back to the first window.
     */
    if (gst_ticircbuffer_first_window_free(circBuf) &&
        circBuf->writePtr >= circBuf->readPtr       &&
        circBuf->writePtr >= lastWindow + circBuf->windowSize)
    {

        bytesToCopy = circBuf->writePtr - lastWindow;

        GST_LOG("shifting %lu bytes of data from %lu to 0\n", bytesToCopy,
            (UInt32)(lastWindow - firstWindow));

        if (bytesToCopy > 0) {
            memcpy(firstWindow, lastWindow, bytesToCopy);
        }

        GST_LOG("resetting write pointer (%lu->%lu)\n",
            (UInt32)(circBuf->writePtr - firstWindow),
            (UInt32)(circBuf->writePtr - (lastWindow - firstWindow) -
                     firstWindow));
        circBuf->writePtr       -= (lastWindow - firstWindow);
        circBuf->contiguousData  = FALSE;
        writePtrReset            = TRUE;

        /* The queue function will not unblock the consumer until there is
         * at least windowSize + readAhead available, but if the read pointer
         * is toward the end of the buffer, we may never get more than just
         * windowSize available and will deadlock.  To avoid this situation,
         * wake the consumer after shifting data so it has an opportunity to
         * process the last window in the buffer and reset itself to the
         * beginning of the buffer.
         */
        gst_ticircbuffer_broadcast_producer(circBuf);
    }
    return writePtrReset;
}
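/******************************************************************************
 * Hypothetical sketch (not part of the original sources): illustrates, with
 * plain integers instead of DMAI calls, the window layout that lastWinOffset
 * in gst_ticircbuffer_shift_data assumes.  The sizes are made-up example
 * values; the bufSize formula matches gst_ticircbuffer_new further below.
 ******************************************************************************/
#include <assert.h>

void window_layout_sketch(void)
{
    long windowSize    = 4096;
    long readAheadSize = windowSize >> 2;   /* 1/4 window, as in _new() */
    long numWindows    = 3;
    long bufSize       = numWindows * windowSize + (readAheadSize << 1);

    /* The last window starts one (window + read-ahead) before the end of
     * the buffer, i.e. after two full windows plus one read-ahead region.
     */
    long lastWinOffset = bufSize - (windowSize + readAheadSize);

    assert(lastWinOffset == 2 * windowSize + readAheadSize);
}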
/******************************************************************************
 * Framecopy_resizer_accel_execute
 ******************************************************************************/
Int Framecopy_resizer_accel_execute(Framecopy_Handle hFc,
                                    Buffer_Handle hSrcBuf,
                                    Buffer_Handle hDstBuf)
{
#if defined(CONFIG_VIDEO_OMAP34XX_ISP_RESIZER)
    Int         i;
    struct      v4l2_buffer qbuf[2];

    assert(hFc);
    assert(hSrcBuf);
    assert(hDstBuf);

    /* Pointers must be a multiple of 32 bytes */
    assert((Buffer_getPhysicalPtr(hDstBuf) & 0x1F) == 0);
    assert((Buffer_getPhysicalPtr(hSrcBuf) & 0x1F) == 0);

    /* Queue the resizer buffers */
    for (i=0; i < 2; i++) { 

        qbuf[i].type         = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        qbuf[i].memory       = V4L2_MEMORY_USERPTR;
        qbuf[i].index        = i;

        if (ioctl (hFc->fd, RSZ_QUERYBUF, &qbuf[i]) == -1) {
            Dmai_err1("Failed to query buffer index %d\n", i);
            return Dmai_EFAIL;
        }

        if (i == 0) {
            qbuf[i].m.userptr = (unsigned long) Buffer_getUserPtr(hSrcBuf);
        }
        else {
            qbuf[i].m.userptr = (unsigned long) Buffer_getUserPtr(hDstBuf);
        }

        if (ioctl (hFc->fd, RSZ_QUEUEBUF, &qbuf[i]) == -1) {
            Dmai_err1("Failed to queue buffer index %d\n",i);
            return Dmai_EFAIL;
        }
    }

    if (ioctl(hFc->fd, RSZ_RESIZE, NULL) == -1) {
        Dmai_err0("Failed to execute resize job\n");
        return Dmai_EFAIL;
    }

    Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));

    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif /* end CONFIG_VIDEO_OMAP34XX_ISP_RESIZER */
}
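/******************************************************************************
 * Hypothetical usage sketch (not part of the original sources): the
 * accelerated execute above is normally reached through the generic DMAI
 * Framecopy API, configuring once and then executing per frame, as the video
 * sink example further below does.  Framecopy_create, Framecopy_Attrs_DEFAULT
 * and Framecopy_delete are assumed to be available from DMAI; error handling
 * is kept minimal.
 ******************************************************************************/
Int framecopy_sketch(Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
    Framecopy_Attrs  fcAttrs = Framecopy_Attrs_DEFAULT;
    Framecopy_Handle hFc     = Framecopy_create(&fcAttrs);
    Int              ret     = Dmai_EOK;

    if (hFc == NULL) {
        return Dmai_EFAIL;
    }

    /* Configure once for this source/destination geometry ... */
    if (Framecopy_config(hFc, hSrcBuf, hDstBuf) < 0) {
        ret = Dmai_EFAIL;
    }
    /* ... then execute once per frame */
    else if (Framecopy_execute(hFc, hSrcBuf, hDstBuf) < 0) {
        ret = Dmai_EFAIL;
    }

    Framecopy_delete(hFc);

    return ret;
}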
Example #3
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#ifdef CONFIG_DM365_IPIPE
    struct imp_convert  rsz;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    UInt32               srcOffset;
    UInt32               dstOffset;
    
    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
    dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);

    rsz.in_buff.index     = -1;
    rsz.in_buff.buf_type  = IMP_BUF_IN;
    rsz.in_buff.offset    = (UInt32) Buffer_getUserPtr(hSrcBuf) + srcOffset;
    rsz.in_buff.size      = Buffer_getSize(hSrcBuf);

    rsz.out_buff1.index    = -1;
    rsz.out_buff1.buf_type = IMP_BUF_OUT1;
    rsz.out_buff1.offset   = (UInt32) Buffer_getUserPtr(hDstBuf) + dstOffset;
    rsz.out_buff1.size     = Buffer_getSize(hDstBuf);
    
    /* 
     * The IPIPE requires that the memory offsets of the input and output
     * buffers start on 32-byte boundaries.
     */
    assert((rsz.in_buff.offset  & 0x1F) == 0);
    assert((rsz.out_buff1.offset & 0x1F) == 0);

    /* Start IPIPE operation */
    if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
        Dmai_err0("Failed RSZ_RESIZE\n");
        return Dmai_EFAIL;
    }

    Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));
    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif
}
Example #4
/******************************************************************************
 * readFrame420P
 ******************************************************************************/
Int readFrame420P(Buffer_Handle hBuf, FILE *outFile, Int imageHeight)
{
    Int8 *yPtr = Buffer_getUserPtr(hBuf);
    Int8 *cbcrPtr;
    Int y, x;

    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);

    /* Write Y plane */
    for (y = 0; y < imageHeight; y++) {
        if (fread(yPtr, dim.width, 1, outFile) != 1) {
            fprintf(stderr,"Failed to read data from file\n");
            return -1;
        }

        yPtr += dim.lineLength;
    }

    /* Read the Cb plane from file, interleaving it into the CbCr plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + Buffer_getSize(hBuf) * 2 / 3;
    for (y = 0; y < imageHeight / 2; y++) {
        for (x = 0; x < dim.width; x += 2) {
            if (fread(&cbcrPtr[x], 1, 1, outFile) != 1) {
                fprintf(stderr,"Failed to read data from file\n");
                return -1;
            }
        }
        cbcrPtr += dim.lineLength;
    }

    /* Read the Cr plane from file, interleaving it into the CbCr plane */
    cbcrPtr = Buffer_getUserPtr(hBuf) + Buffer_getSize(hBuf) * 2 / 3;
    for (y = 0; y < imageHeight / 2; y++) {
        for (x = 1; x < dim.width; x += 2) {
            if (fread(&cbcrPtr[x], 1, 1, outFile) != 1) {
                fprintf(stderr,"Failed to read data from file\n");
                return -1;
            }
        }
        cbcrPtr += dim.lineLength;
    }

    printf("Read 420P frame size %d (%dx%d) from file\n",
           (Int) (dim.width * 3 / 2 * imageHeight),
           (Int) dim.width, (Int) imageHeight);

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return 0;
}
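/******************************************************************************
 * Hypothetical usage sketch (not part of the original sources): reads a
 * single 420P frame from a file into a semi-planar graphics buffer sized at
 * 3/2 bytes per pixel.  The 720x480 resolution and file handling are example
 * values; Buffer_create and BufferGfx_Attrs_DEFAULT are used the same way as
 * elsewhere in this listing.
 ******************************************************************************/
Int readFrame420P_sketch(const char *fileName)
{
    BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;
    Buffer_Handle   hBuf     = NULL;
    FILE           *inFile   = NULL;
    Int             width    = 720;
    Int             height   = 480;
    Int             ret      = -1;

    gfxAttrs.dim.width      = width;
    gfxAttrs.dim.height     = height;
    gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(width,
                                  ColorSpace_YUV420PSEMI);
    gfxAttrs.colorSpace     = ColorSpace_YUV420PSEMI;

    hBuf = Buffer_create(width * height * 3 / 2,
                         BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBuf == NULL) {
        return -1;
    }

    inFile = fopen(fileName, "rb");

    if (inFile) {
        ret = readFrame420P(hBuf, inFile, height);
        fclose(inFile);
    }

    Buffer_delete(hBuf);

    return ret;
}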
int write2_yuv_422(Buffer_Handle hCapBuf, int width, int height)
{
	FILE *fp;
	unsigned char *pY, *pU, *pV;
	unsigned int i;
	static int g_time = 0;
	char buf[100];

	/* Note: the line length and plane offsets below are hard-coded for
	 * this particular capture configuration.
	 */
	int linlen = 640;
	int j;

	sprintf(buf, "/%dx%d_%d.yuv", width, height, g_time++);
	printf("buf = %s,width=%d,height=%d\n", buf, width, height);

	fp = fopen(buf, "wb");
	if (fp == NULL) {
		fprintf(stderr, "Failed to open %s for writing\n", buf);
		return 0;
	}

	pY = (unsigned char *)Buffer_getUserPtr(hCapBuf);
	pU = (unsigned char *)Buffer_getUserPtr(hCapBuf) + 640 * 64;
	pV = (unsigned char *)Buffer_getUserPtr(hCapBuf) + 640 * 64 + 1;

	/* Write the Y plane line by line, skipping the padding at the end of
	 * each line.
	 */
	for (i = 0; i < height; i++) {
		for (j = 0; j < width; j++) {
			fputc(*pY, fp);
			pY++;
		}
		pY += linlen - width;
	}

	/* Write the Cb samples from the interleaved CbCr plane */
	for (i = 0; i < height * width / 2; i++) {
		fputc(*pU, fp);
		pU += 2;
	}

	/* Write the Cr samples from the interleaved CbCr plane */
	for (i = 0; i < height * width / 2; i++) {
		fputc(*pV, fp);
		pV += 2;
	}

	fclose(fp);
	printf("write over  height=%d   width=%d   time = %d\n", height, width, g_time);
	return 1;
}
/******************************************************************************
 * Display_v4l2_put
 ******************************************************************************/
Int Display_v4l2_put(Display_Handle hDisplay, Buffer_Handle hBuf)
{
    Int idx;

    assert(hDisplay);
    assert(hBuf);

    idx = getUsedIdx(hDisplay->bufDescs, BufTab_getNumBufs(hDisplay->hBufTab));

    if (idx < 0) {
        Dmai_err0("No v4l2 buffers available\n");
        return Dmai_ENOMEM;
    }

    hDisplay->bufDescs[idx].v4l2buf.m.userptr = (Int)Buffer_getUserPtr(hBuf);
    hDisplay->bufDescs[idx].v4l2buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

    /* Issue captured frame buffer back to device driver */
    if (ioctl(hDisplay->fd, VIDIOC_QBUF,
              &hDisplay->bufDescs[idx].v4l2buf) == -1) {
        Dmai_err1("VIDIOC_QBUF failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    hDisplay->bufDescs[idx].hBuf = hBuf;
    hDisplay->bufDescs[idx].used = FALSE;

    return Dmai_EOK;
}
Example #7
/******************************************************************************
 * readFrameUYVY
 ******************************************************************************/
Int readFrameUYVY(Buffer_Handle hBuf, FILE *outFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);
    Int y;

    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);

    for (y = 0; y < dim.height; y++) {
        if (fread(ptr, dim.width * 2, 1, outFile) != 1) {
            fprintf(stderr,"Failed to read data from file\n");
            return -1;
        }

        ptr += dim.lineLength;
    }

    printf("Read UYVY frame size %d (%dx%d) from file\n",
           (Int) (dim.width * 2 * dim.height),
           (Int) dim.width, (Int) dim.height);

    Buffer_setNumBytesUsed(hBuf, dim.width * 2 * dim.height);

    return 0;
}
Example #8
/*****************************************************************************
 * gst_tiprepencbuf_prepare_output_buffer
 *    Allocate an output buffer for the transform
 *****************************************************************************/
static GstFlowReturn
gst_tiprepencbuf_prepare_output_buffer(GstBaseTransform * trans,
    GstBuffer * inBuf, gint size, GstCaps * caps, GstBuffer ** outBuf)
{
    GstTIPrepEncBuf *prepencbuf = GST_TIPREPENCBUF(trans);
    Buffer_Handle    hOutBuf;

    GST_LOG("begin prepare output buffer\n");

    /* Get free buffer from buftab */
    if (!(hOutBuf = gst_tidmaibuftab_get_buf(prepencbuf->hOutBufTab))) {
        GST_ELEMENT_ERROR(prepencbuf, RESOURCE, READ,
            ("failed to get free buffer\n"), (NULL));
        return GST_FLOW_ERROR;
    }

    /* Create a DMAI transport buffer object to carry a DMAI buffer to
     * the source pad.  The transport buffer knows how to release the
     * buffer for re-use in this element when the source pad calls
     * gst_buffer_unref().
     */
    GST_LOG("creating dmai transport buffer\n");
    *outBuf = gst_tidmaibuffertransport_new(hOutBuf, prepencbuf->hOutBufTab, NULL, NULL);
    gst_buffer_set_data(*outBuf, (guint8 *) Buffer_getUserPtr(hOutBuf),
        Buffer_getSize(hOutBuf));
    gst_buffer_set_caps(*outBuf, GST_PAD_CAPS(trans->srcpad));

    GST_LOG("end prepare output buffer\n");

    return GST_FLOW_OK;
}
Example #9
/******************************************************************************
 * Sound_alsa_read
 ******************************************************************************/
Int Sound_alsa_read(Sound_Handle hSound, Buffer_Handle hBuf)
{
    Int32 numSamples, readSamples;
    Int8 *bufPtr;

    assert(hSound);
    assert(hBuf);

    readSamples = Buffer_getNumBytesUsed(hBuf) / (2 * hSound->channels);
    bufPtr = Buffer_getUserPtr(hBuf);

    while (readSamples > 0) {
        numSamples = snd_pcm_readi(hSound->rcIn, bufPtr, readSamples);

        if (numSamples == -EAGAIN) 
            continue;

        if (numSamples < 0) {
            if (xrunRecovery(hSound->rcIn,numSamples) < 0) {
                Dmai_err2 ("Failed to read from %s (%s)\n",
                            AUDIO_DEVICE, strerror(-numSamples));
                return Dmai_EFAIL;
            }
        }
        else {
            bufPtr += numSamples * 2 * hSound->channels;
            readSamples -= numSamples;
        }
    }

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return Dmai_EOK;
}
Example #10
/******************************************************************************
 * Capture_put
 ******************************************************************************/
Int Capture_put(Capture_Handle hCapture, Buffer_Handle hBuf)
{
    Int idx;

    assert(hCapture);
    assert(hBuf);

    idx = getUsedIdx(hCapture->bufDescs, BufTab_getNumBufs(hCapture->hBufTab));

    if (idx < 0) {
        Dmai_err0("You must get a captured buffer before putting one\n");
        return Dmai_ENOMEM;
    }

    hCapture->bufDescs[idx].v4l2buf.m.userptr =
        (Int) Buffer_getUserPtr(hBuf);
    hCapture->bufDescs[idx].v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    /* Issue captured frame buffer back to device driver */
    if (ioctl(hCapture->fd, VIDIOC_QBUF,
              &hCapture->bufDescs[idx].v4l2buf) == -1) {
        Dmai_err1("VIDIOC_QBUF failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    hCapture->bufDescs[idx].hBuf = hBuf;
    hCapture->bufDescs[idx].used = FALSE;

    return Dmai_EOK;
}
Example #11
static gboolean gstti_viddec_process(GstTIDmaidec *dmaidec, GstBuffer *encData,
                    Buffer_Handle hDstBuf,gboolean codecFlushed){
    GstTIDmaidecData *decoder;
    Buffer_Handle   hEncData = NULL;
    Int32           encDataConsumed, originalBufferSize;
    Int             ret;

    decoder = (GstTIDmaidecData *)
       g_type_get_qdata(G_OBJECT_CLASS_TYPE(G_OBJECT_GET_CLASS(dmaidec)),
       GST_TIDMAIDEC_PARAMS_QDATA);

    hEncData = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encData);
    g_assert(hEncData != NULL);

    /* Make sure the whole buffer is used for output */
    BufferGfx_resetDimensions(hDstBuf);

    /* Invoke the video decoder */
    originalBufferSize = Buffer_getNumBytesUsed(hEncData);
    GST_DEBUG("invoking the video decoder, with %ld bytes (%p, %p)\n",originalBufferSize,
        Buffer_getUserPtr(hEncData),Buffer_getUserPtr(hDstBuf));
    ret = Vdec_process(dmaidec->hCodec, hEncData, hDstBuf);
    encDataConsumed = (codecFlushed) ? 0 :
        Buffer_getNumBytesUsed(hEncData);

    if (ret < 0) {
        GST_ELEMENT_ERROR(dmaidec,STREAM,DECODE,(NULL),
            ("failed to decode video buffer"));
        return FALSE;
    }

    if (ret == Dmai_EBITERROR){
        GST_ELEMENT_WARNING(dmaidec,STREAM,DECODE,(NULL),
            ("Unable to decode frame with timestamp %"GST_TIME_FORMAT,
                GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(encData))));
        /* We failed to process this buffer, so we need to release it
         * because the codec won't do it.
         */
        GST_DEBUG("Freeing buffer because of bit error on the stream");
        Buffer_freeUseMask(hDstBuf, gst_tidmaibuffertransport_GST_FREE |
           decoder->dops->outputUseMask);
        return FALSE;
    }

    return TRUE;
}
/******************************************************************************
 * gst_ticircbuffer_write_space
 *    Return the free space available in the buffer for CONTIGUOUS WRITING at
 *    the writePtr location.
 ******************************************************************************/
static Int32 gst_ticircbuffer_write_space(GstTICircBuffer *circBuf)
{
    if (circBuf->contiguousData) {
        return (Buffer_getUserPtr(circBuf->hBuf) +
                Buffer_getSize(circBuf->hBuf)) - circBuf->writePtr;
    }

    return circBuf->readPtr - circBuf->writePtr;
}
Example #13
void dmaiaccel_release_cb(gpointer data,
    GstTIDmaiBufferTransport *buf){
    Buffer_Handle hBuf = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(buf);
    GstBuffer *inBuf = (GstBuffer *)data;

    GST_DEBUG("Release callback for dmaiaccel allocated buffer");

    Memory_unregisterContigBuf((UInt32)Buffer_getUserPtr(hBuf),
        Buffer_getSize(hBuf));

    /* Now we can release our input buffer */
    gst_buffer_unref(inBuf);
}
/******************************************************************************
 * gst_ticircbuffer_reset_read_pointer
 *    Reset the read pointer back to the beginning of the buffer.
 ******************************************************************************/
static Int32 gst_ticircbuffer_reset_read_pointer(GstTICircBuffer *circBuf)
{
    Int8  *circBufStart  = Buffer_getUserPtr(circBuf->hBuf);
    Int32  lastWinOffset = Buffer_getSize(circBuf->hBuf) -
                            (circBuf->windowSize + circBuf->readAheadSize);
    Int8  *lastWindow    = circBufStart + lastWinOffset;
    Int32  resetDelta    = lastWindow - circBufStart;

    /* In fixedBlockSize mode, just wait until the read pointer reaches the
     * end of the buffer and then reset it to the beginning.
     */
    if (circBuf->fixedBlockSize && !circBuf->contiguousData) {
        if (circBuf->readPtr == circBufStart + Buffer_getSize(circBuf->hBuf)) {
            GST_LOG("resetting read pointer (%lu->0)\n",
                (UInt32)(circBuf->readPtr - circBufStart));
            circBuf->readPtr        = Buffer_getUserPtr(circBuf->hBuf);
            circBuf->contiguousData = TRUE;
        }
        return 0;
    }

    /* Otherwise, reset it when the read pointer reaches the last window and
     * the last window has already been copied back to the first window.
     */
    if (!circBuf->contiguousData                           &&
         circBuf->readPtr              >  lastWindow       &&
         circBuf->readPtr - resetDelta <= circBuf->writePtr) {

        GST_LOG("resetting read pointer (%lu->%lu)\n",
            (UInt32)(circBuf->readPtr - circBufStart),
            (UInt32)(circBuf->readPtr - resetDelta - circBufStart));
        circBuf->readPtr        -= resetDelta;
        circBuf->contiguousData  = TRUE;
        return TRUE;
    }

    return FALSE;
}
/******************************************************************************
 * cleanup
 ******************************************************************************/
static Int cleanup(Display_Handle hDisplay)
{
    Int                   ret     = Dmai_EOK;
    BufTab_Handle         hBufTab = hDisplay->hBufTab;
    enum v4l2_buf_type    type;
    Int                   bufIdx;
    Buffer_Handle         hDispBuf;

    if (hDisplay->fd != -1) {
        if (hDisplay->started) {
            /* Shut off the video display */
            type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

            if (ioctl(hDisplay->fd, VIDIOC_STREAMOFF, &type) == -1) {
                Dmai_err1("VIDIOC_STREAMOFF failed (%s)\n", strerror(errno));
                ret = Dmai_EFAIL;
            }
        }

        if (close(hDisplay->fd) == -1) {
            Dmai_err1("Failed to close capture device (%s)\n", strerror(errno));
            ret = Dmai_EIO;
        }

        if (hDisplay->userAlloc == FALSE) {
            if (hBufTab) {
                for (bufIdx = 0;
                     bufIdx < BufTab_getNumBufs(hBufTab);
                     bufIdx++) {

                    hDispBuf = BufTab_getBuf(hBufTab, bufIdx);

                    if (munmap(Buffer_getUserPtr(hDispBuf),
                               Buffer_getSize(hDispBuf)) == -1) {
                        Dmai_err1("Failed to unmap capture buffer%d\n", bufIdx);
                        ret = Dmai_EFAIL;
                    }
                }
            }
        }
    }

    if (hDisplay->bufDescs) {
        free(hDisplay->bufDescs);
    }

    free(hDisplay);

    return ret;
}
Example #16
/******************************************************************************
 * cleanup
 ******************************************************************************/
static Int cleanup(Capture_Handle hCapture)
{
    BufTab_Handle         hBufTab    = hCapture->hBufTab;
    Int                   ret        = Dmai_EOK;
    Int8                 *capBufPtr;
    enum v4l2_buf_type    type;
    Uns                   bufIdx;
    Buffer_Handle         hCapBuf;

    if (hCapture->fd != -1) {
        if (hCapture->started) {
            /* Shut off the video capture */
            type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            if (ioctl(hCapture->fd, VIDIOC_STREAMOFF, &type) == -1) {
                Dmai_err1("VIDIOC_STREAMOFF failed (%s)\n", strerror(errno));
                ret = Dmai_EFAIL;
            }
        }

        if (close(hCapture->fd) == -1) {
            Dmai_err1("Failed to close capture device (%s)\n", strerror(errno));
            ret = Dmai_EIO;
        }

        if (hCapture->userAlloc == FALSE && hBufTab) {
            for (bufIdx = 0;
                 bufIdx < BufTab_getNumBufs(hBufTab);
                 bufIdx++) {

                hCapBuf = BufTab_getBuf(hBufTab, bufIdx);
                capBufPtr = Buffer_getUserPtr(hCapBuf);

                if (munmap(capBufPtr - hCapture->topOffset,
                           Buffer_getSize(hCapBuf)) == -1) {
                    Dmai_err1("Failed to unmap capture buffer%d\n", bufIdx);
                    ret = Dmai_EFAIL;
                }
            }
        }

        if (hCapture->bufDescs) {
            free(hCapture->bufDescs);
        }
    }

    free(hCapture);

    return ret;
}
Example #17
/******************************************************************************
 * writeFrame
 ******************************************************************************/
static Int writeFrame(Buffer_Handle hBuf, FILE *outFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);
    Int size;

    size = Buffer_getNumBytesUsed(hBuf);

    if (fwrite(ptr, size, 1, outFile) != 1) {
        fprintf(stderr,"Failed to write data to disk\n");
        return -1;
    }

    printf("Wrote audio frame size %d to disk\n", size);

    return 0;
}
Example #18
/******************************************************************************
 * setOsdTransparency
 ******************************************************************************/
static Int setOsdTransparency(UI_Handle hUI, Char trans)
{
    Buffer_Handle hBuf;

    if (Display_get(hUI->hAttr, &hBuf) < 0) {
        ERR("Failed to get attribute window buffer\n");
        return FAILURE;
    }

    memset(Buffer_getUserPtr(hBuf), trans, Buffer_getSize(hBuf));

    if (Display_put(hUI->hAttr, hBuf) < 0) {
        ERR("Failed to put display buffer\n");
        return FAILURE;
    }

    return SUCCESS;
}
/******************************************************************************
 * gst_ticircbuffer_new
 *     Create a circular buffer to store an encoded input stream.  Increasing
 *     the number of windows stored in the buffer can help performance if
 *     adequate memory is available.
 ******************************************************************************/
GstTICircBuffer* gst_ticircbuffer_new(Int32 windowSize, Int32 numWindows,
                     Bool fixedBlockSize)
{
    GstTICircBuffer *circBuf;
    Buffer_Attrs     bAttrs  = Buffer_Attrs_DEFAULT;
    Int32            bufSize;

    circBuf = (GstTICircBuffer*)gst_mini_object_new(GST_TYPE_TICIRCBUFFER);

    g_return_val_if_fail(circBuf != NULL, NULL);

    GST_INFO("requested windowSize:  %ld\n", windowSize);
    circBuf->windowSize = windowSize;

    GST_INFO("fixed block size is %s\n", fixedBlockSize ? "ON" : "OFF");
    circBuf->fixedBlockSize = fixedBlockSize;

    /* We need to have at least 3 windows allocated for us to be able
     * to copy buffer data while the consumer is running.
     */
    if (numWindows < 3 && circBuf->fixedBlockSize != TRUE) {
        GST_ERROR("numWindows must be at least 3 when fixedBlockSize=FALSE");
        return NULL;
    }

    /* Set the read ahead size to be 1/4 of a window */
    circBuf->readAheadSize = (fixedBlockSize) ? 0 : windowSize >> 2;

    /* Allocate the circular buffer */
    bufSize = (numWindows * windowSize) + (circBuf->readAheadSize << 1);

    GST_LOG("creating circular input buffer of size %lu\n", bufSize);
    circBuf->hBuf = Buffer_create(bufSize, &bAttrs);

    if (circBuf->hBuf == NULL) {
        GST_ERROR("failed to create buffer");
        gst_object_unref(circBuf);
        return NULL;
    }

    circBuf->readPtr = circBuf->writePtr = Buffer_getUserPtr(circBuf->hBuf);

    return circBuf;
}
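/******************************************************************************
 * Hypothetical usage sketch (not part of the original sources): a typical
 * way to size the circular buffer for a video decoder, using three windows
 * (the minimum when fixedBlockSize is FALSE, per the check above) each as
 * large as the codec's required input buffer.  Vdec2_getInBufSize is assumed
 * to be the DMAI call reporting that size.
 ******************************************************************************/
GstTICircBuffer* circbuf_create_sketch(Vdec2_Handle hVd)
{
    return gst_ticircbuffer_new(Vdec2_getInBufSize(hVd), 3, FALSE);
}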
Example #20
/******************************************************************************
 * writeFrameUYVY
 ******************************************************************************/
static Int writeFrameUYVY(Buffer_Handle hBuf, FILE *outFile)
{
    Int8 *ptr = Buffer_getUserPtr(hBuf);

    BufferGfx_Dimensions dim;

    BufferGfx_getDimensions(hBuf, &dim);

    if (fwrite(ptr, 1, dim.width * dim.height * 2, outFile) != 
        dim.width * dim.height * 2) {
      fprintf(stderr,"Failed to write data to disk\n");
      return -1;
    }

    printf("Wrote UYVY frame size %d (%dx%d) to disk\n",
           (Int) (dim.width * 2 * dim.height),
           (Int) dim.width, (Int) dim.height);

    return 0;
}
/******************************************************************************
 * cleanup
 ******************************************************************************/
static Int cleanup(Display_Handle hDisplay)
{
    Int                      ret     = Dmai_EOK;
    BufTab_Handle            hBufTab = hDisplay->hBufTab;
    struct fb_var_screeninfo varInfo;
    struct fb_fix_screeninfo fixInfo;

    if (hDisplay->fd != -1) {
        if (ioctl(hDisplay->fd, FBIOGET_FSCREENINFO, &fixInfo) == -1) {
            Dmai_err1("Failed FBIOGET_FSCREENINFO (%s)\n", strerror(errno));
            ret = Dmai_EFAIL;
        }

        if (ioctl(hDisplay->fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {
            Dmai_err1("Failed ioctl FBIOGET_VSCREENINFO (%s)\n",
                      strerror(errno));
            ret =  Dmai_EFAIL;
        }
        
        if (ioctl(hDisplay->fd, FBIOPUT_VSCREENINFO, &hDisplay->origVarInfo) 
                == -1) {
            Dmai_err1("Failed FBIOGET_FSCREENINFO (%s)\n", strerror(errno));
            ret = Dmai_EFAIL;
        }

        if (hBufTab) {
            munmap(Buffer_getUserPtr(BufTab_getBuf(hBufTab, 0)),
                   fixInfo.line_length * varInfo.yres_virtual);

            free(hBufTab);
        }

        setDisplayBuffer(hDisplay, 0);

        close(hDisplay->fd);
    }

    free(hDisplay);

    return ret;
}
Example #22
/******************************************************************************
 * Sound_alsa_write
 ******************************************************************************/
Int Sound_alsa_write(Sound_Handle hSound, Buffer_Handle hBuf)
{
    Int32 numSamples, writeSamples;
    Int8 *bufPtr;

    assert(hSound);
    assert(hBuf);

    writeSamples = Buffer_getNumBytesUsed(hBuf) / (2 * hSound->channels);
    bufPtr = Buffer_getUserPtr(hBuf);

    while (writeSamples > 0) {
        /* start by doing a blocking wait for free space. */
        snd_pcm_wait (hSound->rcOut, PCM_TIMEOUT);
        numSamples = snd_pcm_writei(hSound->rcOut, bufPtr, writeSamples);

        if (numSamples == -EAGAIN) 
            continue;

        if (numSamples < 0) {
            if (xrunRecovery(hSound->rcOut,numSamples) < 0) {
                Dmai_err2 ("Failed to write to %s (%s)\n",
                            AUDIO_DEVICE, strerror(-numSamples));
                return Dmai_EFAIL;
            }
        }
        else {
            bufPtr += numSamples * 2 * hSound->channels;
            writeSamples -= numSamples;
        }
    }

    Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));

    return Dmai_EOK;
}
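/******************************************************************************
 * Illustration (not part of the original sources): the "2 * channels" factor
 * used by Sound_alsa_read/Sound_alsa_write above converts between bytes and
 * sample frames for 16-bit PCM.  The numbers below are made-up example
 * values.
 ******************************************************************************/
#include <assert.h>

void alsa_sample_math_sketch(void)
{
    long channels     = 2;       /* stereo               */
    long bytesPerSamp = 2;       /* 16-bit PCM           */
    long numBytesUsed = 4096;    /* example buffer level */

    long frames = numBytesUsed / (bytesPerSamp * channels);

    assert(frames == 1024);      /* 4096 / (2 * 2)       */
}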
/******************************************************************************
 * gst_ticircbuffer_data_available
 *    Return how much contiguous data is available at the read pointer.
 ******************************************************************************/
static Int32 gst_ticircbuffer_data_available(GstTICircBuffer *circBuf)
{
    /* First check if the buffer is empty, in which case return 0. */
    if (gst_ticircbuffer_is_empty(circBuf)) {
        return 0;
    }

    /* If the data is contiguous (the write pointer is ahead of the read
     * pointer), the available data is the gap between the two pointers.
     */
    if (circBuf->contiguousData) {
        return (circBuf->writePtr - circBuf->readPtr);
    }

    /* Otherwise, the contiguous data runs from the read pointer to the end
     * of the buffer.
     */
    else {
       return (Buffer_getUserPtr(circBuf->hBuf) +
               Buffer_getSize(circBuf->hBuf)) - circBuf->readPtr;
    }

    return 0;
}
Example #24
/******************************************************************************
 * Capture_put
 ******************************************************************************/
Int Capture_put(Capture_Handle hCapture, Buffer_Handle hBuf)
{
    Int idx;

    assert(hCapture);
    assert(hBuf);

    /*
     * The "used" flag in each buffer descriptor tracks ownership of that
     * slot by the application. If used == TRUE the index is available for
     * storing new buffer information; it is set to FALSE once the slot
     * holds valid information about a queued buffer.
     */
    idx = getUsedIdx(hCapture->bufDescs, BufTab_getNumBufs(hCapture->hBufTab));

    if (idx < 0) {
        Dmai_err0("You must get a captured buffer before putting one\n");
        return Dmai_ENOMEM;
    }

    hCapture->bufDescs[idx].v4l2buf.m.userptr =
        (Int) Buffer_getUserPtr(hBuf);
    hCapture->bufDescs[idx].v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    /* Issue captured frame buffer back to device driver */
    if (ioctl(hCapture->fd, VIDIOC_QBUF,
              &hCapture->bufDescs[idx].v4l2buf) == -1) {
        Dmai_err1("VIDIOC_QBUF failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    hCapture->bufDescs[idx].hBuf = hBuf;
    hCapture->bufDescs[idx].used = FALSE;

    return Dmai_EOK;
}
Example #25
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    Time_Attrs           tAttrs    = Time_Attrs_DEFAULT;
    BufferGfx_Attrs      gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Display_Handle       hDisplay  = NULL;
    Time_Handle          hTime     = NULL;
    BufTab_Handle        hDisBufTab= NULL;
    Int                  numFrame  = 0;
    Display_Attrs        dAttrs;
    Buffer_Handle        hDispBuf;
    Int                  y, x, pos, color;
    Cpu_Device           device;
    UInt32               time;
    Int32                bufSize;
    BufferGfx_Dimensions dim;
    Int                  ret = Dmai_EOK;

    /* Initialize DMAI */
    Dmai_init();

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr, "Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    switch (device) {
        case Cpu_Device_DM6467:
            dAttrs = Display_Attrs_DM6467_VID_DEFAULT;
            break;
        case Cpu_Device_OMAP3530:
        case Cpu_Device_DM3730:
            dAttrs = Display_Attrs_O3530_VID_DEFAULT;
            break;
        case Cpu_Device_OMAPL138:
            dAttrs = Display_Attrs_OMAPL138_OSD_DEFAULT;
            break;
        case Cpu_Device_DM365:
        case Cpu_Device_DM368:
            dAttrs = Display_Attrs_DM365_VID_DEFAULT;
            break;            
        case Cpu_Device_OMAPL137:
            dAttrs = Display_Attrs_OMAPL137_OSD_DEFAULT;
            break;        
        default:
            dAttrs = Display_Attrs_DM6446_DM355_VID_DEFAULT;
            break;
    }
 
    if (args->displayUalloc) {
        gfxAttrs.colorSpace = dAttrs.colorSpace;

        if (VideoStd_getResolution(args->videoStd, &gfxAttrs.dim.width, 
                                   &gfxAttrs.dim.height) < 0) {
            goto cleanup;
        }

        gfxAttrs.dim.lineLength =
            Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, 
                                                  gfxAttrs.colorSpace), 32); 

        gfxAttrs.dim.x = 0;
        gfxAttrs.dim.y = 0;

        if (gfxAttrs.colorSpace == ColorSpace_YUV422PSEMI) {
            bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
        }
        else if (gfxAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
            bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
        }
        else {
            bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height;
        }

        if (bufSize < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to calculated size for display buffers\n");
            goto cleanup;
        }

        /* Create a table of video buffers to use with the display device */
        hDisBufTab = BufTab_create(dAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hDisBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    /* Create the video display */
    dAttrs.videoStd = args->videoStd;
    dAttrs.videoOutput = args->videoOutput;
    hDisplay = Display_create(hDisBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open display device\n");
        goto cleanup;
    }

    x = color = 0;

    while (numFrame++ < args->numFrames) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Get a buffer from the display driver */
        if (Display_get(hDisplay, &hDispBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get display buffer\n");
            goto cleanup;
        }

        /* Retrieve the dimensions of the display buffer */
        BufferGfx_getDimensions(hDispBuf, &dim);

        printf("Display size %dx%d pitch %d x = %d color %d\n", (Int) dim.width,
               (Int) dim.height, (Int) dim.lineLength, x, color);

        /* Draw a vertical bar of a color */
        for (y = 0; y < dim.height; y++) {
            pos = y * dim.lineLength + x * 2;
            memset(Buffer_getUserPtr(hDispBuf) + pos, color, 2);
        }

        x = (x + 1) % dim.width;
        color = (color + 1) % 0xff;

        /* Give the display buffer back to be displayed */
        if (Display_put(hDisplay, hDispBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("[%d] Frame time: %uus\n", numFrame, (Uns) time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hDisBufTab) {
        BufTab_delete(hDisBufTab);
    }
    
    if (hTime) {
        Time_delete(hTime);
    }

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
Example #26
/*******************************************************************************
 * gst_tidmaivideosink_render
 ******************************************************************************/
static GstFlowReturn gst_tidmaivideosink_render(GstBaseSink * bsink,
                         GstBuffer * buf)
{
    BufferGfx_Attrs       gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Buffer_Handle         hDispBuf  = NULL;
    Buffer_Handle         inBuf     = NULL;
    BufferGfx_Dimensions  inDimSave;
    GstTIDmaiVideoSink   *sink      = GST_TIDMAIVIDEOSINK_CAST(bsink);

    GST_DEBUG("Begin, buffer %p",buf);

    /* The base sink sends us the first buffer twice, so avoid processing
     * it again; Display_put may fail in that case when using pad
     * allocation.
     */
    if (sink->prerolledBuffer == buf){
        GST_DEBUG("Not displaying previously pre-rolled buffer");
        sink->prerolledBuffer = NULL;
        return GST_FLOW_OK;
    }
    sink->prerolledBuffer = NULL;

    /* If the input buffer is not a DMAI buffer, allocate a DMAI buffer and
     * copy the input data into it with memcpy.
     */
    if (GST_IS_TIDMAIBUFFERTRANSPORT(buf)) {
        inBuf = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(buf);
    } else {
        /* allocate DMAI buffer */
        if (sink->tempDmaiBuf == NULL) {

            GST_DEBUG("Input buffer is non-dmai, allocating new buffer");
            gfxAttrs.dim.width          = sink->width;
            gfxAttrs.dim.height         = sink->height;
            gfxAttrs.dim.lineLength     = BufferGfx_calcLineLength(sink->width,
                                            sink->colorSpace);
            gfxAttrs.colorSpace         = sink->colorSpace;
            sink->tempDmaiBuf           = Buffer_create(GST_BUFFER_SIZE(buf),
                                           BufferGfx_getBufferAttrs(&gfxAttrs));

            if (sink->tempDmaiBuf == NULL) {
                GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                    ("Failed to allocate memory for the input buffer"));
                return GST_FLOW_UNEXPECTED;
            }
        }
        inBuf = sink->tempDmaiBuf;

        memcpy(Buffer_getUserPtr(inBuf), buf->data, buf->size);
    }

    if (Buffer_getBufTab(inBuf) == Display_getBufTab(sink->hDisplay)) {
        GST_DEBUG("Flipping pad allocated buffer");
        /* We got a buffer that is already on video memory, just flip it */
        hDispBuf = inBuf;
        if (sink->numAllocatedBuffers)
            sink->numAllocatedBuffers--;
        sink->allocatedBuffers[Buffer_getId(inBuf)] = NULL;
        if (buf == sink->lastAllocatedBuffer){
            sink->lastAllocatedBuffer = NULL;
        }
    } else {
        /* Check if we can allocate a new buffer, otherwise we may need 
         * to drop the buffer
         */
        BufferGfx_getDimensions(inBuf, &inDimSave);
        if ((sink->numAllocatedBuffers >= 
             (BufTab_getNumBufs(Display_getBufTab(sink->hDisplay)) - 1)) &&
             (sink->numUnusedBuffers == 0)){
            GST_ELEMENT_WARNING(sink,RESOURCE,NO_SPACE_LEFT,(NULL),
                ("Dropping incoming buffers because no display buffer"
                    " available"));
            return GST_FLOW_OK;
        } else {
            GST_DEBUG("Obtaining display buffer");
            hDispBuf = gst_tidmaivideosink_get_display_buffer(sink,inBuf);
            if (!hDispBuf){
                return GST_FLOW_UNEXPECTED;
            }
        }

        if (Framecopy_config(sink->hFc, inBuf, hDispBuf) < 0) {
            GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                ("Failed to configure the frame copy"));
            return GST_FLOW_UNEXPECTED;
        }

        if (Framecopy_execute(sink->hFc, inBuf, hDispBuf) < 0) {
            GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
                ("Failed to execute the frame copy"));
            return GST_FLOW_UNEXPECTED;
        }

        BufferGfx_resetDimensions(hDispBuf);
        BufferGfx_setDimensions(inBuf, &inDimSave);
    }

    /* Send filled buffer to display device driver to be displayed */
    if (Display_put(sink->hDisplay, hDispBuf) < 0) {
        GST_ELEMENT_ERROR(sink,STREAM,FAILED,(NULL),
            ("Failed to put the buffer on display"));
        return GST_FLOW_UNEXPECTED;
    }

    GST_DEBUG("Finish");

    return GST_FLOW_OK;
}
Example #27
/******************************************************************************
 * Idec1_process
 ******************************************************************************/
Int Idec1_process(Idec1_Handle hId, Buffer_Handle hInBuf,
                 Buffer_Handle hOutBuf)
{
    BufferGfx_Dimensions    dim;
    IMGDEC1_DynamicParams   dynParams;
    IMGDEC1_InArgs          inArgs;
    IMGDEC1_OutArgs         outArgs;
    IMGDEC1_Status          decStatus;
    XDM1_BufDesc            inBufDesc;
    XDM1_BufDesc            outBufDesc;
    XDAS_Int32              status;
    XDAS_Int8 *             inPtr;
    XDAS_Int8 *             outPtr;
    UInt32                  offset = 0;
    UInt32                  i;
    
    assert(hId);
    assert(hInBuf);
    assert(hOutBuf);
    assert(Buffer_getSize(hInBuf));   
    assert(Buffer_getUserPtr(hInBuf)); 
    assert(Buffer_getUserPtr(hOutBuf));
    assert(Buffer_getNumBytesUsed(hInBuf));
    assert(Buffer_getSize(hOutBuf));
    assert(Buffer_getType(hOutBuf) == Buffer_Type_GRAPHICS);

    BufferGfx_getDimensions(hOutBuf, &dim);
    
    inPtr  = Buffer_getUserPtr(hInBuf);
    outPtr = Buffer_getUserPtr(hOutBuf);

    inArgs.size             = sizeof(IMGDEC1_InArgs);
    inArgs.numBytes         = Buffer_getNumBytesUsed(hInBuf);

    outArgs.size            = sizeof(IMGDEC1_OutArgs);

    inBufDesc.numBufs       = 1;
    outBufDesc.numBufs      = hId->minNumOutBufs;
    
    inBufDesc.descs[0].buf = inPtr;
    inBufDesc.descs[0].bufSize = Buffer_getSize(hInBuf);

    for(i = 0; i < hId->minNumOutBufs; i++)
    {
        outBufDesc.descs[i].buf = (XDAS_Int8 *)((unsigned int)outPtr + offset);
        offset +=  hId->minOutBufSize[i];
        outBufDesc.descs[i].bufSize = hId->minOutBufSize[i];
    }   
        
    /* Decode image buffer */
    status = IMGDEC1_process(hId->hDecode, &inBufDesc, &outBufDesc, &inArgs,
                            &outArgs);

    Buffer_setNumBytesUsed(hInBuf, outArgs.bytesConsumed);

    if (status != IMGDEC1_EOK) {
        if (XDM_ISFATALERROR(outArgs.extendedError)) {
            Dmai_err2("IMGDEC1_process() failed with error (%d ext: 0x%x)\n",
                      (Int)status, (Uns) outArgs.extendedError);
            return Dmai_EFAIL;
        }
        else {
            Dmai_dbg1("IMGDEC1_process() non-fatal error 0x%x\n",
                      (Uns) outArgs.extendedError);
            return Dmai_EBITERROR;
        }
    }

    /* Get the dynamic codec status */
    decStatus.data.buf = NULL;
    decStatus.size = sizeof(IMGDEC1_Status);
    dynParams.size = sizeof(IMGDEC1_DynamicParams);
    status = IMGDEC1_control(hId->hDecode, XDM_GETSTATUS, &dynParams,
                            &decStatus);

    if (status != IMGDEC1_EOK) {
        Dmai_err1("XDM_GETSTATUS failed, status=%d\n", status);
        return Dmai_EFAIL;
    }

    /* Set output Color Format */
    switch (decStatus.outputChromaFormat) {
        case XDM_YUV_422ILE:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_UYVY); 
            break;
        case XDM_YUV_420P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV420P); 
            break;
        case XDM_YUV_422P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV422P); 
            break;
        case XDM_YUV_444P:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_YUV444P); 
            break;
        case XDM_GRAY:
            BufferGfx_setColorSpace (hOutBuf, ColorSpace_GRAY); 
            break;
        default:
            printf("Unsupported output color space.\n");
            return Dmai_EFAIL;        
    }
       
    dim.x = dim.y = 0;
    dim.width = decStatus.outputWidth;
    dim.height = decStatus.outputHeight;
    dim.lineLength = decStatus.outputWidth *
                      ColorSpace_getBpp(BufferGfx_getColorSpace(hOutBuf)) / 8;
    
    if (BufferGfx_setDimensions(hOutBuf, &dim) < 0) {
        Dmai_err0("Frame does not fit in allocated buffer\n");
        return Dmai_EFAIL;
    }

    return Dmai_EOK;
}
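/******************************************************************************
 * Hypothetical usage sketch (not part of the original sources): the calls
 * normally wrapped around Idec1_process above.  The codec name is supplied
 * by the caller; Idec1_create, Idec1_delete and the default parameter
 * structures are assumed to follow the same pattern as the Venc1/Sdec1 calls
 * shown elsewhere in this listing.  hInBuf must already hold the encoded
 * image (with numBytesUsed set) and hOutBuf must be a graphics buffer large
 * enough for the decoded frame.
 ******************************************************************************/
Int idec1_sketch(Engine_Handle hEngine, Char *codecName,
                 Buffer_Handle hInBuf, Buffer_Handle hOutBuf)
{
    IMGDEC1_Params        params    = Idec1_Params_DEFAULT;
    IMGDEC1_DynamicParams dynParams = Idec1_DynamicParams_DEFAULT;
    Idec1_Handle          hId;
    Int                   ret;

    hId = Idec1_create(hEngine, codecName, &params, &dynParams);

    if (hId == NULL) {
        return Dmai_EFAIL;
    }

    ret = Idec1_process(hId, hInBuf, hOutBuf);

    Idec1_delete(hId);

    return ret;
}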
Example #28
/******************************************************************************
 * speechThrFxn
 ******************************************************************************/
Void *speechThrFxn(Void *arg)
{
    SpeechEnv              *envp                = (SpeechEnv *) arg;
    SPHDEC1_Params          defaultParams       = Sdec1_Params_DEFAULT;
    SPHDEC1_DynamicParams   defaultDynParams    = Sdec1_DynamicParams_DEFAULT;
    Void                   *status              = THREAD_SUCCESS;
    Sound_Attrs             sAttrs              = Sound_Attrs_MONO_DEFAULT;
    Loader_Attrs            lAttrs              = Loader_Attrs_DEFAULT;
    Buffer_Attrs            bAttrs              = Buffer_Attrs_DEFAULT;
    Sdec1_Handle            hSd1                = NULL;
    Sound_Handle            hSound              = NULL;
    Loader_Handle           hLoader             = NULL;
    Engine_Handle           hEngine             = NULL;
    Buffer_Handle           hOutBuf             = NULL;
    SPHDEC1_Params         *params;
    SPHDEC1_DynamicParams  *dynParams;
    Buffer_Handle           hInBuf;

    /* Open the codec engine */
    hEngine = Engine_open(envp->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ERR("Failed to open codec engine %s\n", envp->engineName);
        cleanup(THREAD_FAILURE);
    }

    /* Create the sound device */
    sAttrs.sampleRate = 8000;
    sAttrs.mode = Sound_Mode_OUTPUT;
    sAttrs.leftGain  = 127;
    sAttrs.rightGain = 127;
    sAttrs.bufSize   = 128;
    hSound = Sound_create(&sAttrs);

    if (hSound == NULL) {
        ERR("Failed to create audio device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Set the sample rate for the user interface */
    gblSetSamplingFrequency(sAttrs.sampleRate);

    /* Use supplied params if any, otherwise use defaults */
    params = envp->params ? envp->params : &defaultParams;
    dynParams = envp->dynParams ? envp->dynParams : &defaultDynParams;

    /* Create the speech decoder */
    hSd1 = Sdec1_create(hEngine, envp->speechDecoder, params, dynParams);

    if (hSd1 == NULL) {
        ERR("Failed to create speech decoder: %s\n", envp->speechDecoder);
        cleanup(THREAD_FAILURE);
    }

    /*
     * Make the output buffer size twice the size of what the codec needs
     * as the codec needs mono and the Sound module converts the decoded
     * mono samples to stereo before writing to the device driver.
     */
    hOutBuf = Buffer_create(OUTBUFSIZE, &bAttrs);

    if (hOutBuf == NULL) {
        ERR("Failed to allocate output buffer\n");
        cleanup(THREAD_FAILURE);
    }

    /* How much encoded data to feed the codec each process call */
    lAttrs.readSize = INBUFSIZE;

    /* Make the total ring buffer larger */
    lAttrs.readBufSize = lAttrs.readSize * 512;

    /* Create the file loader for reading encoded data */
    hLoader = Loader_create(envp->speechFile, &lAttrs);

    if (hLoader == NULL) {
        ERR("Failed to create loader\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    /* Prime the file loader */
    Loader_prime(hLoader, &hInBuf);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Decode the audio buffer */
        if (Sdec1_process(hSd1, hInBuf, hOutBuf) < 0) {
            ERR("Failed to decode audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Increment statistics for user interface */
        gblIncSoundBytesProcessed(Buffer_getNumBytesUsed(hInBuf));

        /*
         * Force the output buffer size since we are forcing the size of the
         * output buffer allocated as opposed to asking the codec for a size.
         */
        Buffer_setNumBytesUsed(hOutBuf, OUTBUFSIZE);

        /* Write the decoded samples to the sound device */
        if (Sound_write(hSound, hOutBuf) < 0) {
            ERR("Failed to write audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Load a new frame from the file system */
        if (Loader_getFrame(hLoader, hInBuf) < 0) {
            ERR("Failed to read a frame of encoded data\n");
            cleanup(THREAD_FAILURE);
        }

        /* Check if the clip has ended */
        if (Buffer_getUserPtr(hInBuf) == NULL) {
            /* Wait for the video clip to finish, if applicable */
            Rendezvous_meet(envp->hRendezvousLoop);

            /* If we are to loop the clip, start over */
            if (envp->loop) {
                /* Recreate the speech codec */
                Sdec1_delete(hSd1);
                hSd1 = Sdec1_create(hEngine, envp->speechDecoder,
                                    params, dynParams);

                if (hSd1 == NULL) {
                    ERR("Failed to create speech decoder: %s\n",
                        envp->speechDecoder);
                    cleanup(THREAD_FAILURE);
                }

                /* Re-prime the file loader */
                Loader_prime(hLoader, &hInBuf);
            }
            else {
                printf("End of clip reached, exiting..\n");
                cleanup(THREAD_SUCCESS);
            }
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Rendezvous_force(envp->hRendezvousLoop);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hLoader) {
        Loader_delete(hLoader);
    }

    if (hSd1) {
        Sdec1_delete(hSd1);
    }

    if (hSound) {
        Sound_delete(hSound);
    }

    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    return status;
}
Example #29
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    VIDENC1_Params         params    = Venc1_Params_DEFAULT;
    VIDENC1_DynamicParams  dynParams = Venc1_DynamicParams_DEFAULT;
    BufferGfx_Attrs        gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Buffer_Attrs           bAttrs    = Buffer_Attrs_DEFAULT;
    Time_Attrs             tAttrs    = Time_Attrs_DEFAULT;
    Venc1_Handle           hVe1      = NULL;
    FILE                  *outFile   = NULL;
    FILE                  *reconFile = NULL;
    FILE                  *inFile    = NULL;
    Engine_Handle          hEngine   = NULL;
    Time_Handle            hTime     = NULL;
    Bool                   flushed   = FALSE;
    Bool                   mustExit  = FALSE;
    BufTab_Handle          hBufTab   = NULL;
    Buffer_Handle          hOutBuf   = NULL;
    Buffer_Handle          hFreeBuf  = NULL;
    Buffer_Handle          hInBuf    = NULL;
    Buffer_Handle          hReconBuf = NULL;
    Int                    numFrame  = 0;
    Int                    flushCntr = 1;
    Int                    bufIdx;
    Int                    inBufSize, outBufSize;
    Cpu_Device             device;
    Int                    numBufs;
    ColorSpace_Type        colorSpace;
    UInt32                 time;
    Int                    ret = Dmai_EOK;

    printf("Starting application...\n");

    /* Initialize the codec engine run time */
    CERuntime_init();

    /* Initialize DMAI */
    Dmai_init();

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Open the input file with raw yuv data */
    inFile = fopen(args->inFile, "rb");

    if (inFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open input file %s\n", args->inFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(inFile, vbufferIn, _IOFBF, sizeof(vbufferIn)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to setvbuf on input file descriptor\n");
        goto cleanup;
    }

    /* Open the output file where to put encoded data */
    outFile = fopen(args->outFile, "wb");

    if (outFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open output file %s\n", args->outFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(outFile, vbufferOut, _IOFBF, sizeof(vbufferOut)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to setvbuf on output file descriptor\n");
        goto cleanup;
    }

    /* Open the output file where to put reconstructed frames */
    if (args->writeReconFrames) {
        reconFile = fopen(args->reconFile, "wb");

        if (reconFile == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to open output file %s\n", args->reconFile);
            goto cleanup;
        }

        /* Using a larger vbuf to enhance performance of file i/o */
        if (setvbuf(reconFile, vbufferRecon, _IOFBF,
                    sizeof(vbufferRecon)) != 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to setvbuf on output file descriptor\n");
            goto cleanup;
        }
    }

    /* Open the codec engine */
    hEngine = Engine_open(args->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open codec engine: %s\n", args->engineName);
        goto cleanup;
    }

    /* Set up codec parameters depending on bit rate */
    if (args->bitRate < 0) {
        /* Variable bit rate */
        params.rateControlPreset = IVIDEO_NONE;

        /*
         * If variable bit rate use a bogus bit rate value (> 0)
         * since it will be ignored.
         */
        params.maxBitRate        = 2000000;
    }
    else {
        /* Constant bit rate */
        params.rateControlPreset = IVIDEO_LOW_DELAY;
        params.maxBitRate        = args->bitRate;
    }

    /* Set up codec parameters depending on device */
    switch (device) {
    case Cpu_Device_DM6467:
        params.inputChromaFormat = XDM_YUV_420SP;
        params.reconChromaFormat = XDM_CHROMA_NA;
        break;
    case Cpu_Device_DM355:
        params.inputChromaFormat = XDM_YUV_422ILE;
        params.reconChromaFormat = XDM_YUV_420P;
        break;
    case Cpu_Device_DM365:
    case Cpu_Device_DM368:
        params.inputChromaFormat = XDM_YUV_420SP;
        params.reconChromaFormat = XDM_YUV_420SP;
        break;
    case Cpu_Device_DM3730:
        params.rateControlPreset = IVIDEO_STORAGE;
        params.inputChromaFormat = XDM_YUV_422ILE;
        break;
    default:
        params.inputChromaFormat = XDM_YUV_422ILE;
        break;
    }

    params.maxWidth              = args->width;
    params.maxHeight             = args->height;

    /* Workaround for SDOCM00068944: h264fhdvenc fails to create the codec
     * when params.dataEndianness is set to XDM_BYTE.
     */
    if (device == Cpu_Device_DM6467 &&
        strcmp(args->codecName, "h264fhdvenc") == 0) {
        params.dataEndianness        = XDM_LE_32;
    }

    params.maxInterFrameInterval = 1;
    dynParams.targetBitRate      = params.maxBitRate;
    dynParams.inputWidth         = params.maxWidth;
    dynParams.inputHeight        = params.maxHeight;

    /* Create the video encoder */
    hVe1 = Venc1_create(hEngine, args->codecName, &params, &dynParams);

    if (hVe1 == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create video encoder: %s\n", args->codecName);
        goto cleanup;
    }

    /* Ask the codec how much input data it needs */
    inBufSize = Venc1_getInBufSize(hVe1);

    /* Ask the codec how much space it needs for output data */
    outBufSize = Venc1_getOutBufSize(hVe1);

    /* Which color space to use in the graphics buffers depends on the device */
    colorSpace = ((device == Cpu_Device_DM6467)||
                  (device == Cpu_Device_DM365) ||
                  (device == Cpu_Device_DM368)) ? ColorSpace_YUV420PSEMI :
                 ColorSpace_UYVY;

    /* Align buffers to cache line boundary */
    gfxAttrs.bAttrs.memParams.align = bAttrs.memParams.align = BUFSIZEALIGN;

    /* Use cached buffers if requested */
    if (args->cache) {
        gfxAttrs.bAttrs.memParams.flags = bAttrs.memParams.flags
                                          = Memory_CACHED;
    }

    gfxAttrs.dim.width      = args->width;
    gfxAttrs.dim.height     = args->height;
    if ((device == Cpu_Device_DM6467) || (device == Cpu_Device_DM365)
            || (device == Cpu_Device_DM368)) {
        gfxAttrs.dim.height = Dmai_roundUp(gfxAttrs.dim.height, CODECHEIGHTALIGN);
    }
    gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(args->width, colorSpace);
    gfxAttrs.colorSpace     = colorSpace;
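
    /* For reference (typical values, not checked here): for UYVY the computed
     * line length is usually width * 2 bytes (2 bytes per pixel), while for
     * the semi-planar YUV420 devices it is width bytes for the luma plane.
     */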

    if (inBufSize < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to calculate buffer attributes\n");
        goto cleanup;
    }

    /* Number of input buffers required */
    if (params.maxInterFrameInterval > 1) {
        /* B-frame support */
        numBufs = params.maxInterFrameInterval;
    }
    else {
        numBufs = 1;
    }
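
    /* Note: when B-frames are enabled the encoder reorders frames and can
     * hold on to several input buffers before releasing any of them, so the
     * application allocates maxInterFrameInterval input buffers; a single
     * buffer is enough otherwise.
     */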

    /* Create a table of input buffers of the size requested by the codec */
    hBufTab =
        BufTab_create(numBufs, Dmai_roundUp(inBufSize, BUFSIZEALIGN),
                      BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to allocate contiguous buffers\n");
        goto cleanup;
    }

    /* Set input buffer table */
    Venc1_setBufTab(hVe1, hBufTab);

    /* Create the reconstructed frame buffer for raw yuv data */
    if (args->writeReconFrames) {
        hReconBuf =
            Buffer_create(Dmai_roundUp(inBufSize, BUFSIZEALIGN),
                          BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hReconBuf == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffer\n");
            goto cleanup;
        }
    }

    /* Create the output buffer for encoded video data */
    hOutBuf = Buffer_create(Dmai_roundUp(outBufSize, BUFSIZEALIGN), &bAttrs);

    if (hOutBuf == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create contiguous buffer\n");
        goto cleanup;
    }

    while (1) {

        /* Get a buffer for input */
        hInBuf = BufTab_getFreeBuf(hBufTab);

        if (hInBuf == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get a free contiguous buffer from BufTab\n");
            BufTab_print(hBufTab);
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Read a yuv input frame */
        printf("\n Frame %d: ", numFrame);
        if ((device == Cpu_Device_DM6467)||
                (device == Cpu_Device_DM365) ||
                (device == Cpu_Device_DM368)) {
            if (args->sp) {
                if (readFrame420SP(hInBuf, inFile, args->height) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            } else {
                if (readFrame420P(hInBuf, inFile, args->height) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            }
        }
        else {
            if (readFrameUYVY(hInBuf, inFile) < 0) {
                ret = Dmai_EFAIL;
                mustExit = TRUE;
            }
        }

        if (++numFrame == args->numFrames || mustExit == TRUE) {
            if (params.maxInterFrameInterval <= 1) {
                /* No B-frame support */
                printf("... exiting \n");
                goto cleanup;
            }

            /*
             * When encoding a stream with B-frames, ending the processing
             * requires freeing the frames still held by the encoder.  This is
             * done by flushing the encoder and then performing the remaining
             * process() calls with a dummy input buffer.
             */
            printf("\n... exiting with flush (B-frame stream) \n");
            flushCntr = params.maxInterFrameInterval-1;
            flushed = TRUE;
            Venc1_flush(hVe1);
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Read time: %uus\n", (Uns)time);
        }

        /*
         * The following loop iterates more than once only when encoding is
         * being finished, i.e. when the frames still held by the encoder are
         * being flushed.  All flushed frames are encoded as P or I frames.
         */
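        /* How the loop counts work out (derived from this file, not from the
         * codec documentation): flushCntr is expected to be 1 during normal
         * encoding, so each input frame is processed once.  After
         * Venc1_flush() it becomes maxInterFrameInterval - 1, and the same
         * (dummy) input buffer is passed to the remaining process() calls to
         * drain the frames the encoder still holds.
         */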

        for(bufIdx = 0; bufIdx < flushCntr; bufIdx++) {

            if (args->cache) {
                /*
                 * To meet xDAIS DMA Rule 7, when input buffers are cached, we
                 * must write back the cache into physical memory.  Also, per
                 * DMA Rule 7, we must invalidate the output buffer in the
                 * cache before providing it to any xDAIS algorithm.
                 */
                Memory_cacheWbInv(Buffer_getUserPtr(hInBuf),
                                  Buffer_getSize(hInBuf));

                /* Per DMA Rule 7, the output buffer cache lines must be invalidated */
                Memory_cacheInv(Buffer_getUserPtr(hOutBuf),
                                Buffer_getSize(hOutBuf));

                if (args->benchmark) {
                    if (Time_delta(hTime, &time) < 0) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to get timer delta\n");
                        goto cleanup;
                    }

                    printf("Pre-process cache maintenance: %uus \n", (Uns) time);
                }
            }

            /* Make sure the whole buffer is used for input */
            BufferGfx_resetDimensions(hInBuf);

            /* Encode the video buffer */
            if (Venc1_process(hVe1, hInBuf, hOutBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to encode video buffer\n");
                goto cleanup;
            }

            /* If the encoder generated output, free the input buffer it has released */
            if (Buffer_getNumBytesUsed(hOutBuf)>0) {
                /* Get free buffer */
                hFreeBuf = Venc1_getFreeBuf(hVe1);
                /* Free buffer */
                BufTab_freeBuf(hFreeBuf);
            }
            /* If the encoder did not generate any output */
            else {
                /* In a sequence without B-frames, the encoder has most likely
                 * skipped the frame, probably because the target bit rate was
                 * exceeded, so the input buffer can be freed right away.
                 */
                if (params.maxInterFrameInterval <= 1) {
                    printf(" Encoder generated 0 size frame\n");
                    BufTab_freeBuf(hInBuf);
                }
            }
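
            /* Note: the handle returned by Venc1_getFreeBuf() above is not
             * necessarily hInBuf; with B-frames the codec may keep the most
             * recently submitted frame and release an older one instead,
             * which is why the released handle is freed rather than hInBuf.
             */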

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get encode time\n");
                    goto cleanup;
                }

                printf("[%d] Encode: %uus\n", numFrame, (Uns)time);
            }

            if (args->cache) {
                /* Write back the output buffer cache lines. */
                Memory_cacheWb(Buffer_getUserPtr(hOutBuf),
                               Buffer_getSize(hOutBuf));

                if (args->benchmark) {
                    if (Time_delta(hTime, &time) < 0) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to get timer delta\n");
                        goto cleanup;
                    }

                    printf("Post-process cache write back: %uus \n", (Uns) time);
                }
            }

            /* Write the encoded frame to the file system */
            if (Buffer_getNumBytesUsed(hOutBuf)) {
                if (fwrite(Buffer_getUserPtr(hOutBuf),
                           Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to write encoded video data to file\n");
                    goto cleanup;
                }
            }

            /* Write the reconstructed frame to the file system */
            if (args->writeReconFrames) {
                processReconData(Venc1_getReconBufs(hVe1), hInBuf, hReconBuf);

                if (Buffer_getNumBytesUsed(hReconBuf)) {
                    if (fwrite(Buffer_getUserPtr(hReconBuf),
                               Buffer_getNumBytesUsed(hReconBuf), 1, reconFile) != 1) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to write reconstructed frame to file\n");
                        goto cleanup;
                    }
                }
            }

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }

                printf("File write time: %uus\n", (Uns)time);

                if (Time_total(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get timer total\n");
                    goto cleanup;
                }

                printf("Total: %uus\n", (Uns)time);
            }
        }

        /* If the codec flushing completed, exit main thread */
        if (flushed) {
            /* Free dummy input buffer used for flushing process() calls */
            printf("freeing dummy input buffer ... \n");
            BufTab_freeBuf(hInBuf);
            break;
        }
    }

cleanup:
    /* Clean up the application */
    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hReconBuf) {
        Buffer_delete(hReconBuf);
    }

    if (hVe1) {
        Venc1_delete(hVe1);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    if (inFile) {
        fclose(inFile);
    }

    if (outFile) {
        fclose(outFile);
    }

    if (reconFile) {
        fclose(reconFile);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    printf("End of application.\n");

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
/******************************************************************************
 * processReconData
 *    Transform reconstructed buffer data into a UYVY frame.  The format of
 *    the reconstructed buffer data is expected to conform to the specification
 *    found in "MPEG4 Simple Profile Encoder Codec on DM355 User's Guide"
 *    (SPRUFE4C), and may not work for other codecs.
 ******************************************************************************/
Void processReconData(IVIDEO1_BufDesc* reconBufs, Buffer_Handle hSrcBuf,
                      Buffer_Handle hDstBuf)
{
    Int16                 mbSizeY;
    Int16                 mbSizeX;
    Uint32                lumaColLength;
    Uint32                chromaColSize;
    Uint32                UYVYRowLength;
    Uint32                UYVYMbSize;
    UInt8                *lum_buf;
    UInt8                *chr_buf;
    UInt8                *curr_mb;
    UInt8                *curr_lum_mb;
    UInt8                *curr_chroma_mb;
    Int16                 i, j, k, l;
    BufferGfx_Dimensions  dim;

    /*
     * A single macroblock is 16x16 pixels.  Get the frame dimensions in
     * macroblocks by dividing the pixel height and width by 16.
     */
    BufferGfx_getDimensions(hSrcBuf, &dim);

    mbSizeY = dim.height >> 4;
    mbSizeX = dim.width  >> 4;

    /*
     * The luma buffer is a series of 16x16-byte blocks, and the chroma buffer
     * is a series of 16x8-byte blocks.  Each block holds the data for one
     * macroblock.  The first block in each buffer contains header
     * information.  Set lum_buf and chr_buf to the first block after the
     * header.
     */
    lum_buf = (UInt8*) (reconBufs->bufDesc[0].buf + 16 * 16);
    chr_buf = (UInt8*) (reconBufs->bufDesc[1].buf + 16 * 8);

    /*
     * The luma and chroma buffers are constructed in column-major order.
     * The blocks for a single column are followed by two padding blocks
     * before the next column starts.  Set lumaColLength and chromaColSize
     * to the number of bytes that must be skipped over to get to the next
     * column in the corresponding buffer.
     */
    lumaColLength = (16*16) * (mbSizeY + 2);
    chromaColSize = (16*8)  * (mbSizeY + 2);
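
    /* For example (illustrative numbers): a 640x480 frame gives mbSizeY = 30,
     * so lumaColLength = 256 * 32 = 8192 bytes and chromaColSize =
     * 128 * 32 = 4096 bytes per column, including the two padding blocks.
     */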

    /*
     * Calculate the number of bytes that must be skipped over to go to the
     * next row in the reconstructed UYVY frame.  Also calculate how many
     * bytes in the UYVY frame are needed to represent a single macroblock.
     */
    UYVYRowLength = 32 * mbSizeX;
    UYVYMbSize    = 32 * 16;
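
    /* With the same 640x480 example: mbSizeX = 40, so UYVYRowLength =
     * 32 * 40 = 1280 bytes (UYVY uses 2 bytes per pixel) and UYVYMbSize =
     * 512 bytes, i.e. 16 rows of 32 bytes per macroblock.
     */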

    /*
     * Copy the reconstructed buffer information into a UYVY frame.
     */
    for (i = 0; i < mbSizeX; i++) {
        for (j = 0; j < mbSizeY; j++) {

            /* Calculate the input and output buffer offsets for the current
             * macroblock.
             */
            curr_lum_mb    = lum_buf + (lumaColLength * i) + (256 * j);
            curr_chroma_mb = chr_buf + (chromaColSize * i) + (128 * j);
            curr_mb        = (UInt8 *) Buffer_getUserPtr(hDstBuf) +
                             (j * (UYVYMbSize * mbSizeX)) + (i * 32);
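
            /* The output offset works as follows: UYVYMbSize * mbSizeX is one
             * full macroblock row (16 pixel rows) of the UYVY frame, so
             * j * (UYVYMbSize * mbSizeX) moves down j macroblock rows, and
             * i * 32 moves right i macroblocks (16 pixels = 32 UYVY bytes).
             */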

            /* Copy luma samples into the odd (Y) byte positions of the UYVY
             * macroblock.
             */
            for (k = 0; k < 16; k++) {
                for (l = 0; l < 16; l++) {
                    curr_mb[(k * UYVYRowLength) + (l * 2) + 1] =
                        curr_lum_mb[k * 16 + l];
                }
            }

            /* Copy chroma samples into the even (U/V) byte positions; each
             * chroma row is written to two consecutive UYVY rows, which
             * upsamples the 4:2:0 chroma vertically to match 4:2:2.
             */
            for (k = 0; k < 8; k++) {
                for (l = 0; l < 16; l++) {
                    curr_mb[((k * 2) * UYVYRowLength) + (l * 2)] =
                        curr_chroma_mb[k * 16 + l];
                    curr_mb[((k * 2 + 1) * UYVYRowLength) + (l * 2)] =
                        curr_chroma_mb[k * 16 + l];
                }
            }
        }
    }

    Buffer_setNumBytesUsed(hDstBuf, dim.width * dim.height * 2);
}