Example 1
static GstFlowReturn
create(GstPushSrc *base, GstBuffer **buf)
{
    GstTICaptureSrc *src = (GstTICaptureSrc *)base;
    Buffer_Handle       hDstBuf;
    GstBuffer   *outBuf;
    GstFlowReturn ret = GST_FLOW_OK;
    BufferGfx_Attrs  gfxAttrs = BufferGfx_Attrs_DEFAULT;
    Int32   width, height;

    GST_LOG("create begin");

    /* create capture device */
    if (src->hCapture == NULL) {

        /* set framerate based on video standard */
        switch (dmai_video_std(src->video_standard)) {
            case VideoStd_D1_NTSC:
                gst_value_set_fraction(&src->framerate, 30000, 1001);
                break;
            case VideoStd_D1_PAL:
                gst_value_set_fraction(&src->framerate, 25, 1);
                break;
            default:
                gst_value_set_fraction(&src->framerate, 30, 1);
                break;
        }

        /* set width & height based on video standard */
        src->cAttrs.videoStd = dmai_video_std(src->video_standard);

        VideoStd_getResolution(src->cAttrs.videoStd, &width, &height);

        /* override the detected resolution with a fixed D1 PAL size */
        width = 720;
        height = 576;
        GST_WARNING("forcing video size to %dx%d", width, height);

        src->width = width;
        src->height = height;
        
        gfxAttrs.dim.height = src->height;
        gfxAttrs.dim.width = src->width;
        src->cAttrs.captureDimension = &gfxAttrs.dim;

        if (!capture_create(src))
            return GST_FLOW_ERROR;
    }

    /* Get buffer from driver */
    if (Capture_get(src->hCapture, &hDstBuf)) {
        GST_ELEMENT_ERROR(src, RESOURCE, FAILED,
            ("Failed to get buffer from capture driver\n"), (NULL));
        return GST_FLOW_ERROR;
    }

    /* Create a DMAI transport buffer object to carry a DMAI buffer to
     * the source pad.  The transport buffer knows how to release the
     * buffer for re-use in this element when the source pad calls
     * gst_buffer_unref().
     */
    outBuf = gst_tidmaibuffertransport_new(hDstBuf, src->hBufTab,
        capture_buffer_finalize, (void *)src);
    gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf),
        Buffer_getSize(hDstBuf));

    *buf = outBuf;

    /* set buffer metadata */
    if (G_LIKELY (ret == GST_FLOW_OK && *buf)) {
        GstClock *clock;
        GstClockTime timestamp;

        GST_BUFFER_OFFSET (*buf) = src->offset++;
        GST_BUFFER_OFFSET_END (*buf) = src->offset;

        /* timestamps, LOCK to get clock and base time. */
        GST_OBJECT_LOCK (src);
        if ((clock = GST_ELEMENT_CLOCK (src))) {
            /* we have a clock, get base time and ref clock */
            timestamp = GST_ELEMENT (src)->base_time;
            gst_object_ref (clock);
        } else {
            /* no clock, can't set timestamps */
            timestamp = GST_CLOCK_TIME_NONE;
        }
        GST_OBJECT_UNLOCK (src);

        if (G_LIKELY (clock)) {
            /* the time now is the time of the clock minus the base time */
            timestamp = gst_clock_get_time (clock) - timestamp;
            gst_object_unref (clock);

            /* if we have a framerate, adjust timestamp for frame latency */
            if (GST_CLOCK_TIME_IS_VALID (src->duration)) {
                if (timestamp > src->duration)
                    timestamp -= src->duration;
                else
                    timestamp = 0;
            }

        }

        /* FIXME: use the timestamp from the buffer itself! */
        GST_BUFFER_TIMESTAMP (*buf) = timestamp;
        GST_BUFFER_DURATION (*buf) = src->duration;
    }

    /* Create caps for buffer */
    GstCaps *mycaps;
    GstStructure        *structure;

    mycaps = gst_caps_new_empty();

    if (src->cAttrs.colorSpace == ColorSpace_UYVY) {
        structure = gst_structure_new( "video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('U', 'Y', 'V', 'Y'),
            "framerate", GST_TYPE_FRACTION,
                gst_value_get_fraction_numerator(&src->framerate),
                gst_value_get_fraction_denominator(&src->framerate),
            "width", G_TYPE_INT,    src->width,
            "height", G_TYPE_INT,   src->height,
            (gchar*) NULL);

    }
    else if(src->cAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
        structure = gst_structure_new( "video/x-raw-yuv",
            "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('N', 'V', '1', '2'),
            "framerate", GST_TYPE_FRACTION,
                gst_value_get_fraction_numerator(&src->framerate),
                gst_value_get_fraction_denominator(&src->framerate),
            "width", G_TYPE_INT,    src->width,
            "height", G_TYPE_INT,   src->height,
            (gchar*) NULL);
    }
    else {
        GST_ERROR("unsupported color space\n");
        gst_caps_unref(mycaps);
        return GST_FLOW_ERROR;
    }

    gst_caps_append_structure(mycaps, structure);
    gst_buffer_set_caps(*buf, mycaps);
    gst_caps_unref(mycaps);

    {
        /* debug counter: number of frames produced so far */
        static int fn;

        fn++;
        GST_INFO("capture frame %d", fn);
    }

    GST_LOG("create end");
    return GST_FLOW_OK;
}
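
For orientation: this create() is the GstPushSrc create vfunc from the GStreamer 0.10 API (gst_buffer_set_data and GST_TYPE_FOURCC no longer exist in 1.x). Below is a minimal sketch of how such a vfunc is registered; the class name GstTICaptureSrcClass and the init function name are assumptions, not taken from the element itself.

#include <gst/base/gstpushsrc.h>

/* Hypothetical class_init sketch: hook create() into the base class so
 * it is called once per buffer pushed downstream. */
static void
gst_ticapturesrc_class_init(GstTICaptureSrcClass *klass)
{
    GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS(klass);

    pushsrc_class->create = GST_DEBUG_FUNCPTR(create);
}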
Example 2
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv          *envp           = (CaptureEnv *) arg;
    Void                *status         = THREAD_SUCCESS;
    Capture_Attrs        cAttrs         = Capture_Attrs_DM365_DEFAULT;
    BufferGfx_Attrs      gfxAttrs       = BufferGfx_Attrs_DEFAULT;
    Capture_Handle       hCapture       = NULL;
    BufTab_Handle        hBufTab        = NULL;
    BufferGfx_Dimensions dim;
    Buffer_Handle        hDstBuf, hCapBuf;
    Int32                width, height, bufSize;
    Int                  fifoRet;
    ColorSpace_Type      colorSpace = ColorSpace_YUV420PSEMI;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;    
    cAttrs.videoStd   = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    if (VideoStd_getResolution(envp->videoStd, &width, &height) < 0) {
        ERR("Failed to calculate resolution of video standard\n");
        cleanup(THREAD_FAILURE);
    }

    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (width < envp->imageWidth || height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32-byte aligned data. We 32-byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth = Dmai_roundUp(envp->imageWidth, 32);
    }
    else {
        /* Resolution was not set on command line. Set to defaults. */
        envp->imageHeight = height;

       /*
        * Capture driver provides 32-byte aligned data. We 32-byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(width, 32); 
    }

    Dmai_clear(dim);
    dim.width      = envp->imageWidth;
    dim.height     = envp->imageHeight;

    dim.lineLength = Dmai_roundUp(dim.width, 32);
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = dim.lineLength * dim.height * 3 / 2;
    } else {
        bufSize = dim.lineLength * dim.height * 2;
    }
    gfxAttrs.dim = dim;

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(NUM_CAPTURE_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Capture at half frame rate at 720P60 unless in pass-through mode */
    if ((envp->videoStd == VideoStd_720P_60) && (!envp->passThrough)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &dim;
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Get a buffer from the video thread */
    fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

    if (fifoRet < 0) {
        ERR("Failed to get buffer from video thread\n");
        cleanup(THREAD_FAILURE);
    }

    if (fifoRet == Dmai_EFLUSH) {
        cleanup(THREAD_SUCCESS);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        
        /* Get a buffer from the capture driver to encode */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send captured buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        /* Return a buffer to the capture driver */
        if (Capture_put(hCapture, hDstBuf) < 0) {
            ERR("Failed to put capture buffer\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hCapture) {
        Capture_delete(hCapture);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    return status;
}
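
The cleanup(x) calls above are not a libc function: the TI demos define a macro that records the thread status in the local variable named status and jumps to the cleanup: label at the bottom of the function. A minimal sketch consistent with its usage here (an assumption; the real demo macro may additionally call gblSetQuit() so sibling threads stop too):

/* Assumed definition, matching how the thread functions use it */
#define cleanup(x)          \
    do {                    \
        status = (x);       \
        goto cleanup;       \
    } while (0)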
Example 3
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    Framecopy_Attrs         fcAttrs      = Framecopy_Attrs_DEFAULT;
    BufferGfx_Attrs         gfxAttrs     = BufferGfx_Attrs_DEFAULT;
    Smooth_Attrs            smAttrs      = Smooth_Attrs_DEFAULT;
    Time_Attrs              tAttrs       = Time_Attrs_DEFAULT;
    BufTab_Handle           hCapBufTab   = NULL;
    BufTab_Handle           hDisBufTab   = NULL;
    Display_Handle          hDisplay     = NULL;
    Capture_Handle          hCapture     = NULL;
    Framecopy_Handle        hFc          = NULL;
    Smooth_Handle           hSmooth      = NULL;
    Time_Handle             hTime        = NULL;
    Int                     numFrame     = 0;
    Display_Attrs           dAttrs;
    Capture_Attrs           cAttrs;
    Buffer_Handle           cBuf, dBuf;
    Cpu_Device              device;
    Int                     bufIdx;
    UInt32                  time;
    BufferGfx_Dimensions    dim;
    Int32                   bufSize;
    Int                     ret = Dmai_EOK;

    /* Initialize DMAI */
    Dmai_init();

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    /* Set the display and capture attributes depending on device */
    switch (device) {
        case Cpu_Device_DM6467:
            dAttrs = Display_Attrs_DM6467_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6467_DEFAULT;
            break;
        case Cpu_Device_DM365:
        case Cpu_Device_DM368:
            dAttrs = Display_Attrs_DM365_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM365_DEFAULT;
            dAttrs.colorSpace = ColorSpace_YUV420PSEMI;
            cAttrs.colorSpace = dAttrs.colorSpace;
            break;
        case Cpu_Device_OMAPL138:
            dAttrs = Display_Attrs_OMAPL138_VID_DEFAULT;
            cAttrs = Capture_Attrs_OMAPL138_DEFAULT;
            break;
        case Cpu_Device_OMAP3530:
        case Cpu_Device_DM3730:
            dAttrs     = Display_Attrs_O3530_VID_DEFAULT;
            cAttrs     = Capture_Attrs_OMAP3530_DEFAULT;
            dAttrs.colorSpace = cAttrs.colorSpace = ColorSpace_UYVY;
            dAttrs.rotation = 270;
            break;
        default:
            dAttrs = Display_Attrs_DM6446_DM355_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6446_DM355_DEFAULT;
            break;
    }

    if (args->displayStd != -1) {
        dAttrs.displayStd = args->displayStd;
    }

    if (args->displayDevice) {
        dAttrs.displayDevice = args->displayDevice;
    }

    if (args->displayNumBufs != -1) {
        dAttrs.numBufs = args->displayNumBufs;
    }

    /* Enable cropping in capture driver if selected */
    if (args->width != -1 && args->height != -1 && args->crop) {
        cAttrs.cropX = args->xIn;
        cAttrs.cropY = args->yIn;
        cAttrs.cropWidth = args->width;
        cAttrs.cropHeight = args->height;
    }

    cAttrs.videoInput = args->videoInput;

    if (Capture_detectVideoStd(NULL, &cAttrs.videoStd, &cAttrs) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to detect capture video standard\n");
        goto cleanup;
    }

    /* The color space of the capture buffers depends on the device */
    gfxAttrs.colorSpace = cAttrs.colorSpace;

    if (VideoStd_getResolution(cAttrs.videoStd, &gfxAttrs.dim.width,
                               &gfxAttrs.dim.height) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to get resolution of video standard\n");
        goto cleanup;
    }

    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, 
                                              gfxAttrs.colorSpace), 32); 
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;

    if (gfxAttrs.colorSpace == ColorSpace_YUV422PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }
    else if (gfxAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    }
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height;
    }

    if (args->captureUalloc) {
        /* Create a table of video buffers to use with the capture device */
        hCapBufTab = BufTab_create(cAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));
        if (hCapBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    /* Create the capture device driver instance */
    hCapture = Capture_create(hCapBufTab, &cAttrs);

    if (hCapture == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create capture device\n");
        goto cleanup;
    }

    /* Create the display device driver instance */
    dAttrs.videoStd = Capture_getVideoStd(hCapture);
    dAttrs.videoOutput = args->videoOutput;

    if (args->displayUalloc) {
        /* Create a table of video buffers to use with the display device */
        hDisBufTab = BufTab_create(dAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hDisBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    hDisplay = Display_create(hDisBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create display device\n");
        goto cleanup;
    }

    if (args->smooth) {
        /* Create the smooth job */
        hSmooth = Smooth_create(&smAttrs);

        if (hSmooth == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create smooth job\n");
            goto cleanup;
        }
    }
    else {
        /* Create the frame copy job */
        fcAttrs.accel = args->accel;
        hFc = Framecopy_create(&fcAttrs);

        if (hFc == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create frame copy job\n");
            goto cleanup;
        }
    }

    /*
     * If cropping is not used, alter the dimensions of the captured
     * buffers and position the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1 && !args->crop) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Capture_getBufTab(hCapture));
             bufIdx++) {

            cBuf = BufTab_getBuf(Capture_getBufTab(hCapture), bufIdx);
            BufferGfx_getDimensions(cBuf, &dim);

            dim.width   = args->width;
            dim.height  = args->height;
            dim.x       = args->xIn;
            dim.y       = args->yIn;

            if (BufferGfx_setDimensions(cBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Input resolution does not fit in capture frame\n");
                goto cleanup;
            }
        }
    }

    /*
     * Alter the dimensions of the display buffers and position
     * the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Display_getBufTab(hDisplay));
             bufIdx++) {

            dBuf = BufTab_getBuf(Display_getBufTab(hDisplay), bufIdx);
            BufferGfx_getDimensions(dBuf, &dim);

            dim.width   = args->width;
            dim.height  = args->height;
            dim.x       = args->xOut;
            dim.y       = args->yOut;

            if (BufferGfx_setDimensions(dBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Output resolution does not fit in display frame\n");
                goto cleanup;
            }
        }
    }

    if (args->smooth) {
        if (Smooth_config(hSmooth,
                          BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure smooth job\n");
            goto cleanup;
        }
    }
    else {
        /* Configure the frame copy job */
        if (Framecopy_config(hFc, BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure frame copy job\n");
            goto cleanup;
        }
    }

    while (numFrame++ < args->numFrames || args->numFrames == 0) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Get a captured frame from the capture device */
        if (Capture_get(hCapture, &cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get capture buffer\n");
            goto cleanup;
        }

        /* Get a frame from the display device to be filled with data */
        if (Display_get(hDisplay, &dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }
        }

        if (args->smooth) {
            /*
             * Remove interlacing artifacts from the captured buffer and
             * store the result in the display buffer.
             */
            if (Smooth_execute(hSmooth, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute smooth job\n");
                goto cleanup;
            }
        }
        else {
            /* Copy the captured buffer to the display buffer */
            if (Framecopy_execute(hFc, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute frame copy job\n");
                goto cleanup;
            }
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Smooth / Framecopy: %uus ", (Uns) time);
        }

        /* Give captured buffer back to the capture device driver */
        if (Capture_put(hCapture, cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put capture buffer\n");
            goto cleanup;
        }

        /* Send filled buffer to display device driver to be displayed */
        if (Display_put(hDisplay, dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("Frame time: %uus\n", (Uns) time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hSmooth) {
        Smooth_delete(hSmooth);
    }

    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    if (hCapBufTab) {
        BufTab_delete(hCapBufTab);
    }

    if (hDisBufTab) {
        BufTab_delete(hDisBufTab);
    }

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
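
appMain() receives an Args structure that the demo normally fills from the command line (the parser is not shown here). A hypothetical minimal driver follows, using only the fields referenced above; every default value chosen is an assumption.

#include <string.h>  /* memset */

Int main(Void)
{
    Args args;

    /* Hypothetical defaults: -1 means "not set" for the checks above,
     * and zeroed flags disable cropping, smoothing and benchmarking. */
    memset(&args, 0, sizeof(args));
    args.displayStd     = -1;
    args.displayNumBufs = -1;
    args.width          = -1;
    args.height         = -1;
    args.videoInput     = Capture_Input_COMPOSITE;
    args.videoOutput    = Display_Output_COMPOSITE;
    args.numFrames      = 100;  /* 0 would run until interrupted */

    return appMain(&args);
}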
Example 4
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv           *envp     = (CaptureEnv *) arg;
    Void                 *status   = THREAD_SUCCESS;
    Capture_Attrs         cAttrs   = Capture_Attrs_DM365_DEFAULT;
    Display_Attrs         dAttrs   = Display_Attrs_DM365_VID_DEFAULT;
    BufferGfx_Attrs       gfxAttrs = BufferGfx_Attrs_DEFAULT;    
    Capture_Handle        hCapture = NULL;
    Display_Handle        hDisplay = NULL;
    BufTab_Handle         hBufTab  = NULL;
    BufTab_Handle         hDispBufTab = NULL;
    BufTab_Handle         hFifoBufTab = NULL;
    Buffer_Handle         hDstBuf, hCapBuf, hDisBuf, hBuf;
    BufferGfx_Dimensions  capDim;
    VideoStd_Type         videoStd;
    Int32                 width, height, bufSize;
    Int                   fifoRet;
    ColorSpace_Type       colorSpace = ColorSpace_YUV420PSEMI;
    Int                   bufIdx;
    Int                   numCapBufs;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;
    cAttrs.videoStd = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    videoStd = envp->videoStd;

    /* We only support D1, 720P and 1080I input */
    if (videoStd != VideoStd_D1_NTSC && videoStd != VideoStd_D1_PAL &&
        videoStd != VideoStd_720P_60 && videoStd != VideoStd_720P_50 &&
        videoStd != VideoStd_1080I_30) {
        ERR("Need D1/720P/1080I input to this demo\n");
        cleanup(THREAD_FAILURE);
    }
    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (VideoStd_getResolution(videoStd, &width, &height) < 0) {
            ERR("Failed to calculate resolution of video standard\n");
            cleanup(THREAD_FAILURE);
        }

        if (width < envp->imageWidth || height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(envp->imageWidth,32);
        capDim.x          = 0;
        capDim.y          = 0;
        capDim.height     = envp->imageHeight;
        capDim.width      = envp->imageWidth;
        capDim.lineLength = BufferGfx_calcLineLength(capDim.width, colorSpace);
    } 
    else {
        /* Calculate the dimensions of a video standard given a color space */
        if (BufferGfx_calcDimensions(videoStd, colorSpace, &capDim) < 0) {
            ERR("Failed to calculate Buffer dimensions\n");
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        capDim.width      = Dmai_roundUp(capDim.width,32);
        envp->imageWidth  = capDim.width;
        envp->imageHeight = capDim.height;
    }

    numCapBufs = NUM_CAPTURE_BUFS;

    gfxAttrs.dim.height = capDim.height;
    gfxAttrs.dim.width = capDim.width;
    gfxAttrs.dim.lineLength = 
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                     colorSpace), 32);
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    } 
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }

    /* Create a table of buffers to use with the capture driver */
    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(numCapBufs, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use to prime Fifo to video thread */
    hFifoBufTab = BufTab_create(VIDEO_PIPE_SIZE, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hFifoBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Update global data for user interface */
    gblSetImageWidth(envp->imageWidth);
    gblSetImageHeight(envp->imageHeight);

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    /* Capture at half frame rate if using COMPONENT input at 720P */
    if ((envp->videoStd == VideoStd_720P_60) 
        && (envp->videoInput == Capture_Input_COMPONENT)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }

    /* With component input, a 1080I_30 standard is captured as 1080I_60 */
    if (cAttrs.videoStd == VideoStd_1080I_30 &&
        cAttrs.videoInput == Capture_Input_COMPONENT) {
        cAttrs.videoStd = VideoStd_1080I_60;
    }

    cAttrs.numBufs    = NUM_CAPTURE_BUFS;    
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &gfxAttrs.dim;
    /* Create the capture device driver instance */
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device. Is video input connected?\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use with the display driver */
    hDispBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hDispBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create display device driver instance if preview is needed */
    if (!envp->previewDisabled) {
        dAttrs.videoStd = envp->videoStd;
        if ( (dAttrs.videoStd == VideoStd_CIF) ||
            (dAttrs.videoStd == VideoStd_SIF_NTSC) ||
            (dAttrs.videoStd == VideoStd_SIF_PAL) ||
            (dAttrs.videoStd == VideoStd_VGA) ||
            (dAttrs.videoStd == VideoStd_D1_NTSC) ||        
            (dAttrs.videoStd == VideoStd_D1_PAL) ) {
            dAttrs.videoOutput = Display_Output_COMPOSITE;
        } else {
            dAttrs.videoOutput = Display_Output_COMPONENT;
        }    
        dAttrs.numBufs    = NUM_DISPLAY_BUFS;
        dAttrs.colorSpace = colorSpace;
        dAttrs.width = capDim.width;
        dAttrs.height = capDim.height;
        hDisplay = Display_create(hDispBufTab, &dAttrs);

        if (hDisplay == NULL) {
            ERR("Failed to create display device\n");
            cleanup(THREAD_FAILURE);
        }
    }

    for (bufIdx = 0; bufIdx < VIDEO_PIPE_SIZE; bufIdx++) {
        /* Queue the video buffers for main thread processing */
        hBuf = BufTab_getFreeBuf(hFifoBufTab);
        if (hBuf == NULL) {
            ERR("Failed to fill video pipeline\n");
            cleanup(THREAD_FAILURE);            
        }

        /* Fill the buffer with black */
        CapBuf_blackFill(hBuf);

        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }
    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Capture a frame */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the display device */
        if ((!envp->previewDisabled) && (Display_get(hDisplay, &hDisBuf) < 0)) {
            ERR("Failed to get display buffer\n");
            cleanup(THREAD_FAILURE);
        }
        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        if (!envp->previewDisabled) {
            /* Release buffer to the display device driver */
            if (Display_put(hDisplay, hDstBuf) < 0) {
                ERR("Failed to put display buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->previewDisabled) {
            /* Return the processed buffer to the capture driver */
            if (Capture_put(hCapture, hDstBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            } 
        }
        else {
            /* Return the displayed buffer to the capture driver */
            if (Capture_put(hCapture, hDisBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();

    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }
    
    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    if (hFifoBufTab) {
        BufTab_delete(hFifoBufTab);
    }
    if (hDispBufTab) {
        BufTab_delete(hDispBufTab);
    }

    return status;
}
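
CapBuf_blackFill() above is demo-local, not a DMAI call. A plausible sketch for the NV12 (ColorSpace_YUV420PSEMI) buffers used here: black is luma 0 plus neutral chroma 0x80 in the interleaved CbCr plane (assumes <string.h> for memset; the real demo helper may differ).

/* Assumed implementation: zero the Y plane and set the interleaved
 * CbCr plane to 0x80 so the frame displays as black. */
static Void CapBuf_blackFill(Buffer_Handle hBuf)
{
    BufferGfx_Dimensions dim;
    Int8                *ptr = Buffer_getUserPtr(hBuf);
    Int32                lumaBytes;

    BufferGfx_getDimensions(hBuf, &dim);
    lumaBytes = dim.lineLength * dim.height;

    memset(ptr, 0x00, lumaBytes);                  /* Y plane */
    memset(ptr + lumaBytes, 0x80, lumaBytes / 2);  /* CbCr plane */

    Buffer_setNumBytesUsed(hBuf, lumaBytes * 3 / 2);
}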
Example 5
succeed_type CAPTURE_PUT(Capture_Handle phandle, CaptureBuffer *buffer)
{
    /* Hand the buffer back to the capture driver for reuse */
    return Capture_put(phandle, buffer);
}
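
A hypothetical usage sketch of the wrapper pair; CAPTURE_GET, process_frame and capture_loop are assumptions, not part of the snippet above. The point of the pair is symmetry: every buffer dequeued with a get must be re-queued with a put, or the capture driver eventually runs out of buffers.

/* Assumed companion wrapper, built the same way as CAPTURE_PUT */
succeed_type CAPTURE_GET(Capture_Handle phandle, CaptureBuffer *buffer)
{
    return Capture_get(phandle, buffer);
}

void capture_loop(Capture_Handle phandle)
{
    CaptureBuffer buf;

    for (;;) {
        if (CAPTURE_GET(phandle, &buf) < 0)   /* dequeue a filled frame */
            break;

        process_frame(&buf);                  /* hypothetical consumer */

        if (CAPTURE_PUT(phandle, &buf) < 0)   /* re-queue it for reuse */
            break;
    }
}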