/******************************************************************************
 * Display_fbdev_get
 ******************************************************************************/
Int Display_fbdev_get(Display_Handle hDisplay, Buffer_Handle *hBufPtr)
{
    BufTab_Handle hBufTab;

    /* Validate arguments BEFORE dereferencing them (the original read
     * hDisplay->hBufTab before the asserts, making them useless). */
    assert(hDisplay);
    assert(hBufPtr);

    hBufTab = hDisplay->hBufTab;

    /* Hand back the buffer currently designated as the working frame.
     * (Removed dead local 'dummy', which was assigned but never used.) */
    *hBufPtr = BufTab_getBuf(hBufTab, hDisplay->workingIdx);

    return Dmai_EOK;
}
/******************************************************************************
 * cleanup
 *
 * Tear down a V4L2 display instance: stop streaming, close the device,
 * unmap any driver-allocated buffers, and free the object. Safe to call
 * on a partially-constructed handle (fd may be -1).
 ******************************************************************************/
static Int cleanup(Display_Handle hDisplay)
{
    Int                ret     = Dmai_EOK;
    BufTab_Handle      hBufTab = hDisplay->hBufTab;
    enum v4l2_buf_type type;
    Int                bufIdx;
    Buffer_Handle      hDispBuf;

    if (hDisplay->fd != -1) {
        if (hDisplay->started) {
            /* Shut off the video display */
            type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
            if (ioctl(hDisplay->fd, VIDIOC_STREAMOFF, &type) == -1) {
                Dmai_err1("VIDIOC_STREAMOFF failed (%s)\n", strerror(errno));
                ret = Dmai_EFAIL;
            }
        }

        if (close(hDisplay->fd) == -1) {
            /* Fixed: message previously said "capture device" although
             * this is the display device. */
            Dmai_err1("Failed to close display device (%s)\n",
                      strerror(errno));
            ret = Dmai_EIO;
        }

        /* Buffers were mmap'ed from the driver only when the user did not
         * supply their own; unmap them in that case. */
        if (hDisplay->userAlloc == FALSE) {
            if (hBufTab) {
                for (bufIdx = 0;
                     bufIdx < BufTab_getNumBufs(hBufTab);
                     bufIdx++) {
                    hDispBuf = BufTab_getBuf(hBufTab, bufIdx);
                    if (munmap(Buffer_getUserPtr(hDispBuf),
                               Buffer_getSize(hDispBuf)) == -1) {
                        /* Fixed: previously labeled "capture buffer" */
                        Dmai_err1("Failed to unmap display buffer%d\n",
                                  bufIdx);
                        ret = Dmai_EFAIL;
                    }
                }
            }
        }
    }

    if (hDisplay->bufDescs) {
        free(hDisplay->bufDescs);
    }

    free(hDisplay);

    return ret;
}
/******************************************************************************
 * cleanup
 *
 * Tear down a V4L2 capture instance: stop streaming, close the device,
 * unmap driver-allocated buffers, free descriptors and the object itself.
 * Safe to call on a partially-constructed handle (fd may be -1).
 ******************************************************************************/
static Int cleanup(Capture_Handle hCapture)
{
    BufTab_Handle      hBufTab = hCapture->hBufTab;
    Int                ret     = Dmai_EOK;
    Int8              *capBufPtr;
    enum v4l2_buf_type type;
    Uns                bufIdx;
    Buffer_Handle      hCapBuf;

    if (hCapture->fd != -1) {
        if (hCapture->started) {
            /* Shut off the video capture */
            type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            if (ioctl(hCapture->fd, VIDIOC_STREAMOFF, &type) == -1) {
                Dmai_err1("VIDIOC_STREAMOFF failed (%s)\n", strerror(errno));
                ret = Dmai_EFAIL;
            }
        }

        if (close(hCapture->fd) == -1) {
            Dmai_err1("Failed to close capture device (%s)\n",
                      strerror(errno));
            ret = Dmai_EIO;
        }

        if (hCapture->userAlloc == FALSE && hBufTab) {
            for (bufIdx = 0; bufIdx < BufTab_getNumBufs(hBufTab); bufIdx++) {
                hCapBuf = BufTab_getBuf(hBufTab, bufIdx);
                capBufPtr = Buffer_getUserPtr(hCapBuf);
                /* The user pointer was advanced by topOffset when the
                 * buffer was mapped, so rewind to the true mapping base
                 * before unmapping. */
                if (munmap(capBufPtr - hCapture->topOffset,
                           Buffer_getSize(hCapBuf)) == -1) {
                    Dmai_err1("Failed to unmap capture buffer%d\n", bufIdx);
                    ret = Dmai_EFAIL;
                }
            }
        }
    }

    /* Fixed: free the descriptors unconditionally. The original freed them
     * only inside the fd != -1 branch, leaking them when allocation
     * succeeded but the device was never opened (and diverging from the
     * sibling display cleanup, which frees unconditionally). */
    if (hCapture->bufDescs) {
        free(hCapture->bufDescs);
    }

    free(hCapture);

    return ret;
}
/******************************************************************************
 * Display_fbdev_get
 *
 * Wait for vertical sync, then return the current working buffer to the
 * caller so it can be filled with the next frame.
 ******************************************************************************/
Int Display_fbdev_get(Display_Handle hDisplay, Buffer_Handle *hBufPtr)
{
    /* FBIO_WAITFORVSYNC reads its argument (typically the screen index),
     * so initialize it rather than passing stack garbage as the original
     * did. The original also dead-stored 'dummy = 1' after the ioctl. */
    Int           dummy = 0;
    BufTab_Handle hBufTab;

    /* Validate arguments BEFORE dereferencing them (the original read
     * hDisplay->hBufTab ahead of the asserts, defeating them). */
    assert(hDisplay);
    assert(hBufPtr);

    hBufTab = hDisplay->hBufTab;

    /* Wait for vertical sync */
    if (ioctl(hDisplay->fd, FBIO_WAITFORVSYNC, &dummy) == -1) {
        Dmai_err1("Failed FBIO_WAITFORVSYNC (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    *hBufPtr = BufTab_getBuf(hBufTab, hDisplay->workingIdx);

    return Dmai_EOK;
}
/******************************************************************************
 * cleanup
 *
 * Tear down an fbdev display instance: restore the original screen
 * settings, unmap the framebuffer, reset the displayed buffer, close the
 * device and free the object.
 ******************************************************************************/
static Int cleanup(Display_Handle hDisplay)
{
    Int ret = Dmai_EOK;
    BufTab_Handle hBufTab = hDisplay->hBufTab;
    struct fb_var_screeninfo varInfo;
    struct fb_fix_screeninfo fixInfo;

    if (hDisplay->fd != -1) {
        if (ioctl(hDisplay->fd, FBIOGET_FSCREENINFO, &fixInfo) == -1) {
            Dmai_err1("Failed FBIOGET_FSCREENINFO (%s)\n", strerror(errno));
            ret = Dmai_EFAIL;
        }

        if (ioctl(hDisplay->fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {
            Dmai_err1("Failed ioctl FBIOGET_VSCREENINFO (%s)\n",
                      strerror(errno));
            ret = Dmai_EFAIL;
        }

        /* Restore the screen settings captured at create time */
        if (ioctl(hDisplay->fd, FBIOPUT_VSCREENINFO,
                  &hDisplay->origVarInfo) == -1) {
            /* Fixed: the message previously said FBIOGET_FSCREENINFO,
             * mislabeling which ioctl actually failed. */
            Dmai_err1("Failed FBIOPUT_VSCREENINFO (%s)\n", strerror(errno));
            ret = Dmai_EFAIL;
        }

        if (hBufTab) {
            /* NOTE(review): if either GET ioctl above failed, fixInfo /
             * varInfo are uninitialized here and the munmap length is
             * garbage. Behavior kept as-is; confirm whether the unmap
             * should be skipped on ioctl failure. Also note hBufTab is
             * released with free() rather than BufTab_delete() -- verify
             * against the BufTab API whether internal Buffer handles
             * leak here. */
            munmap(Buffer_getUserPtr(BufTab_getBuf(hBufTab, 0)),
                   fixInfo.line_length * varInfo.yres_virtual);
            free(hBufTab);
        }

        setDisplayBuffer(hDisplay, 0);
        close(hDisplay->fd);
    }

    free(hDisplay);

    return ret;
}
/******************************************************************************
 * Display_fbdev_create
 *
 * Open the Linux framebuffer device named in attrs, program it for the
 * requested resolution (with numBufs virtual screens for flipping), mmap
 * the framebuffer memory, wrap each screen in a reference Buffer inside a
 * BufTab, black-fill them and show buffer 0.
 *
 * hBufTab must be NULL: fbdev does not accept user-allocated buffers.
 * Returns a Display_Handle, or NULL on failure (cleanup() releases any
 * partial state).
 ******************************************************************************/
Display_Handle Display_fbdev_create(BufTab_Handle hBufTab,
                                    Display_Attrs *attrs)
{
    BufferGfx_Attrs          gfxAttrs = BufferGfx_Attrs_DEFAULT;
    struct fb_var_screeninfo varInfo;
    struct fb_fix_screeninfo fixInfo;
    Int                      displaySize;
    Int                      bufIdx;
    Int8                    *virtPtr;
    Int32                    height, width;
    Display_Handle           hDisplay;
    Buffer_Handle            hBuf;

    if (attrs == NULL) {
        Dmai_err0("Must supply valid attrs\n");
        return NULL;
    }

    if (hBufTab != NULL) {
        Dmai_err0("FBdev display does not accept user allocated buffers\n");
        return NULL;
    }

    hDisplay = calloc(1, sizeof(Display_Object));

    if (hDisplay == NULL) {
        Dmai_err0("Failed to allocate space for Display Object\n");
        return NULL;
    }

    /* Open video display device */
    hDisplay->fd = open(attrs->displayDevice, O_RDWR);

    if (hDisplay->fd == -1) {
        Dmai_err2("Failed to open fb device %s (%s)\n", attrs->displayDevice,
                  strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    /* Get fixed screen info */
    if (ioctl(hDisplay->fd, FBIOGET_FSCREENINFO, &fixInfo) == -1) {
        Dmai_err2("Failed FBIOGET_FSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    /* Get virtual screen info */
    if (ioctl(hDisplay->fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {
        Dmai_err2("Failed FBIOGET_VSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    Dmai_dbg5("Found width=%d height=%d, yres_virtual=%d,xres_virtual=%d,"
              " line_length=%d\n", varInfo.xres, varInfo.yres,
              varInfo.yres_virtual, varInfo.xres_virtual, fixInfo.line_length);

    /* Save current virtual screen info so cleanup() can restore it */
    memcpy(&hDisplay->origVarInfo, &varInfo,
           sizeof(struct fb_var_screeninfo));

    /* If video standard is set to auto then use current height and width */
    if (attrs->videoStd == VideoStd_AUTO) {
        width  = varInfo.xres;
        height = varInfo.yres;
    }
    /* If video standard is not set then use the height/width passed from
     * attribute.
     */
    else if (attrs->videoStd == -1) {
        width  = attrs->width;
        height = attrs->height;
    }
    /* calulcate height/width from video standard */
    else {
        VideoStd_getResolution(attrs->videoStd, &width, &height);
    }

    /* Explicit width/height attributes override, clamped to the screen */
    if (attrs->width > 0) {
        width = attrs->width;
    }

    if (attrs->height > 0) {
        height = attrs->height;
    }

    if (width > varInfo.xres) {
        width = varInfo.xres;
    }

    if (height > varInfo.yres) {
        height = varInfo.yres;
    }

    varInfo.xoffset      = 0;
    varInfo.yoffset      = 0;
    varInfo.xres         = width;
    varInfo.yres         = height;
    varInfo.xres_virtual = width;
    /* One virtual screen per display buffer, stacked vertically */
    varInfo.yres_virtual = varInfo.yres * attrs->numBufs;

    /* Set video display format */
    Dmai_dbg4("Setting width=%d height=%d, yres_virtual=%d,"
              " xres_virtual=%d\n", varInfo.xres, varInfo.yres,
              varInfo.yres_virtual, varInfo.xres_virtual);

    if (ioctl(hDisplay->fd, FBIOPUT_VSCREENINFO, &varInfo) == -1) {
        Dmai_err2("Failed FBIOPUT_VSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    /* Re-read fixed info: line_length may change with the new mode */
    if (ioctl(hDisplay->fd, FBIOGET_FSCREENINFO, &fixInfo) == -1) {
        Dmai_err2("Failed FBIOGET_FSCREENINFO on %s (%s)\n",
                  attrs->displayDevice, strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    Dmai_dbg5("New width=%d, height=%d, yres_virtual=%d,xres_virtual=%d, "
              "line_length=%d\n", varInfo.xres, varInfo.yres,
              varInfo.yres_virtual, varInfo.xres_virtual, fixInfo.line_length);

    gfxAttrs.dim.width       = varInfo.xres;
    gfxAttrs.colorSpace      = attrs->colorSpace;
    gfxAttrs.dim.height      = varInfo.yres;
    gfxAttrs.dim.lineLength  = fixInfo.line_length;
    /* Buffers reference mmap'ed framebuffer memory; nothing is allocated */
    gfxAttrs.bAttrs.reference = TRUE;

    displaySize = fixInfo.line_length * varInfo.yres;

    hBufTab = BufTab_create(attrs->numBufs, displaySize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        Dmai_err0("Failed to allocate BufTab for display buffers\n");
        cleanup(hDisplay);
        return NULL;
    }

    hBuf = BufTab_getBuf(hBufTab, 0);

    Buffer_setNumBytesUsed(hBuf, varInfo.xres * varInfo.yres *
                                 varInfo.bits_per_pixel / 8);

    /* Map the whole framebuffer (all virtual screens) in one go */
    virtPtr = (Int8 *) mmap(NULL,
                            displaySize * attrs->numBufs,
                            PROT_READ | PROT_WRITE,
                            MAP_SHARED, hDisplay->fd, 0);

    if (virtPtr == MAP_FAILED) {
        Dmai_err2("Failed mmap on %s (%s)\n", attrs->displayDevice,
                  strerror(errno));
        cleanup(hDisplay);
        return NULL;
    }

    if (Buffer_setUserPtr(hBuf, virtPtr) < 0) {
        cleanup(hDisplay);
        return NULL;
    }

    _Dmai_blackFill(hBuf);

    /* Fixed: the original printed a local 'physPtr' that was never
     * assigned (uninitialized read); query the buffer instead. */
    Dmai_dbg3("Display buffer %d mapped to %#lx has physical address %#lx\n",
              0, (Int32) virtPtr, Buffer_getPhysicalPtr(hBuf));

    /* Subsequent buffers are consecutive displaySize slices of the map */
    for (bufIdx = 1; bufIdx < attrs->numBufs; bufIdx++) {
        hBuf = BufTab_getBuf(hBufTab, bufIdx);
        Buffer_setNumBytesUsed(hBuf, varInfo.xres * varInfo.yres *
                                     varInfo.bits_per_pixel / 8);

        virtPtr = virtPtr + displaySize;
        Buffer_setUserPtr(hBuf, virtPtr);
        _Dmai_blackFill(hBuf);

        Dmai_dbg3("Display buffer %d mapped to %#lx, physical address %#lx\n",
                  bufIdx, (unsigned long) virtPtr,
                  Buffer_getPhysicalPtr(hBuf));
    }

    hDisplay->hBufTab    = hBufTab;
    hDisplay->displayIdx = 0;
    /* Work on buffer 1 while buffer 0 is on screen (if double buffered) */
    hDisplay->workingIdx = attrs->numBufs > 1 ? 1 : 0;
    hDisplay->displayStd = Display_Std_FBDEV;

    if (setDisplayBuffer(hDisplay, hDisplay->displayIdx) < 0) {
        cleanup(hDisplay);
        return NULL;
    }

    return hDisplay;
}
/****************************************************************************** * _Dmai_v4l2DriverAlloc ******************************************************************************/ Int _Dmai_v4l2DriverAlloc(Int fd, Int numBufs, enum v4l2_buf_type type, struct _VideoBufDesc **bufDescsPtr, BufTab_Handle *hBufTabPtr, Int topOffset, ColorSpace_Type colorSpace) { BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT; struct v4l2_requestbuffers req; struct v4l2_format fmt; _VideoBufDesc *bufDesc; Buffer_Handle hBuf; Int bufIdx; Int8 *virtPtr; Dmai_clear(fmt); fmt.type = type; if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1) { Dmai_err1("VIDIOC_G_FMT failed (%s)\n", strerror(errno)); return Dmai_EFAIL; } Dmai_clear(req); req.count = numBufs; req.type = type; req.memory = V4L2_MEMORY_MMAP; /* Allocate buffers in the capture device driver */ if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) { Dmai_err1("VIDIOC_REQBUFS failed (%s)\n", strerror(errno)); return Dmai_ENOMEM; } if (req.count < numBufs || !req.count) { Dmai_err0("Insufficient device driver buffer memory\n"); return Dmai_ENOMEM; } /* Allocate space for buffer descriptors */ *bufDescsPtr = calloc(numBufs, sizeof(_VideoBufDesc)); if (*bufDescsPtr == NULL) { Dmai_err0("Failed to allocate space for buffer descriptors\n"); return Dmai_ENOMEM; } gfxAttrs.dim.width = fmt.fmt.pix.width; gfxAttrs.dim.height = fmt.fmt.pix.height; gfxAttrs.dim.lineLength = fmt.fmt.pix.bytesperline; gfxAttrs.colorSpace = colorSpace; gfxAttrs.bAttrs.reference = TRUE; *hBufTabPtr = BufTab_create(numBufs, fmt.fmt.pix.sizeimage, BufferGfx_getBufferAttrs(&gfxAttrs)); if (*hBufTabPtr == NULL) { return Dmai_ENOMEM; } for (bufIdx = 0; bufIdx < numBufs; bufIdx++) { bufDesc = &(*bufDescsPtr)[bufIdx]; /* Ask for information about the driver buffer */ Dmai_clear(bufDesc->v4l2buf); bufDesc->v4l2buf.type = type; bufDesc->v4l2buf.memory = V4L2_MEMORY_MMAP; bufDesc->v4l2buf.index = bufIdx; if (ioctl(fd, VIDIOC_QUERYBUF, &bufDesc->v4l2buf) == -1) { Dmai_err1("Failed 
VIDIOC_QUERYBUF (%s)\n", strerror(errno)); return Dmai_EFAIL; } /* Map the driver buffer to user space */ virtPtr = mmap(NULL, bufDesc->v4l2buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, bufDesc->v4l2buf.m.offset) + topOffset; if (virtPtr == MAP_FAILED) { Dmai_err1("Failed to mmap buffer (%s)\n", strerror(errno)); return Dmai_EFAIL; } /* Initialize the Buffer with driver buffer information */ hBuf = BufTab_getBuf(*hBufTabPtr, bufIdx); Buffer_setNumBytesUsed(hBuf, fmt.fmt.pix.bytesperline * fmt.fmt.pix.height); Buffer_setUseMask(hBuf, gfxAttrs.bAttrs.useMask); Buffer_setUserPtr(hBuf, virtPtr); /* Initialize buffer to black */ _Dmai_blackFill(hBuf); Dmai_dbg3("Driver buffer %d mapped to %#x has physical address " "%#lx\n", bufIdx, (Int) virtPtr, Buffer_getPhysicalPtr(hBuf)); bufDesc->hBuf = hBuf; /* Queue buffer in device driver */ if (ioctl(fd, VIDIOC_QBUF, &bufDesc->v4l2buf) == -1) { Dmai_err1("VIODIOC_QBUF failed (%s)\n", strerror(errno)); return Dmai_EFAIL; } } return Dmai_EOK; }
/****************************************************************************** * _Dmai_v4l2UserAlloc ******************************************************************************/ Int _Dmai_v4l2UserAlloc(Int fd, Int numBufs, enum v4l2_buf_type type, struct _VideoBufDesc **bufDescsPtr, BufTab_Handle hBufTab, Int topOffset, ColorSpace_Type colorSpace, Int queueBuffers) { struct v4l2_requestbuffers req; struct v4l2_format fmt; _VideoBufDesc *bufDesc; Buffer_Handle hBuf; Int bufIdx; Dmai_clear(fmt); fmt.type = type; if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1) { Dmai_err1("VIDIOC_G_FMT failed (%s)\n", strerror(errno)); return Dmai_EFAIL; } if (Buffer_getSize(BufTab_getBuf(hBufTab, 0)) < fmt.fmt.pix.width * fmt.fmt.pix.height * 2) { Dmai_err0("Supplied buffers not large enough for video standard\n"); return Dmai_EINVAL; } /* Set the actual size of the buffers allocated */ fmt.type = type; fmt.fmt.pix.sizeimage = Buffer_getSize(BufTab_getBuf(hBufTab, 0)); fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; fmt.fmt.pix.bytesperline = fmt.fmt.pix.width; if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) { Dmai_err1("VIDIOC_S_FMT failed (%s)\n", strerror(errno)); return Dmai_EIO; } /* * Tell the driver that we will use user allocated buffers, but don't * allocate any buffers in the driver (just the internal descriptors). 
*/ Dmai_clear(req); req.type = type; req.count = numBufs; req.memory = V4L2_MEMORY_USERPTR; if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) { Dmai_err0("Could not allocate video display buffers\n"); return Dmai_ENOMEM; } /* The driver may return less buffers than requested */ if (req.count < numBufs || !req.count) { Dmai_err0("Insufficient device driver buffer memory\n"); return Dmai_ENOMEM; } /* Allocate space for buffer descriptors */ *bufDescsPtr = calloc(numBufs, sizeof(_VideoBufDesc)); if (*bufDescsPtr == NULL) { Dmai_err0("Failed to allocate space for buffer descriptors\n"); return Dmai_ENOMEM; } for (bufIdx = 0; bufIdx < numBufs; bufIdx++) { bufDesc = &(*bufDescsPtr)[bufIdx]; hBuf = BufTab_getBuf(hBufTab, bufIdx); if (hBuf == NULL) { Dmai_err0("Failed to get buffer from BufTab for display\n"); return Dmai_ENOMEM; } if (Buffer_getType(hBuf) != Buffer_Type_GRAPHICS) { Dmai_err0("Buffer supplied to Display not a Graphics buffer\n"); return Dmai_EINVAL; } Dmai_clear(bufDesc->v4l2buf); bufDesc->v4l2buf.index = bufIdx; bufDesc->v4l2buf.type = type; bufDesc->v4l2buf.memory = V4L2_MEMORY_USERPTR; bufDesc->v4l2buf.m.userptr = (UInt32) Buffer_getUserPtr(hBuf); bufDesc->v4l2buf.length = (fmt.fmt.pix.sizeimage + 4096) & (~0xFFF); bufDesc->hBuf = hBuf; bufDesc->used = (queueBuffers == FALSE) ? TRUE : FALSE; /* If queueBuffers is TRUE, initialize the buffers to black and queue * them into the driver. */ if (queueBuffers == TRUE) { Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf)); _Dmai_blackFill(hBuf); /* Queue buffer in device driver */ if (ioctl(fd, VIDIOC_QBUF, &bufDesc->v4l2buf) == -1) { Dmai_err1("VIODIC_QBUF failed (%s)\n", strerror(errno)); return Dmai_EFAIL; } } } return Dmai_EOK; }
/******************************************************************************
 * appMain
 *
 * Capture video frames, copy (or de-interlace via the "smooth" job) each
 * frame into a display buffer, and show it, for args->numFrames frames
 * (0 = run forever). Returns 0 on success, 1 on failure.
 ******************************************************************************/
Int appMain(Args * args)
{
    Framecopy_Attrs fcAttrs  = Framecopy_Attrs_DEFAULT;
    BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;
    Smooth_Attrs    smAttrs  = Smooth_Attrs_DEFAULT;
    Time_Attrs      tAttrs   = Time_Attrs_DEFAULT;
    BufTab_Handle   hCapBufTab = NULL;
    BufTab_Handle   hDisBufTab = NULL;
    Display_Handle  hDisplay   = NULL;
    Capture_Handle  hCapture   = NULL;
    Framecopy_Handle hFc       = NULL;
    Smooth_Handle   hSmooth    = NULL;
    Time_Handle     hTime      = NULL;
    Int             numFrame   = 0;
    Display_Attrs   dAttrs;
    Capture_Attrs   cAttrs;
    Buffer_Handle   cBuf, dBuf;
    Cpu_Device      device;
    Int             bufIdx;
    UInt32          time;
    BufferGfx_Dimensions dim;
    Int32           bufSize;
    Int             ret = Dmai_EOK;

    /* Initialize DMAI */
    Dmai_init();

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    /* Set the display and capture attributes depending on device */
    switch (device) {
        case Cpu_Device_DM6467:
            dAttrs = Display_Attrs_DM6467_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6467_DEFAULT;
            break;
        case Cpu_Device_DM365:
        case Cpu_Device_DM368:
            dAttrs = Display_Attrs_DM365_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM365_DEFAULT;
            dAttrs.colorSpace = ColorSpace_YUV420PSEMI;
            cAttrs.colorSpace = dAttrs.colorSpace;
            break;
        case Cpu_Device_OMAPL138:
            dAttrs = Display_Attrs_OMAPL138_VID_DEFAULT;
            cAttrs = Capture_Attrs_OMAPL138_DEFAULT;
            break;
        case Cpu_Device_OMAP3530:
        case Cpu_Device_DM3730:
            dAttrs = Display_Attrs_O3530_VID_DEFAULT;
            cAttrs = Capture_Attrs_OMAP3530_DEFAULT;
            dAttrs.colorSpace = cAttrs.colorSpace = ColorSpace_UYVY;
            dAttrs.rotation = 270;
            break;
        default:
            dAttrs = Display_Attrs_DM6446_DM355_VID_DEFAULT;
            cAttrs = Capture_Attrs_DM6446_DM355_DEFAULT;
            break;
    }

    /* Command line arguments override the per-device defaults */
    if (args->displayStd != -1) {
        dAttrs.displayStd = args->displayStd;
    }

    if (args->displayDevice) {
        dAttrs.displayDevice = args->displayDevice;
    }

    if (args->displayNumBufs != -1) {
        dAttrs.numBufs = args->displayNumBufs;
    }

    /* Enable cropping in capture driver if selected */
    if (args->width != -1 && args->height != -1 && args->crop) {
        cAttrs.cropX      = args->xIn;
        cAttrs.cropY      = args->yIn;
        cAttrs.cropWidth  = args->width;
        cAttrs.cropHeight = args->height;
    }

    cAttrs.videoInput = args->videoInput;

    if (Capture_detectVideoStd(NULL, &cAttrs.videoStd, &cAttrs) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to detect capture video standard\n");
        goto cleanup;
    }

    /* The color space of the capture buffers depend on the device */
    gfxAttrs.colorSpace = cAttrs.colorSpace;

    if (VideoStd_getResolution(cAttrs.videoStd, &gfxAttrs.dim.width,
                               &gfxAttrs.dim.height) < 0) {
        /* Fixed: the original jumped to cleanup without setting ret,
         * so the process exited with success status on this failure. */
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to look up video standard resolution\n");
        goto cleanup;
    }

    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                                              gfxAttrs.colorSpace), 32);
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;

    /* Buffer size depends on how many bytes each pixel occupies in the
     * chosen color space (2x for 422 semi-planar, 1.5x for 420) */
    if (gfxAttrs.colorSpace == ColorSpace_YUV422PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }
    else if (gfxAttrs.colorSpace == ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    }
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height;
    }

    if (args->captureUalloc) {
        /* Create a table of video buffers to use with the capture device */
        hCapBufTab = BufTab_create(cAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hCapBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    /* Create the capture device driver instance */
    hCapture = Capture_create(hCapBufTab, &cAttrs);

    if (hCapture == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create capture device\n");
        goto cleanup;
    }

    /* Create the display device driver instance */
    dAttrs.videoStd    = Capture_getVideoStd(hCapture);
    dAttrs.videoOutput = args->videoOutput;

    if (args->displayUalloc) {
        /* Create a table of video buffers to use with the display device */
        hDisBufTab = BufTab_create(dAttrs.numBufs, bufSize,
                                   BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hDisBufTab == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffers\n");
            goto cleanup;
        }
    }

    hDisplay = Display_create(hDisBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create display device\n");
        goto cleanup;
    }

    if (args->smooth) {
        /* Create the smooth job */
        hSmooth = Smooth_create(&smAttrs);

        if (hSmooth == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create smooth job\n");
            /* Fixed: missing goto -- the original fell through and later
             * called Smooth_config() with a NULL handle. */
            goto cleanup;
        }
    }
    else {
        /* Create the frame copy job */
        fcAttrs.accel = args->accel;
        hFc = Framecopy_create(&fcAttrs);

        if (hFc == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create frame copy job\n");
            goto cleanup;
        }
    }

    /*
     * If cropping is not used, alter the dimensions of the captured
     * buffers and position the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1 && !args->crop) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Capture_getBufTab(hCapture));
             bufIdx++) {

            cBuf = BufTab_getBuf(Capture_getBufTab(hCapture), bufIdx);
            BufferGfx_getDimensions(cBuf, &dim);

            dim.width  = args->width;
            dim.height = args->height;
            dim.x      = args->xIn;
            dim.y      = args->yIn;

            if (BufferGfx_setDimensions(cBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,
                        "Input resolution does not fit in capture frame\n");
                goto cleanup;
            }
        }
    }

    /*
     * Alter the dimensions of the display buffers and position
     * the smaller image inside the full screen.
     */
    if (args->width != -1 && args->height != -1) {
        for (bufIdx = 0;
             bufIdx < BufTab_getNumBufs(Display_getBufTab(hDisplay));
             bufIdx++) {

            dBuf = BufTab_getBuf(Display_getBufTab(hDisplay), bufIdx);
            BufferGfx_getDimensions(dBuf, &dim);

            dim.width  = args->width;
            dim.height = args->height;
            dim.x      = args->xOut;
            dim.y      = args->yOut;

            if (BufferGfx_setDimensions(dBuf, &dim) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,
                        "Output resolution does not fit in display frame\n");
                goto cleanup;
            }
        }
    }

    if (args->smooth) {
        if (Smooth_config(hSmooth,
                          BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                          BufTab_getBuf(Display_getBufTab(hDisplay), 0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure smooth job\n");
            goto cleanup;
        }
    }
    else {
        /* Configure the frame copy job */
        if (Framecopy_config(hFc,
                             BufTab_getBuf(Capture_getBufTab(hCapture), 0),
                             BufTab_getBuf(Display_getBufTab(hDisplay),
                                           0)) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to configure frame copy job\n");
            goto cleanup;
        }
    }

    /* Main capture -> process -> display loop */
    while (numFrame++ < args->numFrames || args->numFrames == 0) {
        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Get a captured frame from the capture device */
        if (Capture_get(hCapture, &cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get capture buffer\n");
            goto cleanup;
        }

        /* Get a frame from the display device to be filled with data */
        if (Display_get(hDisplay, &dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }
        }

        if (args->smooth) {
            /*
             * Remove interlacing artifacts from the captured buffer and
             * store the result in the display buffer.
             */
            if (Smooth_execute(hSmooth, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute smooth job\n");
                goto cleanup;
            }
        }
        else {
            /* Copy the captured buffer to the display buffer */
            if (Framecopy_execute(hFc, cBuf, dBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to execute frame copy job\n");
                goto cleanup;
            }
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Smooth / Framecopy: %uus ", (Uns) time);
        }

        /* Give captured buffer back to the capture device driver */
        if (Capture_put(hCapture, cBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put capture buffer\n");
            goto cleanup;
        }

        /* Send filled buffer to display device driver to be displayed */
        if (Display_put(hDisplay, dBuf) < 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to put display buffer\n");
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_total(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer total\n");
                goto cleanup;
            }

            printf("Frame time: %uus\n", (Uns) time);
        }
    }

cleanup:
    /* Clean up the application */
    if (hSmooth) {
        Smooth_delete(hSmooth);
    }

    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    if (hCapBufTab) {
        BufTab_delete(hCapBufTab);
    }

    if (hDisBufTab) {
        BufTab_delete(hDisBufTab);
    }

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
/* Writer thread: receives encoded frames from the video thread over
 * hWriterInFifo, appends them to the output file, and recycles the
 * buffers back through hWriterOutFifo. Runs until a Fifo call fails
 * (e.g. the fifos are flushed at shutdown).
 *
 * NOTE(review): cleanup(x) here is a project macro (sets 'status' and
 * jumps to the cleanup: label below), not the device-cleanup functions
 * defined elsewhere in this file -- presumably defined in a shared
 * header; confirm. */
Void *writerThrFxn(Void *arg)
{
    WriterEnv *envp = (WriterEnv *) arg;
    Void *status = THREAD_SUCCESS;
    FILE *outFile = NULL;
    Buffer_Attrs bAttrs = Buffer_Attrs_DEFAULT;
    BufTab_Handle hBufTab = NULL;
    Buffer_Handle hOutBuf;
    Int bufIdx;

    /* Initialization */

    /* Open the output video file */
    outFile = fopen(envp->videoFile, "w");

    if (outFile == NULL) {
        ERR("Failed to open %s for writing\n", envp->videoFile);
        cleanup(THREAD_FAILURE);
    }

    /* Create buftab for video thread */
    hBufTab = BufTab_create(NUM_WRITER_BUFS, envp->outBufSize, &bAttrs);

    if (hBufTab == NULL) {
        ERR("Failed to allocate contiguous buffers\n");
        cleanup(THREAD_FAILURE);
    }

    /* Send all buffers to the video thread to be filled with encoded data */
    for (bufIdx = 0; bufIdx < NUM_WRITER_BUFS; bufIdx++) {
        if (Fifo_put(envp->hWriterOutFifo,
                     BufTab_getBuf(hBufTab, bufIdx)) < 0) {
            /* NOTE(review): message says "display thread" but this fifo
             * feeds the video/encode thread -- looks like a copy-paste;
             * verify before changing the log text. */
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while(1) {
        /* Get an encoded buffer from the video thread */
        if (Fifo_get(envp->hWriterInFifo, &hOutBuf) < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Skip zero-length frames; writing them would be a no-op and a
         * 0-item fwrite would look like an error. */
        if (Buffer_getNumBytesUsed(hOutBuf)) {
            if (fwrite(Buffer_getUserPtr(hOutBuf),
                       Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                ERR("Error writing the encoded data to video file\n");
                cleanup(THREAD_FAILURE);
            }
        }
        else {
            printf("Warning, writer received 0 byte encoded frame\n");
        }

        /* Return buffer to capture thread */
        if (Fifo_put(envp->hWriterOutFifo, hOutBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (outFile) {
        fclose(outFile);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    return status;
}
/* Video thread: creates an H.264 encoder, exchanges raw buffers with the
 * DEI thread and encoded buffers with the writer thread, encoding one
 * frame per loop iteration. cleanup(x) is the project macro that sets
 * 'status' and jumps to the cleanup: label. */
Void *videoThrFxn(Void *arg)
{
    VideoEnv               *envp = (VideoEnv *) arg;
    Venc1_Handle            hVe1 = NULL;
    VIDENC1_Params          params = Venc1_Params_DEFAULT;
    VIDENC1_DynamicParams   dynParams = Venc1_DynamicParams_DEFAULT;
    IH264VENC_Params        h264Params = IH264VENC_PARAMS;
    IH264VENC_DynamicParams h264DynParams =
                                H264VENC_TI_IH264VENC_DYNAMICPARAMS;
    VUIParamBuffer          VUI_Buffer = H264VENC_TI_VUIPARAMBUFFER;
    BufTab_Handle           hVidBufTab = NULL;
    Buffer_Handle           hVInBuf, hWOutBuf;
    BufferGfx_Attrs         gfxAttrs = BufferGfx_Attrs_DEFAULT;
    ColorSpace_Type         colorSpace = ColorSpace_YUV420PSEMI;
    Int                     bufSize = 0;
    Void                   *status = THREAD_SUCCESS;

    /* Initialization: static codec parameters */
    params.maxWidth              = envp->imageWidth;
    params.maxHeight             = envp->imageHeight;
    params.inputChromaFormat     = XDM_YUV_420SP;
    params.reconChromaFormat     = XDM_YUV_420SP;
    params.maxFrameRate          = envp->videoFrameRate;
    params.encodingPreset        = XDM_USER_DEFINED;
    params.rateControlPreset     = IVIDEO_USER_DEFINED;
    params.maxBitRate            = 10000000;

    /* Target 90% of the requested bitrate to leave headroom */
    dynParams.targetBitRate      = envp->videoBitRate*0.9;
    dynParams.inputWidth         = envp->imageWidth;
    dynParams.captureWidth       =
        Dmai_roundUp(BufferGfx_calcLineLength(envp->imageWidth,
                                              colorSpace), 32);
    dynParams.inputHeight        = envp->imageHeight;
    dynParams.refFrameRate       = params.maxFrameRate;
    dynParams.targetFrameRate    = params.maxFrameRate;
    dynParams.intraFrameInterval = 0;
    dynParams.interFrameInterval = 0;

    h264Params.videncParams      = params;
    h264Params.videncParams.size = sizeof(IH264VENC_Params);
    h264Params.encQuality        = 1;
    h264Params.enableDDRbuff     = 1; /* Uses DDR instead of VICP buffers */
    h264Params.enableARM926Tcm   = 0;
    h264Params.enableVUIparams   = (0x1 << 1);
    h264Params.videncParams.inputContentType = IVIDEO_PROGRESSIVE;

    h264DynParams.videncDynamicParams      = dynParams;
    h264DynParams.videncDynamicParams.size = sizeof(IH264VENC_DynamicParams);

    h264DynParams.VUI_Buffer = &VUI_Buffer;
    h264DynParams.VUI_Buffer->aspectRatioInfoPresentFlag  = 1;
    h264DynParams.VUI_Buffer->overscanInfoPresentFlag     = 0;
    h264DynParams.VUI_Buffer->videoSignalTypePresentFlag  = 0;
    h264DynParams.VUI_Buffer->timingInfoPresentFlag       = 1;
    h264DynParams.VUI_Buffer->numUnitsInTicks             = 1;
    h264DynParams.VUI_Buffer->timeScale = params.maxFrameRate / 1000;
    h264DynParams.VUI_Buffer->fixedFrameRateFlag          = 1;
    h264DynParams.VUI_Buffer->nalHrdParameterspresentFlag = 1;
    h264DynParams.VUI_Buffer->picStructPresentFlag        = 1;

    h264DynParams.idrFrameInterval = 15;

    hVe1 = Venc1_create(envp->hEngine, envp->videoEncoder,
                        (IVIDENC1_Params *) &h264Params,
                        (IVIDENC1_DynamicParams *) &h264DynParams);

    if (hVe1 == NULL) {
        ERR("Failed to create video encoder: %s\n", envp->videoEncoder);
        cleanup(THREAD_FAILURE);
    }

    /* Store the output buffer size in the environment */
    envp->outBufSize = Venc1_getOutBufSize(hVe1);

    /* Signal that the codec is created and output buffer size available */
    Rendezvous_meet(envp->hRendezvousWriter);

    /* Video BufTab create */
    BufferGfx_calcDimensions(VideoStd_D1_PAL, colorSpace, &gfxAttrs.dim);
    /* NOTE(review): dimensions are then overridden to hard-coded
     * 704x576 (D1 PAL) regardless of envp->imageWidth/Height --
     * presumably intentional for this pipeline; confirm. */
    gfxAttrs.dim.width      = 704;
    gfxAttrs.dim.height     = 576;
    gfxAttrs.dim.lineLength =
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                                              colorSpace), 32);
    gfxAttrs.colorSpace     = colorSpace;
    /* YUV420 semi-planar: 1.5 bytes per pixel */
    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;

    hVidBufTab = BufTab_create(NUM_VIDEO_BUFS, bufSize,
                               BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hVidBufTab == NULL) {
        ERR("Failed to create video buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Set input buffer table */
    Venc1_setBufTab(hVe1, hVidBufTab);

    /* Send video buffers to DEI */
    Int nBufId = 0;
    for (nBufId = 0; nBufId < NUM_VIDEO_BUFS; nBufId++) {
        hVInBuf = BufTab_getBuf(hVidBufTab, nBufId);
        if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
            ERR("Failed to send buffer to dei thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while(1) {
        /* Get buffer from DEI thread */
        if(Fifo_get(envp->hVideoInFifo, &hVInBuf) < 0) {
            ERR("Failed to get buffer from dei thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get buffer from Writer thread */
        if(Fifo_get(envp->hWriterOutFifo, &hWOutBuf) < 0) {
            ERR("Failed to get buffer from writer thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Make sure the whole buffer is used for input */
        BufferGfx_resetDimensions(hVInBuf);

        /* Encode */
        if (Venc1_process(hVe1, hVInBuf, hWOutBuf) < 0) {
            ERR("Failed to encode video buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Put buffer to dei thread */
        if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
            ERR("Failed to send buffer to dei thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Put buffer to writer thread */
        if (Fifo_put(envp->hWriterInFifo, hWOutBuf) < 0) {
            /* Fixed: message previously said "dei thread" (copy-paste);
             * this fifo feeds the writer thread. */
            ERR("Failed to send buffer to writer thread\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Rendezvous_force(envp->hRendezvousWriter);

    /* Make sure the other threads aren't waiting for init to complete */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hVidBufTab) {
        BufTab_delete(hVidBufTab);
    }

    if (hVe1) {
        Venc1_delete(hVe1);
    }

    return status;
}
/****************************************************************************** * gst_tiaudenc1_encode_thread * Call the audio codec to process a full input buffer ******************************************************************************/ static void* gst_tiaudenc1_encode_thread(void *arg) { GstTIAudenc1 *audenc1 = GST_TIAUDENC1(gst_object_ref(arg)); void *threadRet = GstTIThreadSuccess; Buffer_Handle hDstBuf; Int32 encDataConsumed; GstBuffer *encDataWindow = NULL; GstClockTime encDataTime; Buffer_Handle hEncDataWindow; GstBuffer *outBuf; GstClockTime sampleDuration; guint sampleRate; guint numSamples; Int bufIdx; Int ret; GST_LOG("starting audenc encode thread\n"); /* Initialize codec engine */ ret = gst_tiaudenc1_codec_start(audenc1); /* Notify main thread that it is ok to continue initialization */ Rendezvous_meet(audenc1->waitOnEncodeThread); Rendezvous_reset(audenc1->waitOnEncodeThread); if (ret == FALSE) { GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED, ("Failed to start codec\n"), (NULL)); goto thread_exit; } while (TRUE) { /* Obtain an raw data frame */ encDataWindow = gst_ticircbuffer_get_data(audenc1->circBuf); encDataTime = GST_BUFFER_TIMESTAMP(encDataWindow); hEncDataWindow = GST_TIDMAIBUFFERTRANSPORT_DMAIBUF(encDataWindow); /* Check if there is enough encoded data to be sent to the codec. * The last frame of data may not be sufficient to meet the codec * requirements for the amount of input data. If so just throw * away the last bit of data rather than filling with bogus * data. 
*/ if (GST_BUFFER_SIZE(encDataWindow) < Aenc1_getInBufSize(audenc1->hAe)) { GST_LOG("Not enough audio data remains\n"); if (!audenc1->drainingEOS) { goto thread_failure; } goto thread_exit; } /* Obtain a free output buffer for the encoded data */ if (!(hDstBuf = gst_tidmaibuftab_get_buf(audenc1->hOutBufTab))) { GST_ELEMENT_ERROR(audenc1, RESOURCE, READ, ("Failed to get a free contiguous buffer from BufTab\n"), (NULL)); goto thread_exit; } /* Invoke the audio encoder */ GST_LOG("Invoking the audio encoder at 0x%08lx with %u bytes\n", (unsigned long)Buffer_getUserPtr(hEncDataWindow), GST_BUFFER_SIZE(encDataWindow)); ret = Aenc1_process(audenc1->hAe, hEncDataWindow, hDstBuf); encDataConsumed = Buffer_getNumBytesUsed(hEncDataWindow); if (ret < 0) { GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE, ("Failed to encode audio buffer\n"), (NULL)); goto thread_failure; } /* If no encoded data was used we cannot find the next frame */ if (ret == Dmai_EBITERROR && encDataConsumed == 0) { GST_ELEMENT_ERROR(audenc1, STREAM, ENCODE, ("Fatal bit error\n"), (NULL)); goto thread_failure; } if (ret > 0) { GST_LOG("Aenc1_process returned success code %d\n", ret); } sampleRate = audenc1->samplefreq; numSamples = encDataConsumed / (2 * audenc1->channels) ; sampleDuration = GST_FRAMES_TO_CLOCK_TIME(numSamples, sampleRate); /* Release the reference buffer, and tell the circular buffer how much * data was consumed. */ ret = gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow, encDataConsumed); encDataWindow = NULL; if (!ret) { goto thread_failure; } /* Set the source pad capabilities based on the encoded frame * properties. */ gst_tiaudenc1_set_source_caps(audenc1); /* Create a DMAI transport buffer object to carry a DMAI buffer to * the source pad. The transport buffer knows how to release the * buffer for re-use in this element when the source pad calls * gst_buffer_unref(). 
*/ outBuf = gst_tidmaibuffertransport_new(hDstBuf, audenc1->hOutBufTab, NULL, NULL); gst_buffer_set_data(outBuf, GST_BUFFER_DATA(outBuf), Buffer_getNumBytesUsed(hDstBuf)); gst_buffer_set_caps(outBuf, GST_PAD_CAPS(audenc1->srcpad)); /* Set timestamp on output buffer */ if (audenc1->genTimeStamps) { GST_BUFFER_DURATION(outBuf) = sampleDuration; GST_BUFFER_TIMESTAMP(outBuf) = encDataTime; } else { GST_BUFFER_TIMESTAMP(outBuf) = GST_CLOCK_TIME_NONE; } /* Tell circular buffer how much time we consumed */ gst_ticircbuffer_time_consumed(audenc1->circBuf, sampleDuration); /* Push the transport buffer to the source pad */ GST_LOG("pushing buffer to source pad with timestamp : %" GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP(outBuf)), GST_TIME_ARGS (GST_BUFFER_DURATION(outBuf))); if (gst_pad_push(audenc1->srcpad, outBuf) != GST_FLOW_OK) { GST_DEBUG("push to source pad failed\n"); goto thread_failure; } /* Release buffers no longer in use by the codec */ Buffer_freeUseMask(hDstBuf, gst_tidmaibuffer_CODEC_FREE); } thread_failure: gst_tithread_set_status(audenc1, TIThread_CODEC_ABORTED); gst_ticircbuffer_consumer_aborted(audenc1->circBuf); threadRet = GstTIThreadFailure; thread_exit: /* Re-claim any buffers owned by the codec */ bufIdx = BufTab_getNumBufs(GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab)); while (bufIdx-- > 0) { Buffer_Handle hBuf = BufTab_getBuf( GST_TIDMAIBUFTAB_BUFTAB(audenc1->hOutBufTab), bufIdx); Buffer_freeUseMask(hBuf, gst_tidmaibuffer_CODEC_FREE); } /* Release the last buffer we retrieved from the circular buffer */ if (encDataWindow) { gst_ticircbuffer_data_consumed(audenc1->circBuf, encDataWindow, 0); } /* We have to wait to shut down this thread until we can guarantee that * no more input buffers will be queued into the circular buffer * (we're about to delete it). 
*/ Rendezvous_meet(audenc1->waitOnEncodeThread); Rendezvous_reset(audenc1->waitOnEncodeThread); /* Notify main thread that we are done draining before we shutdown the * codec, or we will hang. We proceed in this order so the EOS event gets * propagated downstream before we attempt to shut down the codec. The * codec-shutdown process will block until all BufTab buffers have been * released, and downstream-elements may hang on to buffers until * they get the EOS. */ Rendezvous_force(audenc1->waitOnEncodeDrain); /* Initialize codec engine */ if (gst_tiaudenc1_codec_stop(audenc1) < 0) { GST_ERROR("failed to stop codec\n"); GST_ELEMENT_ERROR(audenc1, RESOURCE, FAILED, ("Failed to stop codec\n"), (NULL)); } gst_object_unref(audenc1); GST_LOG("exit audio encode_thread (%d)\n", (int)threadRet); return threadRet; }