/******************************************************************************
 * Display_v4l2_get
 ******************************************************************************/
Int Display_v4l2_get(Display_Handle hDisplay, Buffer_Handle *hBufPtr)
{
    struct v4l2_buffer v4l2buf;

    assert(hDisplay);
    assert(hBufPtr);

    Dmai_clear(v4l2buf);
    v4l2buf.type   = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    v4l2buf.memory = hDisplay->userAlloc ? V4L2_MEMORY_USERPTR :
                                           V4L2_MEMORY_MMAP;

    /* Dequeue a frame buffer the driver has finished displaying */
    if (ioctl(hDisplay->fd, VIDIOC_DQBUF, &v4l2buf) < 0) {
        Dmai_err1("VIDIOC_DQBUF failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    *hBufPtr = hDisplay->bufDescs[v4l2buf.index].hBuf;
    hDisplay->bufDescs[v4l2buf.index].used = TRUE;

    return Dmai_EOK;
}
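/*
 * For context, a minimal sketch of the dequeue/requeue cycle this accessor
 * supports. Assumptions: the standard DMAI Display_put() counterpart exists
 * and hDisplay was created elsewhere; error handling is abbreviated.
 */
Void displayLoopSketch(Display_Handle hDisplay)
{
    Buffer_Handle hDispBuf;

    /* Dequeue a buffer the driver has finished displaying */
    if (Display_v4l2_get(hDisplay, &hDispBuf) < 0) {
        return;
    }

    /* ... fill hDispBuf with the next frame here ... */

    /* Hand the buffer back to the driver (requeues it via VIDIOC_QBUF) */
    Display_put(hDisplay, hDispBuf);
}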
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize,
                   Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#ifdef CONFIG_DM365_IPIPE
    struct imp_convert   rsz;
    BufferGfx_Dimensions srcDim;
    BufferGfx_Dimensions dstDim;
    UInt32               srcOffset;
    UInt32               dstOffset;

    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    srcOffset = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
    dstOffset = dstDim.y * dstDim.lineLength + (dstDim.x << 1);

    rsz.in_buff.index    = -1;
    rsz.in_buff.buf_type = IMP_BUF_IN;
    /* The IPIPE driver expects a physical address when index == -1 (see
     * the two-output Resize_execute variant below) */
    rsz.in_buff.offset   = Buffer_getPhysicalPtr(hSrcBuf) + srcOffset;
    rsz.in_buff.size     = Buffer_getSize(hSrcBuf);

    rsz.out_buff1.index    = -1;
    rsz.out_buff1.buf_type = IMP_BUF_OUT1;
    rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf) + dstOffset;
    rsz.out_buff1.size     = Buffer_getSize(hDstBuf);

    /*
     * The IPIPE requires that the memory offsets of the input and output
     * buffers start on 32-byte boundaries.
     */
    assert((rsz.in_buff.offset   & 0x1F) == 0);
    assert((rsz.out_buff1.offset & 0x1F) == 0);

    /* Start IPIPE operation */
    if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
        Dmai_err0("Failed RSZ_RESIZE\n");
        return Dmai_EFAIL;
    }

    Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));

    return Dmai_EOK;
#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif
}
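/*
 * A worked sketch of the offset arithmetic used above. Assumptions: UYVY
 * pixels (2 bytes each, hence the x << 1); dimensions are illustrative.
 */
Void resizeOffsetSketch(Void)
{
    /* A 720x480 UYVY buffer with a 32-byte aligned pitch, cropped at
     * (x=16, y=8) */
    UInt32 lineLength = 1440;   /* 720 pixels * 2 bytes per pixel */
    UInt32 x = 16, y = 8;

    /* 8 * 1440 + 32 = 11552 bytes into the buffer */
    UInt32 offset = y * lineLength + (x << 1);

    /* 11552 % 32 == 0, so the IPIPE alignment assert passes; an odd x or
     * an unaligned pitch would trip it. */
    assert((offset & 0x1F) == 0);
}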
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv           *envp       = (CaptureEnv *) arg;
    Void                 *status     = THREAD_SUCCESS;
    Capture_Attrs         cAttrs     = Capture_Attrs_DM365_DEFAULT;
    BufferGfx_Attrs       gfxAttrs   = BufferGfx_Attrs_DEFAULT;
    Capture_Handle        hCapture   = NULL;
    BufTab_Handle         hBufTab    = NULL;
    BufferGfx_Dimensions  dim;
    Buffer_Handle         hDstBuf, hCapBuf;
    Int32                 width, height, bufSize;
    Int                   fifoRet;
    ColorSpace_Type       colorSpace = ColorSpace_YUV420PSEMI;

    /* Create capture device driver instance */
    cAttrs.numBufs    = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;
    cAttrs.videoStd   = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    if (VideoStd_getResolution(envp->videoStd, &width, &height) < 0) {
        ERR("Failed to calculate resolution of video standard\n");
        cleanup(THREAD_FAILURE);
    }

    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (width < envp->imageWidth && height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

        /*
         * Capture driver provides 32-byte aligned data. We 32-byte align the
         * capture and video buffers to perform zero copy encoding.
         */
        envp->imageWidth = Dmai_roundUp(envp->imageWidth, 32);
    }
    else {
        /* Resolution was not set on command line. Set to defaults. */
        envp->imageHeight = height;

        /*
         * Capture driver provides 32-byte aligned data. We 32-byte align the
         * capture and video buffers to perform zero copy encoding.
         */
        envp->imageWidth = Dmai_roundUp(width, 32);
    }

    Dmai_clear(dim);
    dim.width      = envp->imageWidth;
    dim.height     = envp->imageHeight;
    dim.lineLength = Dmai_roundUp(dim.width, 32);

    /* YUV420 semi-planar needs 1.5 bytes per pixel, UYVY needs 2 */
    if (colorSpace == ColorSpace_YUV420PSEMI) {
        bufSize = dim.lineLength * dim.height * 3 / 2;
    }
    else {
        bufSize = dim.lineLength * dim.height * 2;
    }

    gfxAttrs.dim = dim;

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(NUM_CAPTURE_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* 720P 60fps input is captured at 30fps unless we pass it through */
    if ((envp->videoStd == VideoStd_720P_60) && (!envp->passThrough)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;
    }

    cAttrs.colorSpace       = colorSpace;
    cAttrs.captureDimension = &dim;
    cAttrs.numBufs          = NUM_CAPTURE_BUFS;

    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Get a buffer from the video thread */
    fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

    if (fifoRet < 0) {
        ERR("Failed to get buffer from video thread\n");
        cleanup(THREAD_FAILURE);
    }

    if (fifoRet == Dmai_EFLUSH) {
        cleanup(THREAD_SUCCESS);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Get a buffer from the capture driver to encode */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send captured buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        /* Return a buffer to the capture driver */
        if (Capture_put(hCapture, hDstBuf) < 0) {
            ERR("Failed to put capture buffer\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hCapture) {
        Capture_delete(hCapture);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    return status;
}
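/*
 * The capture and video threads exchange buffers over a fifo pair. A
 * minimal sketch of the video-thread side of that ping-pong; the fifo
 * names mirror the capture side and the encode step is elided.
 */
Void videoLoopSketch(Fifo_Handle hCaptureOutFifo, Fifo_Handle hCaptureInFifo)
{
    Buffer_Handle hCapBuf;

    for (;;) {
        /* Receive a filled capture buffer */
        if (Fifo_get(hCaptureOutFifo, &hCapBuf) < 0) {
            break;  /* error or fifo flushed */
        }

        /* ... encode hCapBuf here ... */

        /* Hand the buffer back so the capture driver can reuse it */
        if (Fifo_put(hCaptureInFifo, hCapBuf) < 0) {
            break;
        }
    }
}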
/******************************************************************************
 * _Dmai_v4l2DriverAlloc
 ******************************************************************************/
Int _Dmai_v4l2DriverAlloc(Int fd, Int numBufs, enum v4l2_buf_type type,
                          struct _VideoBufDesc **bufDescsPtr,
                          BufTab_Handle *hBufTabPtr, Int topOffset,
                          ColorSpace_Type colorSpace)
{
    BufferGfx_Attrs            gfxAttrs = BufferGfx_Attrs_DEFAULT;
    struct v4l2_requestbuffers req;
    struct v4l2_format         fmt;
    _VideoBufDesc             *bufDesc;
    Buffer_Handle              hBuf;
    Int                        bufIdx;
    Int8                      *virtPtr;

    Dmai_clear(fmt);
    fmt.type = type;

    if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
        Dmai_err1("VIDIOC_G_FMT failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    Dmai_clear(req);
    req.count  = numBufs;
    req.type   = type;
    req.memory = V4L2_MEMORY_MMAP;

    /* Allocate buffers in the capture device driver */
    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
        Dmai_err1("VIDIOC_REQBUFS failed (%s)\n", strerror(errno));
        return Dmai_ENOMEM;
    }

    if (req.count < numBufs || !req.count) {
        Dmai_err0("Insufficient device driver buffer memory\n");
        return Dmai_ENOMEM;
    }

    /* Allocate space for buffer descriptors */
    *bufDescsPtr = calloc(numBufs, sizeof(_VideoBufDesc));

    if (*bufDescsPtr == NULL) {
        Dmai_err0("Failed to allocate space for buffer descriptors\n");
        return Dmai_ENOMEM;
    }

    gfxAttrs.dim.width        = fmt.fmt.pix.width;
    gfxAttrs.dim.height       = fmt.fmt.pix.height;
    gfxAttrs.dim.lineLength   = fmt.fmt.pix.bytesperline;
    gfxAttrs.colorSpace       = colorSpace;
    gfxAttrs.bAttrs.reference = TRUE;

    *hBufTabPtr = BufTab_create(numBufs, fmt.fmt.pix.sizeimage,
                                BufferGfx_getBufferAttrs(&gfxAttrs));

    if (*hBufTabPtr == NULL) {
        return Dmai_ENOMEM;
    }

    for (bufIdx = 0; bufIdx < numBufs; bufIdx++) {
        bufDesc = &(*bufDescsPtr)[bufIdx];

        /* Ask for information about the driver buffer */
        Dmai_clear(bufDesc->v4l2buf);
        bufDesc->v4l2buf.type   = type;
        bufDesc->v4l2buf.memory = V4L2_MEMORY_MMAP;
        bufDesc->v4l2buf.index  = bufIdx;

        if (ioctl(fd, VIDIOC_QUERYBUF, &bufDesc->v4l2buf) == -1) {
            Dmai_err1("Failed VIDIOC_QUERYBUF (%s)\n", strerror(errno));
            return Dmai_EFAIL;
        }

        /* Map the driver buffer to user space */
        virtPtr = (Int8 *) mmap(NULL,
                                bufDesc->v4l2buf.length,
                                PROT_READ | PROT_WRITE,
                                MAP_SHARED,
                                fd,
                                bufDesc->v4l2buf.m.offset);

        /* Check for failure before applying topOffset; adding the offset
         * first would mask the MAP_FAILED sentinel value. */
        if (virtPtr == MAP_FAILED) {
            Dmai_err1("Failed to mmap buffer (%s)\n", strerror(errno));
            return Dmai_EFAIL;
        }

        virtPtr += topOffset;

        /* Initialize the Buffer with driver buffer information */
        hBuf = BufTab_getBuf(*hBufTabPtr, bufIdx);

        Buffer_setNumBytesUsed(hBuf, fmt.fmt.pix.bytesperline *
                                     fmt.fmt.pix.height);
        Buffer_setUseMask(hBuf, gfxAttrs.bAttrs.useMask);
        Buffer_setUserPtr(hBuf, virtPtr);

        /* Initialize buffer to black */
        _Dmai_blackFill(hBuf);

        Dmai_dbg3("Driver buffer %d mapped to %#x has physical address "
                  "%#lx\n", bufIdx, (Int) virtPtr,
                  Buffer_getPhysicalPtr(hBuf));

        bufDesc->hBuf = hBuf;

        /* Queue buffer in device driver */
        if (ioctl(fd, VIDIOC_QBUF, &bufDesc->v4l2buf) == -1) {
            Dmai_err1("VIDIOC_QBUF failed (%s)\n", strerror(errno));
            return Dmai_EFAIL;
        }
    }

    return Dmai_EOK;
}
/******************************************************************************
 * _Dmai_v4l2UserAlloc
 ******************************************************************************/
Int _Dmai_v4l2UserAlloc(Int fd, Int numBufs, enum v4l2_buf_type type,
                        struct _VideoBufDesc **bufDescsPtr,
                        BufTab_Handle hBufTab, Int topOffset,
                        ColorSpace_Type colorSpace, Int queueBuffers)
{
    struct v4l2_requestbuffers req;
    struct v4l2_format         fmt;
    _VideoBufDesc             *bufDesc;
    Buffer_Handle              hBuf;
    Int                        bufIdx;

    Dmai_clear(fmt);
    fmt.type = type;

    if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
        Dmai_err1("VIDIOC_G_FMT failed (%s)\n", strerror(errno));
        return Dmai_EFAIL;
    }

    if (Buffer_getSize(BufTab_getBuf(hBufTab, 0)) <
        fmt.fmt.pix.width * fmt.fmt.pix.height * 2) {
        Dmai_err0("Supplied buffers not large enough for video standard\n");
        return Dmai_EINVAL;
    }

    /* Set the actual size of the buffers allocated */
    fmt.type                 = type;
    fmt.fmt.pix.sizeimage    = Buffer_getSize(BufTab_getBuf(hBufTab, 0));
    fmt.fmt.pix.pixelformat  = V4L2_PIX_FMT_UYVY;
    fmt.fmt.pix.bytesperline = fmt.fmt.pix.width;

    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        Dmai_err1("VIDIOC_S_FMT failed (%s)\n", strerror(errno));
        return Dmai_EIO;
    }

    /*
     * Tell the driver that we will use user allocated buffers, but don't
     * allocate any buffers in the driver (just the internal descriptors).
     */
    Dmai_clear(req);
    req.type   = type;
    req.count  = numBufs;
    req.memory = V4L2_MEMORY_USERPTR;

    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
        Dmai_err0("Could not allocate video display buffers\n");
        return Dmai_ENOMEM;
    }

    /* The driver may return fewer buffers than requested */
    if (req.count < numBufs || !req.count) {
        Dmai_err0("Insufficient device driver buffer memory\n");
        return Dmai_ENOMEM;
    }

    /* Allocate space for buffer descriptors */
    *bufDescsPtr = calloc(numBufs, sizeof(_VideoBufDesc));

    if (*bufDescsPtr == NULL) {
        Dmai_err0("Failed to allocate space for buffer descriptors\n");
        return Dmai_ENOMEM;
    }

    for (bufIdx = 0; bufIdx < numBufs; bufIdx++) {
        bufDesc = &(*bufDescsPtr)[bufIdx];

        hBuf = BufTab_getBuf(hBufTab, bufIdx);

        if (hBuf == NULL) {
            Dmai_err0("Failed to get buffer from BufTab for display\n");
            return Dmai_ENOMEM;
        }

        if (Buffer_getType(hBuf) != Buffer_Type_GRAPHICS) {
            Dmai_err0("Buffer supplied to Display not a Graphics buffer\n");
            return Dmai_EINVAL;
        }

        Dmai_clear(bufDesc->v4l2buf);
        bufDesc->v4l2buf.index     = bufIdx;
        bufDesc->v4l2buf.type      = type;
        bufDesc->v4l2buf.memory    = V4L2_MEMORY_USERPTR;
        bufDesc->v4l2buf.m.userptr = (UInt32) Buffer_getUserPtr(hBuf);
        /* Round the buffer length up to the next 4096-byte page */
        bufDesc->v4l2buf.length    = (fmt.fmt.pix.sizeimage + 4096) &
                                     (~0xFFF);
        bufDesc->hBuf              = hBuf;
        bufDesc->used              = (queueBuffers == FALSE) ? TRUE : FALSE;

        /*
         * If queueBuffers is TRUE, initialize the buffers to black and queue
         * them into the driver.
         */
        if (queueBuffers == TRUE) {
            Buffer_setNumBytesUsed(hBuf, Buffer_getSize(hBuf));
            _Dmai_blackFill(hBuf);

            /* Queue buffer in device driver */
            if (ioctl(fd, VIDIOC_QBUF, &bufDesc->v4l2buf) == -1) {
                Dmai_err1("VIDIOC_QBUF failed (%s)\n", strerror(errno));
                return Dmai_EFAIL;
            }
        }
    }

    return Dmai_EOK;
}
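/*
 * A note on the length rounding above: (size + 4096) & ~0xFFF always
 * advances to the next 4096-byte page, even when size is already page
 * aligned; the usual round-up idiom adds 4095 instead. A small sketch
 * contrasting the two (illustrative values only):
 */
Void pageRoundSketch(Void)
{
    UInt32 sizeimage = 8192;                   /* already page aligned */

    UInt32 a = (sizeimage + 4096) & (~0xFFF);  /* 12288: an extra page */
    UInt32 b = (sizeimage + 4095) & (~0xFFF);  /* 8192: exact round-up */

    assert(a == 12288 && b == 8192);
}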
/******************************************************************************
 * main
 ******************************************************************************/
Int main(Int argc, Char *argv[])
{
    Args               args     = DEFAULT_ARGS;
    Uns                initMask = 0;
    Int                status   = EXIT_SUCCESS;
    Pause_Attrs        pAttrs   = Pause_Attrs_DEFAULT;
    Rendezvous_Attrs   rzvAttrs = Rendezvous_Attrs_DEFAULT;
    Fifo_Attrs         fAttrs   = Fifo_Attrs_DEFAULT;
    Rendezvous_Handle  hRendezvousCapStd  = NULL;
    Rendezvous_Handle  hRendezvousInit    = NULL;
    Rendezvous_Handle  hRendezvousWriter  = NULL;
    Rendezvous_Handle  hRendezvousCleanup = NULL;
    Pause_Handle       hPauseProcess      = NULL;
    UI_Handle          hUI                = NULL;
    struct sched_param schedParam;
    pthread_t          captureThread;
    pthread_t          writerThread;
    pthread_t          videoThread;
    pthread_t          speechThread;
    CaptureEnv         captureEnv;
    WriterEnv          writerEnv;
    VideoEnv           videoEnv;
    SpeechEnv          speechEnv;
    CtrlEnv            ctrlEnv;
    Int                numThreads;
    pthread_attr_t     attr;
    Void              *ret;

    /* Zero out the thread environments */
    Dmai_clear(captureEnv);
    Dmai_clear(writerEnv);
    Dmai_clear(videoEnv);
    Dmai_clear(speechEnv);
    Dmai_clear(ctrlEnv);

    /* Parse the arguments given to the app and set the app environment */
    parseArgs(argc, argv, &args);

    printf("Encode demo started.\n");

    /* Initialize the mutex which protects the global data */
    pthread_mutex_init(&gbl.mutex, NULL);

    /* Set the priority of this whole process to max (requires root) */
    setpriority(PRIO_PROCESS, 0, -20);

    /* Initialize Codec Engine runtime */
    CERuntime_init();

    /* Initialize signal handler for SIGINT */
    signal(SIGINT, signalHandler);

    /* Initialize Davinci Multimedia Application Interface */
    Dmai_init();

    initMask |= LOGSINITIALIZED;

    /* Set up the user interface */
    hUI = uiSetup(&args);

    if (hUI == NULL) {
        cleanup(EXIT_FAILURE);
    }

    /* Create the Pause object */
    hPauseProcess = Pause_create(&pAttrs);

    if (hPauseProcess == NULL) {
        ERR("Failed to create Pause object\n");
        cleanup(EXIT_FAILURE);
    }

    /* Determine the number of threads needing synchronization */
    numThreads = 1;

    if (args.videoFile) {
        numThreads += 3;
    }

    if (args.speechFile) {
        numThreads += 1;
    }

    /* Create the objects which synchronize the thread init and cleanup */
    hRendezvousCapStd  = Rendezvous_create(2, &rzvAttrs);
    hRendezvousInit    = Rendezvous_create(numThreads, &rzvAttrs);
    hRendezvousCleanup = Rendezvous_create(numThreads, &rzvAttrs);
    hRendezvousWriter  = Rendezvous_create(2, &rzvAttrs);

    if (hRendezvousCapStd == NULL || hRendezvousInit == NULL ||
        hRendezvousCleanup == NULL || hRendezvousWriter == NULL) {
        ERR("Failed to create Rendezvous objects\n");
        cleanup(EXIT_FAILURE);
    }

    /* Initialize the thread attributes */
    if (pthread_attr_init(&attr)) {
        ERR("Failed to initialize thread attrs\n");
        cleanup(EXIT_FAILURE);
    }

    /* Force the thread to use custom scheduling attributes */
    if (pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED)) {
        ERR("Failed to set schedule inheritance attribute\n");
        cleanup(EXIT_FAILURE);
    }

    /* Set the thread to be fifo real time scheduled */
    if (pthread_attr_setschedpolicy(&attr, SCHED_FIFO)) {
        ERR("Failed to set FIFO scheduling policy\n");
        cleanup(EXIT_FAILURE);
    }

    /* Create the video threads if a file name is supplied */
    if (args.videoFile) {
        /* Create the capture fifos */
        captureEnv.hInFifo  = Fifo_create(&fAttrs);
        captureEnv.hOutFifo = Fifo_create(&fAttrs);

        if (captureEnv.hInFifo == NULL || captureEnv.hOutFifo == NULL) {
            ERR("Failed to open capture fifos\n");
            cleanup(EXIT_FAILURE);
        }

        /* Set the capture thread priority */
        schedParam.sched_priority = CAPTURE_THREAD_PRIORITY;

        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the capture thread */
        captureEnv.hRendezvousInit    = hRendezvousInit;
        captureEnv.hRendezvousCapStd  = hRendezvousCapStd;
        captureEnv.hRendezvousCleanup = hRendezvousCleanup;
        captureEnv.hPauseProcess      = hPauseProcess;
        captureEnv.videoStd           = args.videoStd;
        captureEnv.videoInput         = args.videoInput;
        captureEnv.imageWidth         = args.imageWidth;
        captureEnv.imageHeight        = args.imageHeight;

        /* TODO */
        VideoStd_getResolution(VideoStd_CIF, &captureEnv.resizeWidth,
                               &captureEnv.resizeHeight);

        if (pthread_create(&captureThread, &attr, captureThrFxn,
                           &captureEnv)) {
            ERR("Failed to create capture thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= CAPTURETHREADCREATED;

        /*
         * Once the capture thread has detected the video standard, make it
         * available to other threads. The capture thread will set the
         * resolution of the buffer to encode in the environment (derived
         * from the video standard if the user hasn't passed a resolution).
         */
        Rendezvous_meet(hRendezvousCapStd);

        /* Create the writer fifos */
        writerEnv.hInFifo  = Fifo_create(&fAttrs);
        writerEnv.hOutFifo = Fifo_create(&fAttrs);

        if (writerEnv.hInFifo == NULL || writerEnv.hOutFifo == NULL) {
            ERR("Failed to open writer fifos\n");
            cleanup(EXIT_FAILURE);
        }

        /* Set the video thread priority */
        schedParam.sched_priority = VIDEO_THREAD_PRIORITY;

        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the video thread */
        videoEnv.hRendezvousInit    = hRendezvousInit;
        videoEnv.hRendezvousCleanup = hRendezvousCleanup;
        videoEnv.hRendezvousWriter  = hRendezvousWriter;
        videoEnv.hPauseProcess      = hPauseProcess;
        videoEnv.hCaptureOutFifo    = captureEnv.hOutFifo;
        videoEnv.hCaptureInFifo     = captureEnv.hInFifo;
        videoEnv.hWriterOutFifo     = writerEnv.hOutFifo;
        videoEnv.hWriterInFifo      = writerEnv.hInFifo;
        videoEnv.videoEncoder       = args.videoEncoder->codecName;
        videoEnv.params             = args.videoEncoder->params;
        videoEnv.dynParams          = args.videoEncoder->dynParams;
        videoEnv.videoBitRate       = args.videoBitRate;
        videoEnv.imageWidth         = captureEnv.imageWidth;
        videoEnv.imageHeight        = captureEnv.imageHeight;
        videoEnv.resizeWidth        = captureEnv.resizeWidth;
        videoEnv.resizeHeight       = captureEnv.resizeHeight;
        videoEnv.imgEncoder         = "jpegenc";
        videoEnv.engineName         = engine->engineName;

        /* PAL runs at 25 fps, NTSC-derived standards at 30 fps (x1000) */
        if (args.videoStd == VideoStd_D1_PAL) {
            videoEnv.videoFrameRate = 25000;
        }
        else {
            videoEnv.videoFrameRate = 30000;
        }

        if (pthread_create(&videoThread, &attr, videoThrFxn, &videoEnv)) {
            ERR("Failed to create video thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= VIDEOTHREADCREATED;

        /*
         * Wait for the codec to be created in the video thread before
         * launching the writer thread (otherwise we don't know which size
         * of buffers to use).
         */
        Rendezvous_meet(hRendezvousWriter);

        /* Set the writer thread priority */
        schedParam.sched_priority = WRITER_THREAD_PRIORITY;

        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the writer thread */
        writerEnv.hRendezvousInit    = hRendezvousInit;
        writerEnv.hRendezvousCleanup = hRendezvousCleanup;
        writerEnv.hPauseProcess      = hPauseProcess;
        writerEnv.videoFile          = args.videoFile;
        writerEnv.outBufSize         = videoEnv.outBufSize;
        writerEnv.outsBufSize        = videoEnv.outsBufSize;

        if (pthread_create(&writerThread, &attr, writerThrFxn, &writerEnv)) {
            ERR("Failed to create writer thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= WRITERTHREADCREATED;
    }

    /* Create the speech thread if a file name is supplied */
    if (args.speechFile) {
        /* Set the thread priority */
        schedParam.sched_priority = SPEECH_THREAD_PRIORITY;

        if (pthread_attr_setschedparam(&attr, &schedParam)) {
            ERR("Failed to set scheduler parameters\n");
            cleanup(EXIT_FAILURE);
        }

        /* Create the speech thread */
        speechEnv.hRendezvousInit    = hRendezvousInit;
        speechEnv.hRendezvousCleanup = hRendezvousCleanup;
        speechEnv.hPauseProcess      = hPauseProcess;
        speechEnv.speechFile         = args.speechFile;
        speechEnv.soundInput         = args.soundInput;
        speechEnv.speechEncoder      = args.speechEncoder->codecName;
        speechEnv.params             = args.speechEncoder->params;
        speechEnv.dynParams          = args.speechEncoder->dynParams;
        speechEnv.engineName         = engine->engineName;

        if (pthread_create(&speechThread, &attr, speechThrFxn, &speechEnv)) {
            ERR("Failed to create speech thread\n");
            cleanup(EXIT_FAILURE);
        }

        initMask |= SPEECHTHREADCREATED;
    }

    /* Main thread becomes the control thread */
    ctrlEnv.hRendezvousInit    = hRendezvousInit;
    ctrlEnv.hRendezvousCleanup = hRendezvousCleanup;
    ctrlEnv.hPauseProcess      = hPauseProcess;
    ctrlEnv.keyboard           = args.keyboard;
    ctrlEnv.time               = args.time;
    ctrlEnv.hUI                = hUI;
    ctrlEnv.engineName         = engine->engineName;

    ret = ctrlThrFxn(&ctrlEnv);

    if (ret == THREAD_FAILURE) {
        status = EXIT_FAILURE;
    }

cleanup:
    /* Make sure the other threads aren't waiting for init to complete */
    if (hRendezvousCapStd) Rendezvous_force(hRendezvousCapStd);
    if (hRendezvousWriter) Rendezvous_force(hRendezvousWriter);
    if (hRendezvousInit) Rendezvous_force(hRendezvousInit);
    if (hPauseProcess) Pause_off(hPauseProcess);

    /* Wait until the other threads terminate */
    if (initMask & SPEECHTHREADCREATED) {
        if (pthread_join(speechThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (initMask & VIDEOTHREADCREATED) {
        if (pthread_join(videoThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (initMask & WRITERTHREADCREATED) {
        if (pthread_join(writerThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (writerEnv.hOutFifo) {
        Fifo_delete(writerEnv.hOutFifo);
    }

    if (writerEnv.hInFifo) {
        Fifo_delete(writerEnv.hInFifo);
    }

    if (initMask & CAPTURETHREADCREATED) {
        if (pthread_join(captureThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (captureEnv.hOutFifo) {
        Fifo_delete(captureEnv.hOutFifo);
    }

    if (captureEnv.hInFifo) {
        Fifo_delete(captureEnv.hInFifo);
    }

    if (hRendezvousCleanup) {
        Rendezvous_delete(hRendezvousCleanup);
    }

    if (hRendezvousInit) {
        Rendezvous_delete(hRendezvousInit);
    }

    if (hPauseProcess) {
        Pause_delete(hPauseProcess);
    }

    if (hUI) {
        UI_delete(hUI);
    }

    /* Flush filesystem buffers and drop the page cache before exiting */
    system("sync");
    system("echo 3 > /proc/sys/vm/drop_caches");

    pthread_mutex_destroy(&gbl.mutex);

    if (args.interface) {
        /* Launch the demo selection interface when exiting */
        if (execl("./interface", "interface", "-l 3", (char *) NULL) == -1) {
            status = EXIT_FAILURE;
        }
    }

    exit(status);
}
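/*
 * The initMask pattern above (set a bit after each successful
 * pthread_create, test it during cleanup) keeps teardown safe on every
 * error path. A standalone sketch with hypothetical flag names:
 */
#define THREAD_A_CREATED (1 << 0)
#define THREAD_B_CREATED (1 << 1)

Void initMaskSketch(Void *(*workFxn)(Void *))
{
    Uns       mask = 0;
    pthread_t a, b;
    Void     *ret;

    if (pthread_create(&a, NULL, workFxn, NULL) == 0) {
        mask |= THREAD_A_CREATED;
    }

    if (pthread_create(&b, NULL, workFxn, NULL) == 0) {
        mask |= THREAD_B_CREATED;
    }

    /* Cleanup joins only what was actually created */
    if (mask & THREAD_A_CREATED) {
        pthread_join(a, &ret);
    }

    if (mask & THREAD_B_CREATED) {
        pthread_join(b, &ret);
    }
}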
/******************************************************************************
 * Resize_execute
 ******************************************************************************/
Int Resize_execute(Resize_Handle hResize, Buffer_Handle hSrcBuf,
                   Buffer_Handle hDstBuf, Buffer_Handle hsDstBuf)
{
    struct imp_convert            rsz;
    struct rsz_channel_config     rsz_chan_config;
    struct rsz_single_shot_config rsz_ss_config;
    BufferGfx_Dimensions          srcDim;
    BufferGfx_Dimensions          dstDim;
    BufferGfx_Dimensions          sdstDim;
    UInt32                        srcOffset;
    UInt32                        dstOffset;
    UInt32                        sdstOffset;
    UInt32                        srcSize;
    UInt32                        dstSize;

    assert(hResize);
    assert(hSrcBuf);
    assert(hDstBuf);

    Dmai_clear(rsz);

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    if (hsDstBuf) {
        BufferGfx_getDimensions(hsDstBuf, &sdstDim);
    }

    srcSize = srcDim.width * srcDim.height;
    dstSize = dstDim.width * dstDim.height;

    /*
     * The resize operation for ColorSpace_UYVY differs from the one for
     * ColorSpace_YUV420PSEMI: UYVY is resized in a single pass, while
     * YUV420PSEMI needs separate passes for the Y and C planes.
     */
    if (BufferGfx_getColorSpace(hSrcBuf) == ColorSpace_UYVY) {
        srcOffset  = srcDim.y * srcDim.lineLength + (srcDim.x << 1);
        dstOffset  = dstDim.y * dstDim.lineLength + (dstDim.x << 1);
        sdstOffset = hsDstBuf ?
            (sdstDim.y * sdstDim.lineLength + (sdstDim.x << 1)) : 0;

        rsz.in_buff.index    = -1;
        rsz.in_buff.buf_type = IMP_BUF_IN;
        rsz.in_buff.offset   = Buffer_getPhysicalPtr(hSrcBuf) + srcOffset;
        rsz.in_buff.size     = Buffer_getSize(hSrcBuf);

        rsz.out_buff1.index    = -1;
        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf) + dstOffset;
        rsz.out_buff1.size     = Buffer_getSize(hDstBuf);

        rsz.out_buff2.index    = -1;
        rsz.out_buff2.buf_type = IMP_BUF_OUT2;
        rsz.out_buff2.offset   = hsDstBuf ?
            (Buffer_getPhysicalPtr(hsDstBuf) + sdstOffset) : 0;
        rsz.out_buff2.size     = hsDstBuf ? Buffer_getSize(hsDstBuf) : 0;

        /*
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset   & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);
        assert((rsz.out_buff2.offset & 0x1F) == 0);

        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        Buffer_setNumBytesUsed(hDstBuf, Buffer_getSize(hDstBuf));

        if (hsDstBuf) {
            Buffer_setNumBytesUsed(hsDstBuf, Buffer_getSize(hsDstBuf));
        }
    }
    else {
        /* Configure for ColorSpace_YUV420PSEMI */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = 0;
        rsz_chan_config.config    = NULL; /* to set defaults in driver */

        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error setting default single shot configuration\n");
            return Dmai_EFAIL;
        }

        /* Default configuration in the Resizer was set successfully */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);
        rsz_chan_config.config    = &rsz_ss_config;

        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error getting configuration from driver\n");
            return Dmai_EFAIL;
        }

        /* First pass: configure the resizer for the Y plane */
        rsz_ss_config.input.image_width  = srcDim.width;
        rsz_ss_config.input.image_height = srcDim.height;
        rsz_ss_config.input.ppln  = rsz_ss_config.input.image_width + 8;
        rsz_ss_config.input.lpfr  = rsz_ss_config.input.image_height + 10;
        rsz_ss_config.input.pix_fmt = IPIPE_420SP_Y;
        rsz_ss_config.output1.pix_fmt =
            pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
        rsz_ss_config.output1.width  = dstDim.width;
        rsz_ss_config.output1.height = dstDim.height;
        rsz_ss_config.output2.enable = 0;

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error setting single shot configuration\n");
            return Dmai_EFAIL;
        }

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

        /* Read again and verify */
        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error getting configuration from driver\n");
            return Dmai_EFAIL;
        }

        /* Resize the Y plane */
        Dmai_clear(rsz);
        rsz.in_buff.buf_type = IMP_BUF_IN;
        rsz.in_buff.index    = -1;
        rsz.in_buff.offset   = Buffer_getPhysicalPtr(hSrcBuf);
        rsz.in_buff.size     = srcSize;

        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.index    = -1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf);
        rsz.out_buff1.size     = dstSize;

        /*
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset   & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);

        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        /*
         * Second pass: configure the resizer for the interleaved C plane,
         * which has half the height of the Y plane.
         */
        rsz_ss_config.input.image_width  = srcDim.width;
        rsz_ss_config.input.image_height = srcDim.height >> 1;
        rsz_ss_config.input.ppln  = rsz_ss_config.input.image_width + 8;
        rsz_ss_config.input.lpfr  = rsz_ss_config.input.image_height + 10;
        rsz_ss_config.input.pix_fmt = IPIPE_420SP_C;
        rsz_ss_config.output1.pix_fmt =
            pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
        rsz_ss_config.output1.width  = dstDim.width;
        rsz_ss_config.output1.height = dstDim.height;
        rsz_ss_config.output2.enable = 0;

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error setting single shot configuration\n");
            return Dmai_EFAIL;
        }

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

        /* Read again and verify */
        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error getting configuration from driver\n");
            return Dmai_EFAIL;
        }

        /* Resize the C plane, which starts right after the Y plane */
        Dmai_clear(rsz);
        rsz.in_buff.buf_type = IMP_BUF_IN;
        rsz.in_buff.index    = -1;
        rsz.in_buff.offset   = Buffer_getPhysicalPtr(hSrcBuf) + srcSize;
        rsz.in_buff.size     = srcSize >> 1;

        rsz.out_buff1.buf_type = IMP_BUF_OUT1;
        rsz.out_buff1.index    = -1;
        rsz.out_buff1.offset   = Buffer_getPhysicalPtr(hDstBuf) + dstSize;
        rsz.out_buff1.size     = dstSize >> 1;

        /*
         * The IPIPE requires that the memory offsets of the input and output
         * buffers start on 32-byte boundaries.
         */
        assert((rsz.in_buff.offset   & 0x1F) == 0);
        assert((rsz.out_buff1.offset & 0x1F) == 0);

        /* Start IPIPE operation */
        if (ioctl(hResize->fd, RSZ_RESIZE, &rsz) == -1) {
            Dmai_err0("Failed RSZ_RESIZE\n");
            return Dmai_EFAIL;
        }

        Buffer_setNumBytesUsed(hDstBuf, Buffer_getNumBytesUsed(hSrcBuf));
    }

    return Dmai_EOK;
}
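/*
 * A worked sketch of the two-pass plane arithmetic above. Assumption: a
 * packed NV12 (YUV420 semi-planar) layout where the interleaved CbCr plane
 * immediately follows the Y plane; sizes are illustrative.
 */
Void nv12PlaneSketch(Void)
{
    UInt32 srcW = 320, srcH = 240;

    UInt32 srcSize = srcW * srcH;      /* Y plane: 76800 bytes */

    /* Pass 1 (Y): offset = base,           size = srcSize             */
    /* Pass 2 (C): offset = base + srcSize, size = srcSize >> 1, i.e.  */
    /*             38400 bytes: the C plane has half the height of the */
    /*             Y plane but the same line width, because Cb and Cr  */
    /*             are 2x2 subsampled yet interleaved byte-by-byte.    */
    assert((srcSize >> 1) == srcW * (srcH >> 1));
}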
/******************************************************************************
 * Resize_config
 ******************************************************************************/
Int Resize_config(Resize_Handle hResize, Buffer_Handle hSrcBuf,
                  Buffer_Handle hDstBuf, Buffer_Handle hsDstBuf)
{
    BufferGfx_Dimensions          srcDim, dstDim, sdstDim;
    UInt                          hDif;
    UInt                          vDif;
    UInt                          shDif;
    UInt                          svDif;
    struct rsz_channel_config     rsz_chan_config;
    struct rsz_single_shot_config rsz_ss_config;

    /* Make sure our input parameters are valid */
    if (!hResize) {
        Dmai_err0("Resize_Handle parameter must not be NULL\n");
        return Dmai_EINVAL;
    }

    if (!hSrcBuf) {
        Dmai_err0("Source buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }

    if (!hDstBuf) {
        Dmai_err0("Destination buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }

    /* The second destination buffer is optional */
    if (!hsDstBuf) {
        Dmai_err0("Second destination buffer parameter is NULL\n");
    }

    /* Buffers need to be graphics buffers */
    if (Buffer_getType(hSrcBuf) != Buffer_Type_GRAPHICS ||
        Buffer_getType(hDstBuf) != Buffer_Type_GRAPHICS ||
        (hsDstBuf && Buffer_getType(hsDstBuf) != Buffer_Type_GRAPHICS)) {
        Dmai_err0("Src and dst buffers need to be graphics buffers\n");
        return Dmai_EINVAL;
    }

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    if (hsDstBuf) {
        BufferGfx_getDimensions(hsDstBuf, &sdstDim);
    }

    if (dstDim.width <= 0) {
        Dmai_err0("Destination buffer width must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (dstDim.height <= 0) {
        Dmai_err0("Destination buffer height must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (hsDstBuf && sdstDim.width <= 0) {
        Dmai_err0("Second destination buffer width must be greater "
                  "than zero\n");
        return Dmai_EINVAL;
    }

    if (hsDstBuf && sdstDim.height <= 0) {
        Dmai_err0("Second destination buffer height must be greater "
                  "than zero\n");
        return Dmai_EINVAL;
    }

    if ((srcDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Source buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }

    if ((dstDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Destination buffer pitch must be a multiple of "
                  "32 bytes\n");
        return Dmai_EINVAL;
    }

    if (hsDstBuf && (sdstDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Second destination buffer pitch must be a multiple of "
                  "32 bytes\n");
        return Dmai_EINVAL;
    }

    /* Check for valid buffer scaling */
    hDif = srcDim.width  * 256 / dstDim.width;
    vDif = srcDim.height * 256 / dstDim.height;

    if (hDif < 32) {
        Dmai_err0("Horizontal up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (hDif > 4096) {
        Dmai_err0("Horizontal down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    if (vDif < 32) {
        Dmai_err0("Vertical up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (vDif > 4096) {
        Dmai_err0("Vertical down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    shDif = hsDstBuf ? (srcDim.width  * 256 / sdstDim.width)  : 0;
    svDif = hsDstBuf ? (srcDim.height * 256 / sdstDim.height) : 0;

    if (shDif && shDif < 32) {
        Dmai_err0("Second horizontal up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (shDif && shDif > 4096) {
        Dmai_err0("Second horizontal down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    if (svDif && svDif < 32) {
        Dmai_err0("Second vertical up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (svDif && svDif > 4096) {
        Dmai_err0("Second vertical down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    /* Configure for ColorSpace_UYVY */
    if (BufferGfx_getColorSpace(hSrcBuf) == ColorSpace_UYVY) {
        /* Set the default configuration in the Resizer */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = 0;
        rsz_chan_config.config    = NULL; /* to set defaults in driver */

        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error setting default single shot configuration\n");
            goto faco_error;
        }

        /* Default configuration in the Resizer was set successfully */
        Dmai_clear(rsz_ss_config);
        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);
        rsz_chan_config.config    = &rsz_ss_config;

        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error getting resizer channel configuration "
                      "from driver\n");
            goto faco_error;
        }

        /* Input parameters are set at the resizer */
        rsz_ss_config.input.image_width  = srcDim.width;
        rsz_ss_config.input.image_height = srcDim.height;
        rsz_ss_config.input.ppln  = rsz_ss_config.input.image_width + 8;
        rsz_ss_config.input.lpfr  = rsz_ss_config.input.image_height + 10;
        rsz_ss_config.input.pix_fmt =
            pixFormatConversion(BufferGfx_getColorSpace(hSrcBuf));

        rsz_ss_config.output1.pix_fmt =
            pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
        rsz_ss_config.output1.enable = 1;
        rsz_ss_config.output1.width  = dstDim.width;
        rsz_ss_config.output1.height = dstDim.height;

        rsz_ss_config.output2.pix_fmt = hsDstBuf ?
            pixFormatConversion(BufferGfx_getColorSpace(hsDstBuf)) : 0;
        rsz_ss_config.output2.enable  = hsDstBuf ? 1 : 0;
        rsz_ss_config.output2.width   = hsDstBuf ? sdstDim.width  : 0;
        rsz_ss_config.output2.height  = hsDstBuf ? sdstDim.height : 0;

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

        if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error setting single shot configuration\n");
            goto faco_error;
        }

        rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
        rsz_chan_config.chain     = 0;
        rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

        /* Read again and verify */
        if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
            Dmai_err0("Error getting configuration from driver\n");
            goto faco_error;
        }
    }

    return Dmai_EOK;

faco_error:
    close(hResize->fd);
    hResize->fd = 0;
    return Dmai_EFAIL;
}
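/*
 * The x256 fixed-point ratio check above maps the resizer's hardware
 * limits to integer bounds: ratio = src * 256 / dst, so 8x up-scaling
 * gives 256 / 8 = 32 (lower bound) and 1/16x down-scaling gives
 * 256 * 16 = 4096 (upper bound). A worked example with illustrative
 * dimensions:
 */
Void scaleRatioSketch(Void)
{
    /* 720 -> 360 is a 2x down-scale: 720 * 256 / 360 = 512, well inside
     * [32, 4096]. 720 -> 40 would give 4608 and be rejected. */
    UInt hDif = 720 * 256 / 360;

    assert(hDif == 512);
}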
/******************************************************************************
 * Capture_create
 ******************************************************************************/
Capture_Handle Capture_create(BufTab_Handle hBufTab, Capture_Attrs *attrs)
{
    struct v4l2_capability cap;
    struct v4l2_crop       crop;
    struct v4l2_format     fmt;
    enum v4l2_buf_type     type;
    Capture_Handle         hCapture;
    VideoStd_Type          videoStd;
    Int32                  width, height;

    assert(attrs);
    Dmai_clear(fmt);

    /* Allocate space for state object */
    hCapture = calloc(1, sizeof(Capture_Object));

    if (hCapture == NULL) {
        Dmai_err0("Failed to allocate space for Capture Object\n");
        return NULL;
    }

    /* User allocated buffers by default */
    hCapture->userAlloc = TRUE;

    /* Open video capture device */
    hCapture->fd = open(attrs->captureDevice, O_RDWR, 0);

    if (hCapture->fd == -1) {
        Dmai_err2("Cannot open %s (%s)\n", attrs->captureDevice,
                  strerror(errno));
        cleanup(hCapture);
        return NULL;
    }

    /* See if an input is connected, and if so which standard */
    if (Capture_detectVideoStd(hCapture, &videoStd, attrs) < 0) {
        cleanup(hCapture);
        return NULL;
    }

    hCapture->videoStd = videoStd;

    if (VideoStd_getResolution(videoStd, &width, &height) < 0) {
        Dmai_err0("Failed to get resolution of capture video standard\n");
        cleanup(hCapture);
        return NULL;
    }

    /* Query for capture device capabilities */
    if (ioctl(hCapture->fd, VIDIOC_QUERYCAP, &cap) == -1) {
        if (errno == EINVAL) {
            Dmai_err1("%s is no V4L2 device\n", attrs->captureDevice);
        }
        else {
            Dmai_err2("Failed VIDIOC_QUERYCAP on %s (%s)\n",
                      attrs->captureDevice, strerror(errno));
        }
        cleanup(hCapture);
        return NULL;
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        Dmai_err1("%s is not a video capture device\n",
                  attrs->captureDevice);
        cleanup(hCapture);
        return NULL;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        Dmai_err1("%s does not support streaming i/o\n",
                  attrs->captureDevice);
        cleanup(hCapture);
        return NULL;
    }

    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(hCapture->fd, VIDIOC_G_FMT, &fmt) == -1) {
        Dmai_err2("Failed VIDIOC_G_FMT on %s (%s)\n", attrs->captureDevice,
                  strerror(errno));
        cleanup(hCapture);
        return NULL;
    }

    fmt.fmt.pix.width        = width;
    fmt.fmt.pix.height       = height;
    fmt.fmt.pix.bytesperline = width * 2;
    fmt.fmt.pix.pixelformat  = V4L2_PIX_FMT_UYVY;
    fmt.fmt.pix.field        = V4L2_FIELD_INTERLACED;

    if (ioctl(hCapture->fd, VIDIOC_S_FMT, &fmt) == -1) {
        Dmai_err2("Failed VIDIOC_S_FMT on %s (%s)\n", attrs->captureDevice,
                  strerror(errno));
        cleanup(hCapture);
        return NULL;
    }

    Dmai_dbg3("Video input connected size %dx%d pitch %d\n",
              fmt.fmt.pix.width, fmt.fmt.pix.height,
              fmt.fmt.pix.bytesperline);

    if (attrs->cropWidth > 0 && attrs->cropHeight > 0) {
        if (attrs->cropX & 0x1) {
            Dmai_err1("Crop x coordinate (%ld) needs to be even\n",
                      attrs->cropX);
            cleanup(hCapture);
            return NULL;
        }

        crop.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c.left   = attrs->cropX;
        crop.c.top    = attrs->cropY;
        crop.c.width  = attrs->cropWidth;
        crop.c.height = hCapture->topOffset ?
            attrs->cropHeight + 4 + 2 : attrs->cropHeight;

        Dmai_dbg4("Setting capture cropping at %dx%d size %dx%d\n",
                  crop.c.left, crop.c.top, crop.c.width, crop.c.height);

        /* Crop the image depending on requested image size */
        if (ioctl(hCapture->fd, VIDIOC_S_CROP, &crop) == -1) {
            Dmai_err2("VIDIOC_S_CROP failed on %s (%s)\n",
                      attrs->captureDevice, strerror(errno));
            cleanup(hCapture);
            return NULL;
        }
    }

    if (hBufTab == NULL) {
        hCapture->userAlloc = FALSE;

        /* The driver allocates the buffers */
        if (_Dmai_v4l2DriverAlloc(hCapture->fd,
                                  attrs->numBufs,
                                  V4L2_BUF_TYPE_VIDEO_CAPTURE,
                                  &hCapture->bufDescs,
                                  &hBufTab,
                                  hCapture->topOffset,
                                  attrs->colorSpace) < 0) {
            Dmai_err1("Failed to allocate capture driver buffers on %s\n",
                      attrs->captureDevice);
            cleanup(hCapture);
            return NULL;
        }
    }
    else {
        /* Make the driver use the user supplied buffers */
        if (_Dmai_v4l2UserAlloc(hCapture->fd,
                                attrs->numBufs,
                                V4L2_BUF_TYPE_VIDEO_CAPTURE,
                                &hCapture->bufDescs,
                                hBufTab,
                                0,
                                attrs->colorSpace,
                                TRUE) < 0) {
            Dmai_err1("Failed to initialize capture driver buffers on %s\n",
                      attrs->captureDevice);
            cleanup(hCapture);
            return NULL;
        }
    }

    hCapture->hBufTab = hBufTab;

    /* Start the video streaming */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(hCapture->fd, VIDIOC_STREAMON, &type) == -1) {
        Dmai_err2("VIDIOC_STREAMON failed on %s (%s)\n",
                  attrs->captureDevice, strerror(errno));
        cleanup(hCapture);
        return NULL;
    }

    hCapture->started = TRUE;

    return hCapture;
}
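/*
 * A minimal usage sketch of this constructor with driver-allocated buffers
 * (hBufTab == NULL). Attribute names follow the DM365 defaults used
 * elsewhere in this file; error handling is abbreviated.
 */
Void captureUsageSketch(Void)
{
    Capture_Attrs  cAttrs = Capture_Attrs_DM365_DEFAULT;
    Capture_Handle hCapture;
    Buffer_Handle  hCapBuf;

    cAttrs.numBufs = 3;

    hCapture = Capture_create(NULL, &cAttrs);

    if (hCapture == NULL) {
        return;
    }

    /* Steady state: dequeue a filled frame, consume it, requeue it */
    if (Capture_get(hCapture, &hCapBuf) == Dmai_EOK) {
        /* ... process hCapBuf ... */
        Capture_put(hCapture, hCapBuf);
    }

    Capture_delete(hCapture);
}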
/******************************************************************************
 * Resize_config
 ******************************************************************************/
Int Resize_config(Resize_Handle hResize,
                  Buffer_Handle hSrcBuf, Buffer_Handle hDstBuf)
{
#ifdef CONFIG_DM365_IPIPE
    BufferGfx_Dimensions          srcDim, dstDim;
    UInt                          hDif;
    UInt                          vDif;
    struct rsz_channel_config     rsz_chan_config;
    struct rsz_single_shot_config rsz_ss_config;

    /* Make sure our input parameters are valid */
    if (!hResize) {
        Dmai_err0("Resize_Handle parameter must not be NULL\n");
        return Dmai_EINVAL;
    }

    if (!hSrcBuf) {
        Dmai_err0("Source buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }

    if (!hDstBuf) {
        Dmai_err0("Destination buffer parameter must not be NULL\n");
        return Dmai_EINVAL;
    }

    /* Buffers need to be graphics buffers */
    if (Buffer_getType(hSrcBuf) != Buffer_Type_GRAPHICS ||
        Buffer_getType(hDstBuf) != Buffer_Type_GRAPHICS) {
        Dmai_err0("Src and dst buffers need to be graphics buffers\n");
        return Dmai_EINVAL;
    }

    BufferGfx_getDimensions(hSrcBuf, &srcDim);
    BufferGfx_getDimensions(hDstBuf, &dstDim);

    if (dstDim.width <= 0) {
        Dmai_err0("Destination buffer width must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if (dstDim.height <= 0) {
        Dmai_err0("Destination buffer height must be greater than zero\n");
        return Dmai_EINVAL;
    }

    if ((srcDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Source buffer pitch must be a multiple of 32 bytes\n");
        return Dmai_EINVAL;
    }

    if ((dstDim.lineLength & 0x1F) != 0) {
        Dmai_err0("Destination buffer pitch must be a multiple of "
                  "32 bytes\n");
        return Dmai_EINVAL;
    }

    /* Check for valid buffer scaling */
    hDif = srcDim.width  * 256 / dstDim.width;
    vDif = srcDim.height * 256 / dstDim.height;

    if (hDif < 32) {
        Dmai_err0("Horizontal up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (hDif > 4096) {
        Dmai_err0("Horizontal down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    if (vDif < 32) {
        Dmai_err0("Vertical up-scaling must not exceed 8x\n");
        return Dmai_EINVAL;
    }

    if (vDif > 4096) {
        Dmai_err0("Vertical down-scaling must not exceed 1/16x\n");
        return Dmai_EINVAL;
    }

    /* Set the driver default parameters and retrieve what was set */
    Dmai_clear(rsz_ss_config);
    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain     = 0;
    rsz_chan_config.len       = 0;
    rsz_chan_config.config    = NULL; /* to set defaults in driver */

    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error setting default single shot configuration\n");
        goto cleanup;
    }

    /* Default configuration in the Resizer was set successfully */
    Dmai_clear(rsz_ss_config);
    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain     = 0;
    rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);
    rsz_chan_config.config    = &rsz_ss_config;

    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error getting resizer channel configuration "
                  "from driver\n");
        goto cleanup;
    }

    /* Input parameters are set at the resizer */
    rsz_ss_config.input.image_width  = srcDim.width;
    rsz_ss_config.input.image_height = srcDim.height;
    rsz_ss_config.input.ppln  = rsz_ss_config.input.image_width + 8;
    rsz_ss_config.input.lpfr  = rsz_ss_config.input.image_height + 10;
    rsz_ss_config.input.pix_fmt =
        pixFormatConversion(BufferGfx_getColorSpace(hSrcBuf));

    rsz_ss_config.output1.pix_fmt =
        pixFormatConversion(BufferGfx_getColorSpace(hDstBuf));
    rsz_ss_config.output1.enable = 1;
    rsz_ss_config.output1.width  = dstDim.width;
    rsz_ss_config.output1.height = dstDim.height;
    rsz_ss_config.output2.enable = 0;

    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain     = 0;
    rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

    if (ioctl(hResize->fd, RSZ_S_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error setting single shot configuration\n");
        goto cleanup;
    }

    rsz_chan_config.oper_mode = IMP_MODE_SINGLE_SHOT;
    rsz_chan_config.chain     = 0;
    rsz_chan_config.len       = sizeof(struct rsz_single_shot_config);

    /* Read again and verify */
    if (ioctl(hResize->fd, RSZ_G_CONFIG, &rsz_chan_config) < 0) {
        Dmai_err0("Error getting configuration from driver\n");
        goto cleanup;
    }

    return Dmai_EOK;

cleanup:
    close(hResize->fd);
    hResize->fd = 0;
    return Dmai_EFAIL;

#else
    Dmai_err0("not implemented\n");
    return Dmai_ENOTIMPL;
#endif
}
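/*
 * Both Resize_config variants walk the same driver handshake: set defaults
 * (len = 0, config = NULL), read them back, patch the fields, commit, then
 * read again to verify. A condensed sketch of that round-trip (hypothetical
 * fd, error handling elided):
 */
Void rszConfigHandshakeSketch(Int fd)
{
    struct rsz_channel_config     chan;
    struct rsz_single_shot_config ss;

    Dmai_clear(ss);
    chan.oper_mode = IMP_MODE_SINGLE_SHOT;
    chan.chain     = 0;

    chan.len    = 0;                 /* 1. load driver defaults */
    chan.config = NULL;
    ioctl(fd, RSZ_S_CONFIG, &chan);

    chan.len    = sizeof(ss);        /* 2. read the defaults back */
    chan.config = &ss;
    ioctl(fd, RSZ_G_CONFIG, &chan);

    ss.output1.enable = 1;           /* 3. patch what we need */

    ioctl(fd, RSZ_S_CONFIG, &chan);  /* 4. commit */
    ioctl(fd, RSZ_G_CONFIG, &chan);  /* 5. read back and verify */
}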
/*
 ##############################################
 ## Int ModuleResize(Int argc, Char *argv[])
 ##############################################
 */
Int ModuleResize(Int argc, Char *argv[])
{
    Uns                initMask = 0;
    Int                status   = EXIT_SUCCESS;
    Rendezvous_Attrs   rzvAttrs = Rendezvous_Attrs_DEFAULT;
    Fifo_Attrs         fAttrs   = Fifo_Attrs_DEFAULT;
    Rendezvous_Handle  hRendezvousInit    = NULL;
    Rendezvous_Handle  hRendezvousWriter  = NULL;
    Rendezvous_Handle  hRendezvousCleanup = NULL;
    Int                numThreads = 0;
    pthread_t          id_listen[5] = {0};
    Void              *ret;
    char               devicebuf[16] = {0};
    CaptureEnv         captureEnv;
    WriterEnv          writerEnv;
    WriterEnv          writerLowRateEnv;
    DetectEnv          detectEnv;
    VideoEnv           videoEnv;
    VideoEnv           videoLowRateEnv;
    VideoEnv           LowRateResize;
    AudioEnv           audioEnv;
    CtrlEnv            ctrlEnv;
    char               box_version[64] = {0};
    OutputVideoInfo    outputhandle;
    textinfo          *texthandle;
    int                DHCPVAL = 0, tmp = 0;
    char               gateway[255] = {0};
    struct sched_param schedParam;
    pthread_t          captureThread;
    pthread_t          detectThread;
    pthread_t          writerThread;
    pthread_t          writerLowThread;
    pthread_t          videoThread;
    pthread_t          audioThread;
    pthread_t          videoLowThread;
    pthread_t          resizeLowThread;
#ifdef DSS_ENC_1100_1200
    pthread_t          webListenThread;
#endif
    pthread_attr_t     attr;
    int                index = 0;
    int                result = 0;
    char               ts_version[128] = {0};

    /* Zero out the thread environments */
    Dmai_clear(captureEnv);
    Dmai_clear(writerEnv);
    Dmai_clear(videoEnv);
    Dmai_clear(audioEnv);
    Dmai_clear(ctrlEnv);

    mid_task_init();
    trace_init();
    open_gpio_port();

    ts_build_get_version(ts_version, sizeof(ts_version));

    strcpy(box_version, BOX_VER);
    strcat(box_version, CODE_TYPE);
    strcat(box_version, DEUBG);

    printf("[%s] Module Encode Program %s V%s\n", CODE_COND, BOARD_TYPE,
           box_version);
    printf("the build time is %s, the git version is %s, the ts version "
           "is %s\n\n", g_make_build_date, _VERSION, ts_version);

    initMutexPthread();
    InitgblCommonMutex();
    InitSysParams();
    initOutputVideoParam();
    InitHVTable(&gHVTable);

    /* webgetDHCPFlag(tmp, &DHCPVAL); */
    /* readDHCPValue(DHCPCONFIG_FILE, &DHCPVAL); */
    /* setDHCPFlag(DHCPVAL); */

    gLogoinfo = initLogoMod();
    initTextinfo();

    ReadEncodeParamTable(CONFIG_NAME, &gSysParaT);
    DHCPVAL = gSysParaT.sysPara.nTemp[0];
    printf("----mac=%x:%x:%x:%x:%x:%x\n",
           gSysParaT.sysPara.szMacAddr[0], gSysParaT.sysPara.szMacAddr[1],
           gSysParaT.sysPara.szMacAddr[2], gSysParaT.sysPara.szMacAddr[3],
           gSysParaT.sysPara.szMacAddr[4], gSysParaT.sysPara.szMacAddr[5]);
    ReadLowbitParamTable(LOWBIT_PARAM, &gSysParaT);

    memset(&outputhandle, 0, sizeof(OutputVideoInfo));
    getOutputvideohandle(&outputhandle);
    readOutputVideoParam(VIDEOENCODE_FILE, &outputhandle);
    setOutputvideohandle(&outputhandle);

    ReadLogoinfo(LOGOCONFIGNAME, gLogoinfo);
    /* setLogoInfoHandle(logoEnv); */

    texthandle = getTextInfoHandle();
    readTextFromfile(ADDTEXT_FILE, texthandle);

#ifdef DSS_ENC_1100_1200
    ReadProtocolIni(PROTOCOL_NAME, &gProtocol);
#endif

    ReadRemoteCtrlIndex(REMOTE_NAME, &index);

    /* Read the I-frame interval */
    ReadIframeInterval(IFRAMES_NAME);

    /* Green (power save) module */
    app_init_green_adjust_module();

#ifdef CL4000_DVI
    app_init_screen_adjust_module();
#endif

    gblSetRemoteIndex(index);
    ReadHVTable(&gHVTable, 0);
    ReadHVTable(&gHVTable, 1);

#ifdef CL4000_DVI_SDI
    ReadIPParamTable(IP_PARAM, &gSysParaT);
#endif

    if (DHCPVAL) {
        printf("i will set dhcp.\n");
        system("kill -1 `cat /var/run/dhcpcd-eth0.pid`");
        system("/sbin/dhcpcd eth0");
        system("ifconfig eth0");

        gSysParaT.sysPara.dwNetMark = GetNetmask("eth0");
        gSysParaT.sysPara.dwAddr    = GetIPaddr("eth0");
        gSysParaT.sysPara.dwGateWay = get_gateway(gateway);
        DEBUG(DL_DEBUG, "gateway =%s\n", gateway);
    }
    else {
        printf("i will set static ip.\n");
        SetEthConfigIP(gSysParaT.sysPara.dwAddr,
                       gSysParaT.sysPara.dwNetMark);
        SetEthConfigGW(gSysParaT.sysPara.dwGateWay);
    }

    system("ifconfig");

    strcpy(gSysParaT.sysPara.strVer, box_version);
    initSetParam();

    DEBUG(DL_DEBUG, "logo=%d text=%d, texthandle->enable=%d, "
          "texthandle->showtime=%d\n", outputhandle.logo_show,
          outputhandle.text_show, texthandle->enable, texthandle->showtime);

#ifdef DSS_ENC_1100_1200
    /* Open the LCD for initialization */
    OpenLCDCom();

    gblLoadIDX();

    if (-2 == ReadDeviceType(DTYPECONFIG_NAME, 1)) {
        ReadDeviceType(DTYPECONFIG_NAME, 0);
    }

    GetDeviceType(devicebuf);
    DEBUG(DL_DEBUG, "DTYPECONFIG_NAME gDeviceType = %s\n", devicebuf);
#endif

    /* Ignore the broken pipe (SIGPIPE) signal */
    Signal(SIGPIPE, SIG_IGN);

    /* Set the priority of this whole process to max (requires root) */
    setpriority(PRIO_PROCESS, 0, -20);

    /* Initialize the high bitrate video encoder parameters */
    InitVideoEncParams(&gSysParaT.videoPara[PORT_ONE]);

    /* Initialize the low bitrate video encoder parameters */
    InitLowRateParams(&gSysParaT.videoPara[PORT_TWO]);

    /* Initialize the audio encoder parameters */
    InitAudioEncParams(&gSysParaT.audioPara[PORT_ONE]);

    /* Initialize the mutex which protects the global data */
    pthread_mutex_init(&gbl.mutex, NULL);

    /* Initialize Codec Engine runtime */
    CERuntime_init();

    /* Initialize Davinci Multimedia Application Interface */
    Dmai_init();

    closeWatchDog();
    mid_timer_init();
    initWatchDog();

#ifdef CL4000_DVI_SDI
    if (gblGetRemoteIndex() < MAX_FAR_CTRL_NUM) {
        result = InitRemoteStruct(gblGetRemoteIndex());
    }

    gRemoteFD = CameraCtrlInit(PORT_COM2);

    if (gRemoteFD <= 0) {
        DEBUG(DL_ERROR, "Initial CameraCtrlInit() Error\n");
    }
#else
#ifndef ENABLE_DEUBG
    if (gblGetRemoteIndex() < MAX_FAR_CTRL_NUM) {
        result = InitRemoteStruct(gblGetRemoteIndex());
    }

    gRemoteFD = CameraCtrlInit(PORT_COM1);

    if (gRemoteFD <= 0) {
        DEBUG(DL_ERROR, "Initial CameraCtrlInit() Error\n");
    }
#endif
#endif

    CreateTCPTask(id_listen);

    /* Initialize the logs. Must be done after CERuntime_init() */
    /*
    if (TraceUtil_start(engine->engineName) != TRACEUTIL_SUCCESS) {
        ERR("Failed to TraceUtil_start\n");
        cleanup(EXIT_FAILURE);
    }
    */
    /* initMask |= LOGSINITIALIZED; */

    app_set_logoshow_flag(outputhandle.logo_show);
    app_set_textshow_flag(outputhandle.text_show);
    addtextdisplay(texthandle);

    /* Determine the number of threads needing synchronization */
    numThreads = 1;

    /* Number of video threads */
    numThreads += 7;

    /* Number of audio threads */
    numThreads += 1;

    /* Create the objects which synchronize the thread init and cleanup */
    hRendezvousInit    = Rendezvous_create(numThreads, &rzvAttrs);
    hRendezvousCleanup = Rendezvous_create(numThreads, &rzvAttrs);
    hRendezvousWriter  = Rendezvous_create(3, &rzvAttrs);

    if (hRendezvousInit == NULL || hRendezvousCleanup == NULL ||
        hRendezvousWriter == NULL) {
        ERR("Failed to create Rendezvous objects\n");
        cleanup(EXIT_FAILURE);
    }

    /* Initialize the thread attributes */
    if (pthread_attr_init(&attr)) {
        ERR("Failed to initialize thread attrs\n");
        cleanup(EXIT_FAILURE);
    }

    /* Force the thread to use custom scheduling attributes */
    if (pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED)) {
        ERR("Failed to set schedule inheritance attribute\n");
        cleanup(EXIT_FAILURE);
    }

    /* Set the thread to be fifo real time scheduled */
    if (pthread_attr_setschedpolicy(&attr, SCHED_FIFO)) {
        ERR("Failed to set FIFO scheduling policy\n");
        cleanup(EXIT_FAILURE);
    }

    /* Create the capture fifos */
    captureEnv.to_video_c    = Fifo_create(&fAttrs);
    captureEnv.from_video_c  = Fifo_create(&fAttrs);
    captureEnv.to_resize_c   = Fifo_create(&fAttrs);
    captureEnv.from_resize_c = Fifo_create(&fAttrs);

    if (captureEnv.to_video_c == NULL || captureEnv.from_video_c == NULL ||
        captureEnv.to_resize_c == NULL ||
        captureEnv.from_resize_c == NULL) {
        ERR("Failed to open capture fifos\n");
        cleanup(EXIT_FAILURE);
    }

    LowRateResize.to_videoresize_c   = Fifo_create(&fAttrs);
    LowRateResize.from_videoresize_c = Fifo_create(&fAttrs);

    if (LowRateResize.to_videoresize_c == NULL ||
        LowRateResize.from_videoresize_c == NULL) {
        ERR("Failed to open Resize fifos\n");
        cleanup(EXIT_FAILURE);
    }

    /* Set the capture thread priority */
    schedParam.sched_priority = CAPTURE_THREAD_PRIORITY;

    if (pthread_attr_setschedparam(&attr, &schedParam)) {
        ERR("Failed to set scheduler parameters\n");
        cleanup(EXIT_FAILURE);
    }

    /* Create the capture thread */
    captureEnv.hRendezvousInit    = hRendezvousInit;
    captureEnv.hRendezvousCleanup = hRendezvousCleanup;

    DEBUG(DL_DEBUG, "captureThrFxn thread!!!!\n");

    if (pthread_create(&captureThread, &attr, captureThrFxn, &captureEnv)) {
        ERR("Failed to create capture thread\n");
        cleanup(EXIT_FAILURE);
    }

    /* Create the writer fifos */
    writerEnv.to_video_c             = Fifo_create(&fAttrs);
    writerEnv.from_video_c           = Fifo_create(&fAttrs);
    writerLowRateEnv.to_writelow_c   = Fifo_create(&fAttrs);
    writerLowRateEnv.from_writelow_c = Fifo_create(&fAttrs);

    if (writerEnv.to_video_c == NULL || writerEnv.from_video_c == NULL ||
        writerLowRateEnv.to_writelow_c == NULL ||
        writerLowRateEnv.from_writelow_c == NULL) {
        ERR("Failed to open writer fifos\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= CAPTURETHREADCREATED;

    /* Detect thread */
    detectEnv.hRendezvousInit    = hRendezvousInit;
    detectEnv.hRendezvousCleanup = hRendezvousCleanup;

    /* Set the detect thread priority */
    schedParam.sched_priority = DETECT_THREAD_PRIORITY;

    if (pthread_attr_setschedparam(&attr, &schedParam)) {
        ERR("Failed to set scheduler parameters\n");
        cleanup(EXIT_FAILURE);
    }

    if (pthread_create(&detectThread, &attr, detectThrFxn, &detectEnv)) {
        ERR("Failed to create detect thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= DETECTTHREADCREATED;

    /* Set the video thread priority */
    schedParam.sched_priority = VIDEO_THREAD_PRIORITY;

    if (pthread_attr_setschedparam(&attr, &schedParam)) {
        ERR("Failed to set scheduler parameters\n");
        cleanup(EXIT_FAILURE);
    }

    DEBUG(DL_DEBUG, "videoThrFxn thread!!!!\n");

    /* Create the video thread */
    videoEnv.hRendezvousInit    = hRendezvousInit;
    videoEnv.hRendezvousCleanup = hRendezvousCleanup;
    videoEnv.hRendezvousWriter  = hRendezvousWriter;
    videoEnv.to_capture         = captureEnv.from_video_c;
    videoEnv.from_capture       = captureEnv.to_video_c;
    videoEnv.to_writer          = writerEnv.from_video_c;
    videoEnv.from_writer        = writerEnv.to_video_c;
    videoEnv.videoEncoder       = engine->videoEncoders->codecName;
    videoEnv.engineName         = engine->engineName;

    if (pthread_create(&videoThread, &attr, videoThrFxn, &videoEnv)) {
        ERR("Failed to create video thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= VIDEOTHREADCREATED;

    /* Create the low bitrate video thread */
    videoLowRateEnv.hRendezvousInit    = hRendezvousInit;
    videoLowRateEnv.hRendezvousCleanup = hRendezvousCleanup;
    videoLowRateEnv.hRendezvousWriter  = hRendezvousWriter;
    videoLowRateEnv.to_resize          = LowRateResize.from_videoresize_c;
    videoLowRateEnv.from_resize        = LowRateResize.to_videoresize_c;
    videoLowRateEnv.from_writer        = writerLowRateEnv.to_writelow_c;
    videoLowRateEnv.to_writer          = writerLowRateEnv.from_writelow_c;
    videoLowRateEnv.videoEncoder       = engine->videoEncoders->codecName;
    videoLowRateEnv.engineName         = engine->engineName;

    DEBUG(DL_DEBUG, "videoLowRateThrFxn thread!!!!\n");

    if (pthread_create(&videoLowThread, &attr, videoLowRateThrFxn,
                       &videoLowRateEnv)) {
        ERR("Failed to create low rate video thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= VIDEOLOWRATETHREAD;

    /* Create the low bitrate resize thread */
    LowRateResize.hRendezvousInit    = hRendezvousInit;
    LowRateResize.hRendezvousCleanup = hRendezvousCleanup;
    LowRateResize.hRendezvousWriter  = hRendezvousWriter;
    LowRateResize.from_capture       = captureEnv.to_resize_c;
    LowRateResize.to_capture         = captureEnv.from_resize_c;
    LowRateResize.videoEncoder       = engine->videoEncoders->codecName;
    LowRateResize.engineName         = engine->engineName;

    /* Set the resize thread priority */
    schedParam.sched_priority = VIDEO_THREAD_PRIORITY;

    if (pthread_attr_setschedparam(&attr, &schedParam)) {
        ERR("Failed to set scheduler parameters\n");
        cleanup(EXIT_FAILURE);
    }

    DEBUG(DL_DEBUG, "ResizeLowThrFxn thread!!!!\n");

    if (pthread_create(&resizeLowThread, &attr, ResizeLowThrFxn,
                       &LowRateResize)) {
        ERR("Failed to create resize thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= RESIZELOWRATETHREAD;

    Rendezvous_meet(hRendezvousWriter);

    /* Set the writer thread priority */
    schedParam.sched_priority = WRITER_THREAD_PRIORITY;

    if (pthread_attr_setschedparam(&attr, &schedParam)) {
        ERR("Failed to set scheduler parameters\n");
        cleanup(EXIT_FAILURE);
    }

    /* Create the writer thread */
    writerEnv.hRendezvousInit    = hRendezvousInit;
    writerEnv.hRendezvousCleanup = hRendezvousCleanup;
    writerEnv.outBufSize         = videoEnv.outBufSize;

    DEBUG(DL_DEBUG, "writerThrFxn thread!!!!\n");

    if (pthread_create(&writerThread, &attr, writerThrFxn, &writerEnv)) {
        ERR("Failed to create writer thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= WRITERTHREADCREATED;

    /* Create the low bitrate writer thread */
    writerLowRateEnv.hRendezvousInit    = hRendezvousInit;
    writerLowRateEnv.hRendezvousCleanup = hRendezvousCleanup;
    writerLowRateEnv.outBufSize         = videoLowRateEnv.outBufSize;

    DEBUG(DL_DEBUG, "writerLowThrFxn thread!!!!\n");

    if (pthread_create(&writerLowThread, &attr, writerLowThrFxn,
                       &writerLowRateEnv)) {
        ERR("Failed to create writerResize thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= WRITELOWRATETHREAD;

    /* Set the audio thread priority */
    schedParam.sched_priority = AUDIO_THREAD_PRIORITY;

    if (pthread_attr_setschedparam(&attr, &schedParam)) {
        ERR("Failed to set scheduler parameters\n");
        cleanup(EXIT_FAILURE);
    }

    DEBUG(DL_DEBUG, "Audio thread Function!!!!\n");

    /* Create the audio thread */
    audioEnv.hRendezvousInit    = hRendezvousInit;
    audioEnv.hRendezvousCleanup = hRendezvousCleanup;
    audioEnv.engineName         = engine->engineName;
    audioEnv.audioEncoder       = engine->audioEncoders->codecName;

    if (pthread_create(&audioThread, &attr, audioThrFxn, &audioEnv)) {
        ERR("Failed to create audio thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= AUDIOTHREADCREATED;

#ifdef DSS_ENC_1100_1200
    if (pthread_create(&webListenThread, &attr, weblistenThrFxn, NULL)) {
        ERR("Failed to create web listen thread\n");
        cleanup(EXIT_FAILURE);
    }

    initMask |= WEBLISTENCREATED;
#endif

    /* Main thread becomes the control thread */
    ctrlEnv.hRendezvousInit    = hRendezvousInit;
    ctrlEnv.hRendezvousCleanup = hRendezvousCleanup;
    ctrlEnv.engineName         = engine->engineName;

    ret = ctrlThrFxn(&ctrlEnv);

    if (ret == THREAD_FAILURE) {
        status = EXIT_FAILURE;
    }

    DEBUG(DL_DEBUG, "Exit All Thread!!\n");

cleanup:
    /* Make sure the other threads aren't waiting for init to complete */
    if (hRendezvousWriter) {
        Rendezvous_force(hRendezvousWriter);
    }

    if (hRendezvousInit) {
        Rendezvous_force(hRendezvousInit);
    }

    DEBUG(DL_DEBUG, "EXIT Common Mutex!!!\n");
    DestorygblCommonMutex();

    DEBUG(DL_DEBUG, "EXIT pthread Mutex!!!\n");
    DestroyMutexPthread();

    if (initMask & AUDIOTHREADCREATED) {
        if (pthread_join(audioThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    DEBUG(DL_DEBUG, "EXIT audio pThread!!!\n");

    if (initMask & VIDEOTHREADCREATED) {
        if (pthread_join(videoThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    DEBUG(DL_DEBUG, "EXIT video pThread!!!\n");

    if (initMask & WRITERTHREADCREATED) {
        if (pthread_join(writerThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    DEBUG(DL_DEBUG, "EXIT write pThread!!!\n");

    if (initMask & CAPTURETHREADCREATED) {
        if (pthread_join(captureThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    DEBUG(DL_DEBUG, "EXIT capture pThread!!!\n");

    if (initMask & VIDEOLOWRATETHREAD) {
        if (pthread_join(videoLowThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (initMask & RESIZELOWRATETHREAD) {
        if (pthread_join(resizeLowThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (initMask & WRITELOWRATETHREAD) {
        if (pthread_join(writerLowThread, &ret) == 0) {
            if (ret == THREAD_FAILURE) {
                status = EXIT_FAILURE;
            }
        }
    }

    if (pthread_join(id_listen[PORT_ONE], &ret) == 0) {
        if (ret == THREAD_FAILURE) {
            status = EXIT_FAILURE;
        }
    }

    if (captureEnv.to_video_c) {
        Fifo_delete(captureEnv.to_video_c);
    }

    if (captureEnv.from_video_c) {
        Fifo_delete(captureEnv.from_video_c);
    }

    if (captureEnv.to_resize_c) {
        Fifo_delete(captureEnv.to_resize_c);
    }

    if (captureEnv.from_resize_c) {
        Fifo_delete(captureEnv.from_resize_c);
    }

    if (writerEnv.to_video_c) {
        Fifo_delete(writerEnv.to_video_c);
    }

    if (writerEnv.from_video_c) {
        Fifo_delete(writerEnv.from_video_c);
    }

    /* Delete the same fifo that was tested (this previously deleted
     * writerLowRateEnv.from_video_c by mistake) */
    if (writerLowRateEnv.from_writelow_c) {
        Fifo_delete(writerLowRateEnv.from_writelow_c);
    }

    if (writerLowRateEnv.to_writelow_c) {
        Fifo_delete(writerLowRateEnv.to_writelow_c);
    }

    if (LowRateResize.to_videoresize_c) {
        Fifo_delete(LowRateResize.to_videoresize_c);
    }

    if (LowRateResize.from_videoresize_c) {
        Fifo_delete(LowRateResize.from_videoresize_c);
    }

    DEBUG(DL_DEBUG, "EXIT Rendezvous cleanup pThread!!!\n");

    if (hRendezvousCleanup) {
        Rendezvous_delete(hRendezvousCleanup);
    }

    DEBUG(DL_DEBUG, "EXIT Rendezvous init pThread!!!\n");

    if (hRendezvousInit) {
        Rendezvous_delete(hRendezvousInit);
    }

    /*
    if (initMask & LOGSINITIALIZED) {
        TraceUtil_stop();
    }
    */
    DEBUG(DL_DEBUG, "EXIT TraceUtil_stop !!!\n");

    pthread_mutex_destroy(&gbl.mutex);

    DEBUG(DL_DEBUG, "process EXIT!!!\n");

    /* Propagate the collected thread status instead of a hard-coded 1 */
    exit(status);
}