Example #1
void *thread_func_aew(void *arg)
{
        struct env_aew          *env = arg;
        void                    *status = THREAD_SUCCESS;
        struct image_buffer_description *image = NULL;

        log_enter();
        log_dbg("meet\n");
        Rendezvous_meet(env->rendezvous_init);

        while (!gblGetQuit()) {
                usleep(10000);

                if (fifo_get(env->fifo_aew, &image) == FIFO_FAILURE) {
                        breakLoop(THREAD_FAILURE);
                }

                if (image == NULL) { // blocking FIFO: execution proceeds only when the FIFO holds a message
                        breakLoop(THREAD_SUCCESS);
                }
  
                capture_buffer_put(image);
        }

        Rendezvous_force(env->rendezvous_init);
        Rendezvous_meet(env->rendezvous_deinit);
        log("exit aew thread\n");
        log_exit();

        return status;
}
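
All of these thread functions poll the same global quit flag through gblGetQuit() and raise it with gblSetQuit(). The project's own implementation is not shown on this page; a minimal sketch of what such a flag typically looks like (a mutex-protected global whose names match the calls above; an assumption, not the actual source) is:

/* Minimal sketch of a global quit flag (an assumption, not the project
 * source): a mutex-protected int matching the gblGetQuit()/gblSetQuit()
 * calls used throughout these examples. */
#include <pthread.h>

static int             gQuit = 0;
static pthread_mutex_t gQuitMutex = PTHREAD_MUTEX_INITIALIZER;

int gblGetQuit(void)
{
    int quit;

    pthread_mutex_lock(&gQuitMutex);
    quit = gQuit;
    pthread_mutex_unlock(&gQuitMutex);

    return quit;
}

void gblSetQuit(void)
{
    pthread_mutex_lock(&gQuitMutex);
    gQuit = 1;
    pthread_mutex_unlock(&gQuitMutex);
}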
Example #2
/* LCD thread */
void LCDModuleTask(void *param)
{
	PRINTF("get pid= %d\n", getpid());

	//signal_set();
	while(!gblGetQuit()) {  /*LCD Module*/
		sleep(3);
		LCDShowInfo(gLcdFD);
	}
}
Example #3
int ListenSeries(void *pParam)
{
	unsigned char data[256];
	char szData[256];
	int len, i, cnt, sendsocket, nRet;
	fd_set rfds;
	PRINTF("get pid= %d\n", getpid());

	while(!gblGetQuit()) {

#ifdef DSS_ENC_1100_1200_DEBUG
		sleep(1);
		continue;
#endif
		FD_ZERO(&rfds);
		FD_SET(gRemoteFD, &rfds);
		select(gRemoteFD + 1, &rfds, NULL, NULL, NULL);
		len = read(gRemoteFD, data, 256); // returns the number of bytes read on success, -1 on failure

		if (len < 4) { // need at least data[0..3] below; also covers read() errors
			continue;
		}

		PackHeaderMSG((BYTE *)szData, MSG_TRACKAUTO, HEAD_LEN + 1);
		szData[HEAD_LEN] = data[3];

		if(data[0] == 0x08 && data[1] == 0x09  && data[2] == 0x08) {
			for(i = 0; i < len; i++) {
				//printf("ReadDataFroCom:%x\n",data[i]);
			}

			for(cnt = 0; cnt < MAX_CLIENT; cnt++) {
				if(ISUSED(0, cnt) && ISLOGIN(0, cnt)) {
					pthread_mutex_lock(&gSetP_m.audio_video_m);
					sendsocket = GETSOCK(0, cnt);

					if(sendsocket > 0)  {
						nRet = WriteData(sendsocket, szData, HEAD_LEN + 1);

						if(nRet < 0) {
							SETCLIUSED(0, cnt, FALSE);
							SETCLILOGIN(0, cnt, FALSE);
							//SETFARCTRLLOCK(index, cnt, 0);
							writeWatchDog();
							DEBUG(DL_ERROR, "track Send to socket Error: SOCK = %d count = %d  errno = %d  ret = %d\n", sendsocket, cnt, errno, nRet);
						}
					}

					pthread_mutex_unlock(&gSetP_m.audio_video_m);
				}
			}

		}
	}

	return len;
}
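
The select() call above is made with a NULL timeout, so the loop blocks until serial data arrives and only re-tests gblGetQuit() afterwards; a quit request can go unnoticed for as long as the line stays silent. A variant that bounds the wait (a sketch, not the original code) might look like:

/* Sketch: bound the select() wait so the quit flag is re-checked at
 * least once per second even when no serial data arrives. fd would be
 * the serial descriptor (gRemoteFD above). */
#include <sys/select.h>

static int waitForSerial(int fd)
{
    fd_set         rfds;
    struct timeval tv;

    FD_ZERO(&rfds);
    FD_SET(fd, &rfds);
    tv.tv_sec  = 1;   /* upper bound on the wait */
    tv.tv_usec = 0;

    /* > 0: data ready, 0: timeout, < 0: error */
    return select(fd + 1, &rfds, NULL, NULL, &tv);
}

The loop body would then continue on a return value of 0 or less, re-testing gblGetQuit() before blocking again.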
Example #4
void* audioEncodeThrFxn(void *arg)
{
	ThreadEnv* envp = arg;
	//printf("In %s\n", __func__);	
	
	Rendezvous_meet(&envp->rendezvousInit);	
	printf("[audio-encode] start\n");

	while ( !gblGetQuit() )
	{
		usleep(10000);
	}
	printf("[audio-encode] exit\n");
	OSA_thrExit(NULL);

	return NULL; /* not reached; satisfies the void * return type */
}
Example #5
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv          *envp           = (CaptureEnv *) arg;
    Void                *status         = THREAD_SUCCESS;
    Capture_Attrs        cAttrs         = Capture_Attrs_DM365_DEFAULT;
    BufferGfx_Attrs      gfxAttrs       = BufferGfx_Attrs_DEFAULT;
    Capture_Handle       hCapture       = NULL;
    BufTab_Handle        hBufTab        = NULL;
    BufferGfx_Dimensions dim;
    Buffer_Handle        hDstBuf, hCapBuf;
    Int32                width, height, bufSize;
    Int                  fifoRet;
    ColorSpace_Type      colorSpace = ColorSpace_YUV420PSEMI;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;    
    cAttrs.videoStd   = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    if (VideoStd_getResolution(envp->videoStd, &width, &height) < 0) {
        ERR("Failed to calculate resolution of video standard\n");
        cleanup(THREAD_FAILURE);
    }

    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (width < envp->imageWidth && height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32-byte aligned data. We 32-byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth = Dmai_roundUp(envp->imageWidth, 32);
    }
    else {
        /* Resolution was not set on command line. Set to defaults. */
        envp->imageHeight = height;

       /*
        * Capture driver provides 32-byte aligned data. We 32-byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(width, 32); 
    }

    Dmai_clear(dim);
    dim.width      = envp->imageWidth;
    dim.height     = envp->imageHeight;

    dim.lineLength = Dmai_roundUp(dim.width, 32);
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = dim.lineLength * dim.height * 3 / 2;
    } else {
        bufSize = dim.lineLength * dim.height * 2;
    }
    gfxAttrs.dim = dim;

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(NUM_CAPTURE_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    if ((envp->videoStd == VideoStd_720P_60) && (!envp->passThrough)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &dim;
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Get a buffer from the video thread */
    fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

    if (fifoRet < 0) {
        ERR("Failed to get buffer from video thread\n");
        cleanup(THREAD_FAILURE);
    }

    if (fifoRet == Dmai_EFLUSH) {
        cleanup(THREAD_SUCCESS);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        
        /* Get a buffer from the capture driver to encode */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send captured buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        /* Return a buffer to the capture driver */
        if (Capture_put(hCapture, hDstBuf) < 0) {
            ERR("Failed to put capture buffer\n");
            cleanup(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hCapture) {
        Capture_delete(hCapture);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    return status;
}
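
captureThrFxn() and the other DMAI demo threads below rely on a cleanup() macro that records the thread status and jumps to the cleanup: label. Its definition lives in the demo headers and is not shown here; a definition consistent with the control flow in these examples (including raising the quit flag so the other threads stop, which is an assumption) would be:

/* Plausible cleanup() definition (an assumption inferred from the
 * control flow above): record the status, signal the other threads to
 * quit, and jump to the cleanup: label. */
#define cleanup(x)     \
    do {               \
        status = (x);  \
        gblSetQuit();  \
        goto cleanup;  \
    } while (0)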
Example #6
/******************************************************************************
 * displayThrFxn
 ******************************************************************************/
Void *displayThrFxn(Void *arg)
{
    DisplayEnv             *envp       = (DisplayEnv *) arg;
    Display_Attrs           dAttrs     = Display_Attrs_DM365_VID_DEFAULT;
    Display_Handle          hDisplay   = NULL;
    Framecopy_Handle        hFc        = NULL;
    Void                   *status     = THREAD_SUCCESS;
    Uns                     frameCnt   = 0;
    BufferGfx_Dimensions    srcDim;
    Buffer_Handle           hSrcBuf, hDstBuf;
    Int                     fifoRet;
    ColorSpace_Type         colorSpace = ColorSpace_YUV420PSEMI;
    BufferGfx_Attrs         gfxAttrs = BufferGfx_Attrs_DEFAULT;
    BufTab_Handle           hBufTab  = NULL;
    Int32                   bufSize;
    Time_Attrs              tAttrs   = Time_Attrs_DEFAULT;
    Time_Handle             hTime    = NULL;
    Int32                   time, waitTime;
    Int                     bufCnt = 1;

    hTime = Time_create(&tAttrs);

    if (hTime == NULL) {
        ERR("Failed to create Time object\n");
        cleanup(THREAD_FAILURE);
    }

    if(Time_reset(hTime) != Dmai_EOK) {
        ERR("Failed to reset timer\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Pause for priming? */
        Pause_test(envp->hPausePrime);

        /* Get decoded video frame */
        fifoRet = Fifo_get(envp->hInFifo, &hSrcBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }
        
        BufferGfx_getDimensions(hSrcBuf, &srcDim);

        /* Prime the display driver with the first NUM_DISPLAY_BUFS buffers */
        if (bufCnt <= NUM_DISPLAY_BUFS) { 
            if (bufCnt == 1) {  // Create the Display at the first frame
                gfxAttrs.dim.width = srcDim.width;
                gfxAttrs.dim.height = srcDim.height;
                gfxAttrs.dim.lineLength = srcDim.lineLength;
                gfxAttrs.dim.x = srcDim.x;
                gfxAttrs.dim.y = srcDim.y;
                if (colorSpace ==  ColorSpace_YUV420PSEMI) {
                    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 
                        3 / 2;
                } else {
                    bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
                }

                /* Create a table of buffers to use with the device drivers */
                gfxAttrs.colorSpace = colorSpace;
                hBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                    BufferGfx_getBufferAttrs(&gfxAttrs));
                if (hBufTab == NULL) {
                    ERR("Failed to create buftab\n");
                    cleanup(THREAD_FAILURE);
                }
	
                /* Create the display device instance */
                dAttrs.delayStreamon = TRUE;
                dAttrs.numBufs = NUM_DISPLAY_BUFS;
                dAttrs.videoStd = envp->videoStd;
                /* 
                 * Round down the width to a multiple of 32 as required by 
                 * display driver. Otherwise, the driver would internally round
                 * up the width, resulting in the codec padding showing up
                 * on the display when the image width is not a multiple of 32.
                 */
                dAttrs.width = ((gfxAttrs.dim.width & 0x1f) ?
                    (gfxAttrs.dim.width & ~(0x1f)) : gfxAttrs.dim.width);
                dAttrs.height = gfxAttrs.dim.height;
                dAttrs.videoOutput = envp->displayOutput;
                dAttrs.colorSpace  = colorSpace;
                hDisplay = Display_create(hBufTab, &dAttrs);

                if (hDisplay == NULL) {
                    ERR("Failed to create display device\n");
                    cleanup(THREAD_FAILURE);
                }
            }

            bufCnt++;
        }
        else {
            /* Get a buffer from the display device driver */
            if (Display_get(hDisplay, &hDstBuf) < 0) {
                ERR("Failed to get display buffer\n");
                cleanup(THREAD_FAILURE);
            }

            /* Send buffer back to the video thread */
            if (Fifo_put(envp->hOutFifo, hDstBuf) < 0) {
                ERR("Failed to send buffer to video thread\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->videoStd == VideoStd_720P_60) {
            if (Time_delta(hTime, (UInt32*)&time) < 0) {
                ERR("Failed to get timer delta\n");
                cleanup(THREAD_FAILURE);
            }
            waitTime = DISPLAYLOOPLATENCY - time;
            if(waitTime > 0) {
                usleep(waitTime);
            }
            if(Time_reset(hTime) != Dmai_EOK) {
                ERR("Failed to reset timer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();           

        /* Give a filled buffer back to the display device driver */
        if (Display_put(hDisplay, hSrcBuf) < 0) {
            ERR("Failed to put display buffer\n");
            cleanup(THREAD_FAILURE);
        }

        frameCnt++;
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Pause_off(envp->hPausePrime);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hFc) {
        Framecopy_delete(hFc);
    }

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    if(hTime) {
        Time_delete(hTime);
    }

    return status;
}
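
The dAttrs.width computation in displayThrFxn() rounds the width down to a multiple of 32, as the display driver requires. The conditional expression used above is equivalent to simply masking off the low five bits:

/* Equivalent helper: round a width down to a multiple of 32.
 * Examples: 736 -> 736 (already aligned), 730 -> 704, 720 -> 704. */
static inline long roundDownTo32(long width)
{
    return width & ~0x1fL;
}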
Example #7
/******************************************************************************
 * speechThrFxn
 ******************************************************************************/
Void *speechThrFxn(Void *arg)
{
    SpeechEnv              *envp                = (SpeechEnv *) arg;
    SPHDEC1_Params          defaultParams       = Sdec1_Params_DEFAULT;
    SPHDEC1_DynamicParams   defaultDynParams    = Sdec1_DynamicParams_DEFAULT;
    Void                   *status              = THREAD_SUCCESS;
    Sound_Attrs             sAttrs              = Sound_Attrs_MONO_DEFAULT;
    Loader_Attrs            lAttrs              = Loader_Attrs_DEFAULT;
    Buffer_Attrs            bAttrs              = Buffer_Attrs_DEFAULT;
    Sdec1_Handle            hSd1                = NULL;
    Sound_Handle            hSound              = NULL;
    Loader_Handle           hLoader             = NULL;
    Engine_Handle           hEngine             = NULL;
    Buffer_Handle           hOutBuf             = NULL;
    SPHDEC1_Params         *params;
    SPHDEC1_DynamicParams  *dynParams;
    Buffer_Handle           hInBuf;

    /* Open the codec engine */
    hEngine = Engine_open(envp->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ERR("Failed to open codec engine %s\n", envp->engineName);
        cleanup(THREAD_FAILURE);
    }

    /* Create the sound device */
    sAttrs.sampleRate = 8000;
    sAttrs.mode = Sound_Mode_OUTPUT;
    sAttrs.leftGain  = 127;
    sAttrs.rightGain = 127;
    sAttrs.bufSize   = 128;
    hSound = Sound_create(&sAttrs);

    if (hSound == NULL) {
        ERR("Failed to create audio device\n");
        cleanup(THREAD_FAILURE);
    }

    /* Set the sample rate for the user interface */
    gblSetSamplingFrequency(sAttrs.sampleRate);

    /* Use supplied params if any, otherwise use defaults */
    params = envp->params ? envp->params : &defaultParams;
    dynParams = envp->dynParams ? envp->dynParams : &defaultDynParams;

    /* Create the speech decoder */
    hSd1 = Sdec1_create(hEngine, envp->speechDecoder, params, dynParams);

    if (hSd1 == NULL) {
        ERR("Failed to create speech decoder: %s\n", envp->speechDecoder);
        cleanup(THREAD_FAILURE);
    }

    /*
     * Make the output buffer size twice the size of what the codec needs
     * as the codec needs mono and the Sound module converts the decoded
     * mono samples to stereo before writing to the device driver.
     */
    hOutBuf = Buffer_create(OUTBUFSIZE, &bAttrs);

    if (hOutBuf == NULL) {
        ERR("Failed to allocate output buffer\n");
        cleanup(THREAD_FAILURE);
    }

    /* How much encoded data to feed the codec each process call */
    lAttrs.readSize = INBUFSIZE;

    /* Make the total ring buffer larger */
    lAttrs.readBufSize = lAttrs.readSize * 512;

    /* Create the file loader for reading encoded data */
    hLoader = Loader_create(envp->speechFile, &lAttrs);

    if (hLoader == NULL) {
        ERR("Failed to create loader\n");
        cleanup(THREAD_FAILURE);
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    /* Prime the file loader */
    Loader_prime(hLoader, &hInBuf);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Decode the audio buffer */
        if (Sdec1_process(hSd1, hInBuf, hOutBuf) < 0) {
            ERR("Failed to decode audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Increment statistics for user interface */
        gblIncSoundBytesProcessed(Buffer_getNumBytesUsed(hInBuf));

        /*
         * Force the output buffer size since we are forcing the size of the
         * output buffer allocated as opposed to asking the codec for a size.
         */
        Buffer_setNumBytesUsed(hOutBuf, OUTBUFSIZE);

        /* Write the decoded samples to the sound device */
        if (Sound_write(hSound, hOutBuf) < 0) {
            ERR("Failed to write audio buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Load a new frame from the file system */
        if (Loader_getFrame(hLoader, hInBuf) < 0) {
            ERR("Failed to read a frame of encoded data\n");
            cleanup(THREAD_FAILURE);
        }

        /* Check if the clip has ended */
        if (Buffer_getUserPtr(hInBuf) == NULL) {
            /* Wait for the video clip to finish, if applicable */
            Rendezvous_meet(envp->hRendezvousLoop);

            /* If we are to loop the clip, start over */
            if (envp->loop) {
                /* Recreate the speech codec */
                Sdec1_delete(hSd1);
                hSd1 = Sdec1_create(hEngine, envp->speechDecoder,
                                    params, dynParams);

                if (hSd1 == NULL) {
                    ERR("Failed to create speech decoder: %s\n",
                        envp->speechDecoder);
                    cleanup(THREAD_FAILURE);
                }

                /* Re-prime the file loader */
                Loader_prime(hLoader, &hInBuf);
            }
            else {
                printf("End of clip reached, exiting..\n");
                cleanup(THREAD_SUCCESS);
            }
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Rendezvous_force(envp->hRendezvousLoop);
    Pause_off(envp->hPauseProcess);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hLoader) {
        Loader_delete(hLoader);
    }

    if (hSd1) {
        Sdec1_delete(hSd1);
    }

    if (hSound) {
        Sound_delete(hSound);
    }

    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    return status;
}
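
Examples 5 through 8 all use the same DMAI Rendezvous handshake: every thread allocates its resources, calls Rendezvous_meet() on a shared init object, and blocks until all participants have arrived; a failing thread calls Rendezvous_force() so nobody is left waiting. A minimal skeleton of that pattern (a sketch with error handling omitted; the participant count N is hypothetical):

/* Skeleton of the init rendezvous used by the demo threads above. */
#include <ti/sdo/dmai/Rendezvous.h>

Rendezvous_Attrs  rzvAttrs = Rendezvous_Attrs_DEFAULT;
Rendezvous_Handle hRendezvousInit;

/* Main thread, before spawning the workers: */
hRendezvousInit = Rendezvous_create(N, &rzvAttrs);

/* Each participating thread, once its resources are allocated: */
Rendezvous_meet(hRendezvousInit);    /* blocks until all N have arrived */

/* A failing thread unblocks everyone instead of leaving them stuck: */
Rendezvous_force(hRendezvousInit);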
Example #8
/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv           *envp     = (CaptureEnv *) arg;
    Void                 *status   = THREAD_SUCCESS;
    Capture_Attrs         cAttrs   = Capture_Attrs_DM365_DEFAULT;
    Display_Attrs         dAttrs   = Display_Attrs_DM365_VID_DEFAULT;
    BufferGfx_Attrs       gfxAttrs = BufferGfx_Attrs_DEFAULT;    
    Capture_Handle        hCapture = NULL;
    Display_Handle        hDisplay = NULL;
    BufTab_Handle         hBufTab  = NULL;
    BufTab_Handle         hDispBufTab = NULL;
    BufTab_Handle         hFifoBufTab = NULL;
    Buffer_Handle         hDstBuf, hCapBuf, hDisBuf, hBuf;
    BufferGfx_Dimensions  capDim;
    VideoStd_Type         videoStd;
    Int32                 width, height, bufSize;
    Int                   fifoRet;
    ColorSpace_Type       colorSpace = ColorSpace_YUV420PSEMI;
    Int                   bufIdx;
    Int                   numCapBufs;

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    cAttrs.videoInput = envp->videoInput;
    cAttrs.videoStd = envp->videoStd;
    cAttrs.colorSpace = colorSpace;

    videoStd = envp->videoStd;

    /* We only support D1, 720P and 1080I input */
    if (videoStd != VideoStd_D1_NTSC && videoStd != VideoStd_D1_PAL 
        && videoStd != VideoStd_720P_60 && videoStd != VideoStd_720P_50 &&
        videoStd != VideoStd_1080I_30) {
        ERR("Need D1/720P/1080I input to this demo\n");
        cleanup(THREAD_FAILURE);
    }
    if (envp->imageWidth > 0 && envp->imageHeight > 0) {
        if (VideoStd_getResolution(videoStd, &width, &height) < 0) {
            ERR("Failed to calculate resolution of video standard\n");
            cleanup(THREAD_FAILURE);
        }

        if (width < envp->imageWidth && height < envp->imageHeight) {
            ERR("User resolution (%ldx%ld) larger than detected (%ldx%ld)\n",
                envp->imageWidth, envp->imageHeight, width, height);
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        envp->imageWidth  = Dmai_roundUp(envp->imageWidth,32);
        capDim.x          = 0;
        capDim.y          = 0;
        capDim.height     = envp->imageHeight;
        capDim.width      = envp->imageWidth;
        capDim.lineLength = BufferGfx_calcLineLength(capDim.width, colorSpace);
    } 
    else {
        /* Calculate the dimensions of a video standard given a color space */
        if (BufferGfx_calcDimensions(videoStd, colorSpace, &capDim) < 0) {
            ERR("Failed to calculate Buffer dimensions\n");
            cleanup(THREAD_FAILURE);
        }

       /*
        * Capture driver provides 32 byte aligned data. We 32 byte align the
        * capture and video buffers to perform zero copy encoding.
        */
        capDim.width      = Dmai_roundUp(capDim.width,32);
        envp->imageWidth  = capDim.width;
        envp->imageHeight = capDim.height;
    }

    numCapBufs = NUM_CAPTURE_BUFS;

    gfxAttrs.dim.height = capDim.height;
    gfxAttrs.dim.width = capDim.width;
    gfxAttrs.dim.lineLength = 
        Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width,
                     colorSpace), 32);
    gfxAttrs.dim.x = 0;
    gfxAttrs.dim.y = 0;
    if (colorSpace ==  ColorSpace_YUV420PSEMI) {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
    } 
    else {
        bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
    }

    /* Create a table of buffers to use with the capture driver */
    gfxAttrs.colorSpace = colorSpace;
    hBufTab = BufTab_create(numCapBufs, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use to prime Fifo to video thread */
    hFifoBufTab = BufTab_create(VIDEO_PIPE_SIZE, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hFifoBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Update global data for user interface */
    gblSetImageWidth(envp->imageWidth);
    gblSetImageHeight(envp->imageHeight);

    /* Report the video standard and image size back to the main thread */
    Rendezvous_meet(envp->hRendezvousCapStd);

    /* Capture at half frame rate if using COMPONENT input at 720P */
    if ((envp->videoStd == VideoStd_720P_60) 
        && (envp->videoInput == Capture_Input_COMPONENT)) {
        cAttrs.videoStd = VideoStd_720P_30;
    }
    else {
        cAttrs.videoStd = envp->videoStd;    
    }

    /* If it's component input and the video std is 1080I_30, make it 1080I_60. */
    if (cAttrs.videoStd == VideoStd_1080I_30 && cAttrs.videoInput 
                        == Capture_Input_COMPONENT) {
        cAttrs.videoStd = VideoStd_1080I_60;
    }

    cAttrs.numBufs    = NUM_CAPTURE_BUFS;    
    cAttrs.colorSpace = colorSpace;
    cAttrs.captureDimension = &gfxAttrs.dim;
    /* Create the capture device driver instance */
    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device. Is video input connected?\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create a table of buffers to use with the display driver */
    hDispBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));
    if (hDispBufTab == NULL) {
        ERR("Failed to create buftab\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create display device driver instance if preview is needed */
    if (!envp->previewDisabled) {
        dAttrs.videoStd = envp->videoStd;
        if ( (dAttrs.videoStd == VideoStd_CIF) ||
            (dAttrs.videoStd == VideoStd_SIF_NTSC) ||
            (dAttrs.videoStd == VideoStd_SIF_PAL) ||
            (dAttrs.videoStd == VideoStd_VGA) ||
            (dAttrs.videoStd == VideoStd_D1_NTSC) ||        
            (dAttrs.videoStd == VideoStd_D1_PAL) ) {
            dAttrs.videoOutput = Display_Output_COMPOSITE;
        } else {
            dAttrs.videoOutput = Display_Output_COMPONENT;
        }    
        dAttrs.numBufs    = NUM_DISPLAY_BUFS;
        dAttrs.colorSpace = colorSpace;
        dAttrs.width = capDim.width;
        dAttrs.height = capDim.height;
        hDisplay = Display_create(hDispBufTab, &dAttrs);

        if (hDisplay == NULL) {
            ERR("Failed to create display device\n");
            cleanup(THREAD_FAILURE);
        }
    }

    for (bufIdx = 0; bufIdx < VIDEO_PIPE_SIZE; bufIdx++) {
        /* Queue the video buffers for main thread processing */
        hBuf = BufTab_getFreeBuf(hFifoBufTab);
        if (hBuf == NULL) {
            ERR("Failed to fill video pipeline\n");
            cleanup(THREAD_FAILURE);            
        }

        /* Fill the buffer with black */
        CapBuf_blackFill(hBuf);

        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }
    }
    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPauseProcess);

        /* Capture a frame */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the display device */
        if ((!envp->previewDisabled) && (Display_get(hDisplay, &hDisBuf) < 0)) {
            ERR("Failed to get display buffer\n");
            cleanup(THREAD_FAILURE);
        }
        /* Send buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hCapBuf) < 0) {
            ERR("Failed to send buffer to display thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

        if (!envp->previewDisabled) {
            /* Release buffer to the display device driver */
            if (Display_put(hDisplay, hDstBuf) < 0) {
                ERR("Failed to put display buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        if (envp->previewDisabled) {
            /* Return the processed buffer to the capture driver */
            if (Capture_put(hCapture, hDstBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            } 
        }
        else {
            /* Return the displayed buffer to the capture driver */
            if (Capture_put(hCapture, hDisBuf) < 0) {
                ERR("Failed to put capture buffer\n");
                cleanup(THREAD_FAILURE);
            }
        }

        /* Increment statistics for the user interface */
        gblIncFrames();

    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousCapStd);
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    if (hDisplay) {
        Display_delete(hDisplay);
    }

    if (hCapture) {
        Capture_delete(hCapture);
    }
    
    /* Clean up the thread before exiting */
    if (hBufTab) {
        BufTab_delete(hBufTab);
    }
    if (hFifoBufTab) {
        BufTab_delete(hFifoBufTab);
    }
    if (hDispBufTab) {
        BufTab_delete(hDispBufTab);
    }

    return status;
}
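
The capture thread above primes the video pipeline with black frames via CapBuf_blackFill(), whose source is not shown on this page. For the YUV420 semi-planar color space used here, black is luma 0x00 with neutral chroma 0x80; a minimal sketch under that assumption, using the standard DMAI buffer calls:

/* Sketch (an assumption, not CapBuf_blackFill's real source): fill a
 * YUV420 semi-planar buffer with black. The luma plane gets 0x00 and
 * the interleaved Cb/Cr plane gets 0x80. */
#include <string.h>
#include <ti/sdo/dmai/Buffer.h>
#include <ti/sdo/dmai/BufferGfx.h>

static void blackFillYuv420Semi(Buffer_Handle hBuf)
{
    BufferGfx_Dimensions dim;
    Int8                *yPlane, *cbCrPlane;
    Int32                lumaBytes;

    BufferGfx_getDimensions(hBuf, &dim);

    yPlane    = Buffer_getUserPtr(hBuf);
    lumaBytes = dim.lineLength * dim.height;
    cbCrPlane = yPlane + lumaBytes;

    memset(yPlane, 0x00, lumaBytes);         /* luma plane        */
    memset(cbCrPlane, 0x80, lumaBytes / 2);  /* interleaved Cb/Cr */

    Buffer_setNumBytesUsed(hBuf, lumaBytes * 3 / 2);
}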
Example #9
/* WEB server thread function */
Void *weblistenThrFxn(Void *arg)
{
	PRINTF("get pid= %d\n", getpid());
	Void               *status = THREAD_SUCCESS;
	int                 listenSocket = 0, flags = 0;
	int                 len, client_sock, opt = 1;
	struct sockaddr_in  addr;
	struct sockaddr_in  client_addr;
	webMsgInfo_1260     webinfo_1260;
	ssize_t             recvlen;
	char                data_1260[2048] = {0};
	//struct in_addr    addr_ip;
	//int count = 0;
	// signal_set();


	//	for(count = TS_STREAM; count < MAX_PROTOCOL; count++) {
	//		WebInitMultAddr(&s_MultAddr[count]);
	//	}

	len = sizeof(struct sockaddr_in);
	memset(&client_addr, 0, len);
	listenSocket = socket(PF_INET, SOCK_STREAM, 0);

	if(listenSocket < 0) { /* socket() returns -1 on failure */
		status  = THREAD_FAILURE;
		return status;
	}

	memset(&addr, 0, sizeof(struct sockaddr_in));
	addr.sin_family =       AF_INET;
	addr.sin_addr.s_addr = inet_addr("127.0.0.1");
	addr.sin_port = htons(ENCODESERVER_PORT);

	setsockopt(listenSocket, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));

	if(bind(listenSocket, (struct sockaddr *)&addr, sizeof(struct sockaddr_in)) != 0)  	{
		DEBUG(DL_ERROR, "[weblistenThrFxn] bind failed,errno = %d,error message:%s \n", errno, strerror(errno));
		status  = THREAD_FAILURE;
		return status;
	}


	if(-1 == listen(listenSocket, MAX_LISTEN))     {
		DEBUG(DL_ERROR, "listen failed,errno = %d,error message:%s \n", errno, strerror(errno));
		status  = THREAD_FAILURE;
		return status;
	}

	if((flags = fcntl(listenSocket, F_GETFL, 0)) == -1)	{
		DEBUG(DL_ERROR, "fcntl F_GETFL error:%d,error msg: = %s\n", errno, strerror(errno));
		gblSetQuit();
		status  = THREAD_FAILURE;
		return status ;
	}

	if(fcntl(listenSocket, F_SETFL, flags | O_NONBLOCK) == -1) {
		DEBUG(DL_ERROR, "fcntl F_SETFL error:%d,error msg: = %s\n", errno, strerror(errno));
		gblSetQuit();
		status  = THREAD_FAILURE;
		return status ;
	}

	//ProtocolInit();
	//InitProtocalStatus();
	while(!gblGetQuit()) {
		fd_set rfds;
		FD_ZERO(&rfds);
		FD_SET(listenSocket, &rfds);
		// reset the receive buffer (recv_buf) to empty!
		select(listenSocket + 1, &rfds, NULL, NULL, NULL);
		//PRINTF("\n");
		len = sizeof(struct sockaddr_in); /* len is reused below, so reset it before accept() */
		client_sock = accept(listenSocket, (struct sockaddr *)&client_addr, (socklen_t *)&len);

		if(0 > client_sock)     {
			PRINTF("\n");

			if(errno == ECONNABORTED || errno == EAGAIN) 	{
				//usleep(20000);
				continue;
			}

			DEBUG(DL_DEBUG, "weblisten thread Function errno  = %d\n", errno);
			status  = THREAD_FAILURE;
			return status;
		}

		memset(&webinfo_1260, 0, sizeof(webinfo_1260));
		recvlen = recv(client_sock, &webinfo_1260, sizeof(webinfo_1260), 0);
		//PRINTF("recvlen = %d webinfo.len =%d webinfo.type = %d,id=%x\n",
		//       recvlen, webinfo_1260.len, webinfo_1260.type, webinfo_1260.identifier);

		if(recvlen < 1)	{
			PRINTF("recv failed,errno = %d,error message:%s,client_sock = %d\n", errno, strerror(errno), client_sock);
			close(client_sock); /* don't leak the client socket on error */
			status  = THREAD_FAILURE;
			return status;
		}

		if(webinfo_1260.identifier != WEB_IDENTIFIER) {
			PRINTF("id error,client_sock = %d\n", client_sock);
			close(client_sock); /* don't leak the client socket on error */
			status  = THREAD_FAILURE;
			return status;
		}

		len = webinfo_1260.len - sizeof(webinfo_1260);

		PRINTF("web deal begin =%d\n", webinfo_1260.type);

		switch(webinfo_1260.type) {
			case INT_TYPE:
				midParseInt(webinfo_1260.identifier, client_sock, data_1260, len);
				break;

			case STRING_TYPE:
				midParseString(webinfo_1260.identifier, client_sock, data_1260, len);
				break;

			case STRUCT_TYPE:
				midParseStruct(webinfo_1260.identifier, client_sock, data_1260, len);
				break;

			default:
				break;
		}

		PRINTF("web deal end =%d\n", webinfo_1260.type);
		close(client_sock);
	}

	//ProtocolExit();

	close(listenSocket);
	DEBUG(DL_DEBUG, "Web listen Thread Function Exit!!\n");
	return status;
}
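
As written, weblistenThrFxn() exits with THREAD_FAILURE when a single client misbehaves (a short recv() or a bad identifier), tearing down the listener for everyone. A more forgiving variant (a sketch, not the project's code) would drop only the offending client and keep accepting:

/* Sketch: inside the accept loop, recover from a per-client error
 * instead of exiting the whole listener thread. */
if(recvlen < 1 || webinfo_1260.identifier != WEB_IDENTIFIER) {
	PRINTF("bad request on sock %d, dropping client\n", client_sock);
	close(client_sock);  /* drop this client only */
	continue;            /* keep servicing the listen socket */
}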