Example #1
/******************************************************************************
 * appMain
 ******************************************************************************/
Int appMain(Args * args)
{
    VIDENC1_Params         params    = Venc1_Params_DEFAULT;
    VIDENC1_DynamicParams  dynParams = Venc1_DynamicParams_DEFAULT;
    BufferGfx_Attrs        gfxAttrs  = BufferGfx_Attrs_DEFAULT;
    Buffer_Attrs           bAttrs    = Buffer_Attrs_DEFAULT;
    Time_Attrs             tAttrs    = Time_Attrs_DEFAULT;
    Venc1_Handle           hVe1      = NULL;
    FILE                  *outFile   = NULL;
    FILE                  *reconFile = NULL;
    FILE                  *inFile    = NULL;
    Engine_Handle          hEngine   = NULL;
    Time_Handle            hTime     = NULL;
    Bool                   flushed   = FALSE;
    Bool                   mustExit  = FALSE;
    BufTab_Handle          hBufTab   = NULL;
    Buffer_Handle          hOutBuf   = NULL;
    Buffer_Handle          hFreeBuf  = NULL;
    Buffer_Handle          hInBuf    = NULL;
    Buffer_Handle          hReconBuf = NULL;
    Int                    numFrame  = 0;
    Int                    flushCntr = 1;
    Int                    bufIdx;
    Int                    inBufSize, outBufSize;
    Cpu_Device             device;
    Int                    numBufs;
    ColorSpace_Type        colorSpace;
    UInt32                 time;
    Int                    ret = Dmai_EOK;

    printf("Starting application...\n");

    /* Initialize the codec engine run time */
    CERuntime_init();

    /* Initialize DMAI */
    Dmai_init();

    /* Determine which device the application is running on */
    if (Cpu_getDevice(NULL, &device) < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to determine target board\n");
        goto cleanup;
    }

    if (args->benchmark) {
        hTime = Time_create(&tAttrs);

        if (hTime == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to create Time object\n");
            goto cleanup;
        }
    }

    /* Open the input file containing the raw YUV data */
    inFile = fopen(args->inFile, "rb");

    if (inFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open input file %s\n", args->inFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(inFile, vbufferIn, _IOFBF, sizeof(vbufferIn)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to setvbuf on input file descriptor\n");
        goto cleanup;
    }

    /* Open the output file for the encoded data */
    outFile = fopen(args->outFile, "wb");

    if (outFile == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open output file %s\n", args->outFile);
        goto cleanup;
    }

    /* Using a larger vbuf to enhance performance of file i/o */
    if (setvbuf(outFile, vbufferOut, _IOFBF, sizeof(vbufferOut)) != 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to setvbuf on output file descriptor\n");
        goto cleanup;
    }

    /* Open the output file for the reconstructed frames */
    if (args->writeReconFrames) {
        reconFile = fopen(args->reconFile, "wb");

        if (reconFile == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to open output file %s\n", args->reconFile);
            goto cleanup;
        }

        /* Using a larger vbuf to enhance performance of file i/o */
        if (setvbuf(reconFile, vbufferRecon, _IOFBF,
                    sizeof(vbufferRecon)) != 0) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to setvbuf on output file descriptor\n");
            goto cleanup;
        }
    }

    /* Open the codec engine */
    hEngine = Engine_open(args->engineName, NULL, NULL);

    if (hEngine == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to open codec engine: %s\n", args->engineName);
        goto cleanup;
    }

    /* Set up codec parameters depending on bit rate */
    if (args->bitRate < 0) {
        /* Variable bit rate */
        params.rateControlPreset = IVIDEO_NONE;

        /*
         * For variable bit rate, use a placeholder bit rate value (> 0),
         * since it will be ignored.
         */
        params.maxBitRate        = 2000000;
    }
    else {
        /* Constant bit rate */
        params.rateControlPreset = IVIDEO_LOW_DELAY;
        params.maxBitRate        = args->bitRate;
    }

    /* Set up codec parameters depending on device */
    switch (device) {
    case Cpu_Device_DM6467:
        params.inputChromaFormat = XDM_YUV_420SP;
        params.reconChromaFormat = XDM_CHROMA_NA;
        break;
    case Cpu_Device_DM355:
        params.inputChromaFormat = XDM_YUV_422ILE;
        params.reconChromaFormat = XDM_YUV_420P;
        break;
    case Cpu_Device_DM365:
    case Cpu_Device_DM368:
        params.inputChromaFormat = XDM_YUV_420SP;
        params.reconChromaFormat = XDM_YUV_420SP;
        break;
    case Cpu_Device_DM3730:
        params.rateControlPreset = IVIDEO_STORAGE;
        params.inputChromaFormat = XDM_YUV_422ILE;
        break;
    default:
        params.inputChromaFormat = XDM_YUV_422ILE;
        break;
    }

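    /* Maximum frame dimensions the codec instance must support */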
    params.maxWidth              = args->width;
    params.maxHeight             = args->height;

    /* Workaround for SDOCM00068944: h264fhdvenc fails
       to create the codec when params.dataEndianness is
       set to XDM_BYTE */
    if (device == Cpu_Device_DM6467) {
        if (!strcmp(args->codecName, "h264fhdvenc")) {
            params.dataEndianness        = XDM_LE_32;
        }
    }

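    /*
     * maxInterFrameInterval of 1 disables B-frames; values greater than 1
     * enable B-frame support (see the input buffer allocation below). The
     * dynamic parameters mirror the creation-time maximums.
     */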
    params.maxInterFrameInterval = 1;
    dynParams.targetBitRate      = params.maxBitRate;
    dynParams.inputWidth         = params.maxWidth;
    dynParams.inputHeight        = params.maxHeight;

    /* Create the video encoder */
    hVe1 = Venc1_create(hEngine, args->codecName, &params, &dynParams);

    if (hVe1 == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create video encoder: %s\n", args->codecName);
        goto cleanup;
    }

    /* Ask the codec how much input data it needs */
    inBufSize = Venc1_getInBufSize(hVe1);

    /* Ask the codec how much space it needs for output data */
    outBufSize = Venc1_getOutBufSize(hVe1);

    /* Which color space to use in the graphics buffers depends on the device */
    colorSpace = ((device == Cpu_Device_DM6467)||
                  (device == Cpu_Device_DM365) ||
                  (device == Cpu_Device_DM368)) ? ColorSpace_YUV420PSEMI :
                 ColorSpace_UYVY;

    /* Align buffers to cache line boundary */
    gfxAttrs.bAttrs.memParams.align = bAttrs.memParams.align = BUFSIZEALIGN;

    /* Use cached buffers if requested */
    if (args->cache) {
        gfxAttrs.bAttrs.memParams.flags = bAttrs.memParams.flags
                                          = Memory_CACHED;
    }

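    /* Set up the graphics buffer dimensions; on DM6467/DM365/DM368 the
       buffer height must additionally be rounded up to the codec alignment */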
    gfxAttrs.dim.width      = args->width;
    gfxAttrs.dim.height     = args->height;
    if ((device == Cpu_Device_DM6467) || (device == Cpu_Device_DM365)
            || (device == Cpu_Device_DM368)) {
        gfxAttrs.dim.height = Dmai_roundUp(gfxAttrs.dim.height, CODECHEIGHTALIGN);
    }
    gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(args->width, colorSpace);
    gfxAttrs.colorSpace     = colorSpace;

    if (inBufSize < 0) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to calculate buffer attributes\n");
        goto cleanup;
    }

    /* Number of input buffers required */
    if (params.maxInterFrameInterval > 1) {
        /* B frame support */
        numBufs = params.maxInterFrameInterval;
    }
    else {
        numBufs = 1;
    }

    /* Create a table of input buffers of the size requested by the codec */
    hBufTab =
        BufTab_create(numBufs, Dmai_roundUp(inBufSize, BUFSIZEALIGN),
                      BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to allocate contiguous buffers\n");
        goto cleanup;
    }

    /* Set input buffer table */
    Venc1_setBufTab(hVe1, hBufTab);

    /* Create the reconstructed frame buffer for raw yuv data */
    if (args->writeReconFrames) {
        hReconBuf =
            Buffer_create(Dmai_roundUp(inBufSize, BUFSIZEALIGN),
                          BufferGfx_getBufferAttrs(&gfxAttrs));

        if (hReconBuf == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to allocate contiguous buffer\n");
            goto cleanup;
        }
    }

    /* Create the output buffer for encoded video data */
    hOutBuf = Buffer_create(Dmai_roundUp(outBufSize, BUFSIZEALIGN), &bAttrs);

    if (hOutBuf == NULL) {
        ret = Dmai_EFAIL;
        fprintf(stderr,"Failed to create contiguous buffer\n");
        goto cleanup;
    }

    while (1) {

        /* Get a buffer for input */
        hInBuf = BufTab_getFreeBuf(hBufTab);

        if (hInBuf == NULL) {
            ret = Dmai_EFAIL;
            fprintf(stderr,"Failed to get a free contiguous buffer from BufTab\n");
            BufTab_print(hBufTab);
            goto cleanup;
        }

        if (args->benchmark) {
            if (Time_reset(hTime) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to reset timer\n");
                goto cleanup;
            }
        }

        /* Read a yuv input frame */
        printf("\n Frame %d: ", numFrame);
        if ((device == Cpu_Device_DM6467)||
                (device == Cpu_Device_DM365) ||
                (device == Cpu_Device_DM368)) {
            if (args->sp) {
                if (readFrame420SP(hInBuf, inFile, args->height) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            } else {
                if (readFrame420P(hInBuf, inFile, args->height) < 0) {
                    ret = Dmai_EFAIL;
                    goto cleanup;
                }
            }
        }
        else {
            if (readFrameUYVY(hInBuf, inFile) < 0) {
                ret = Dmai_EFAIL;
                mustExit = TRUE;
            }
        }

        if (++numFrame == args->numFrames || mustExit == TRUE) {
            if (!(params.maxInterFrameInterval > 1)) {
                /* No B-frame support */
                printf("... exiting \n");
                goto cleanup;
            }

            /*
             * When encoding a stream with B-frames, ending the processing
             * requires freeing the buffers held by the encoder. This is done
             * by flushing the encoder and performing a final process() call
             * with a dummy input buffer.
             */
            printf("\n... exiting with flush (B-frame stream) \n");
            flushCntr = params.maxInterFrameInterval-1;
            flushed = TRUE;
            Venc1_flush(hVe1);
        }

        if (args->benchmark) {
            if (Time_delta(hTime, &time) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to get timer delta\n");
                goto cleanup;
            }

            printf("Read time: %uus\n", (Uns)time);
        }

        /*
         * The following flushing loop iterates more than once only when
         * the encoder completes processing by flushing the frames it still
         * holds. All flushed frames are encoded as P or I frames.
         */

        for (bufIdx = 0; bufIdx < flushCntr; bufIdx++) {

            if (args->cache) {
                /*
                 *  To meet xDAIS DMA Rule 7, when input buffers are cached, we
                 *  must write back the cache into physical memory.  Also, per
                 *  DMA Rule 7, we must invalidate the output buffer from
                 *  cache before providing it to any xDAIS algorithm.
                 */
                Memory_cacheWbInv(Buffer_getUserPtr(hInBuf),
                                  Buffer_getSize(hInBuf));

                /* Per DMA Rule 7, the output buffer cache lines must be invalidated */
                Memory_cacheInv(Buffer_getUserPtr(hOutBuf),
                                Buffer_getSize(hOutBuf));

                if (args->benchmark) {
                    if (Time_delta(hTime, &time) < 0) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to get timer delta\n");
                        goto cleanup;
                    }

                    printf("Pre-process cache maintenance: %uus \n", (Uns) time);
                }
            }

            /* Make sure the whole buffer is used for input */
            BufferGfx_resetDimensions(hInBuf);

            /* Encode the video buffer */
            if (Venc1_process(hVe1, hInBuf, hOutBuf) < 0) {
                ret = Dmai_EFAIL;
                fprintf(stderr,"Failed to encode video buffer\n");
                goto cleanup;
            }

            /* If the encoder generated output content, free the released buffer */
            if (Buffer_getNumBytesUsed(hOutBuf) > 0) {
                /* Get the input buffer released by the encoder */
                hFreeBuf = Venc1_getFreeBuf(hVe1);
                /* Return it to the buffer table */
                BufTab_freeBuf(hFreeBuf);
            }
            /* The encoder did not generate output content */
            else {
                /*
                 * In a non B-frame sequence the encoder probably skipped the
                 * frame to stay within the target bit rate, so free the input
                 * buffer ourselves.
                 */
                if (params.maxInterFrameInterval <= 1) {
                    printf(" Encoder generated 0 size frame\n");
                    BufTab_freeBuf(hInBuf);
                }
            }

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get encode time\n");
                    goto cleanup;
                }

                printf("[%d] Encode: %uus\n", numFrame, (Uns)time);
            }

            if (args->cache) {
                /* Writeback the outBuf. */
                Memory_cacheWb(Buffer_getUserPtr(hOutBuf),
                               Buffer_getSize(hOutBuf));

                if (args->benchmark) {
                    if (Time_delta(hTime, &time) < 0) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to get timer delta\n");
                        goto cleanup;
                    }

                    printf("Post-process cache write back: %uus \n", (Uns) time);
                }
            }

            /* Write the encoded frame to the file system */
            if (Buffer_getNumBytesUsed(hOutBuf)) {
                if (fwrite(Buffer_getUserPtr(hOutBuf),
                           Buffer_getNumBytesUsed(hOutBuf), 1, outFile) != 1) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to write encoded video data to file\n");
                    goto cleanup;
                }
            }

            /* Write the reconstructed frame to the file system */
            if (args->writeReconFrames) {
                processReconData(Venc1_getReconBufs(hVe1), hInBuf, hReconBuf);

                if (Buffer_getNumBytesUsed(hReconBuf)) {
                    if (fwrite(Buffer_getUserPtr(hReconBuf),
                               Buffer_getNumBytesUsed(hReconBuf), 1, reconFile) != 1) {
                        ret = Dmai_EFAIL;
                        fprintf(stderr,"Failed to write reconstructed frame to file\n");
                        goto cleanup;
                    }
                }
            }

            if (args->benchmark) {
                if (Time_delta(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    printf("Failed to get timer delta\n");
                    goto cleanup;
                }

                printf("File write time: %uus\n", (Uns)time);

                if (Time_total(hTime, &time) < 0) {
                    ret = Dmai_EFAIL;
                    fprintf(stderr,"Failed to get timer total\n");
                    goto cleanup;
                }

                printf("Total: %uus\n", (Uns)time);
            }
        }

        /* If the codec flushing completed, exit main thread */
        if (flushed) {
            /* Free dummy input buffer used for flushing process() calls */
            printf("freeing dummy input buffer ... \n");
            BufTab_freeBuf(hInBuf);
            break;
        }
    }

cleanup:
    /* Clean up the application */
    if (hOutBuf) {
        Buffer_delete(hOutBuf);
    }

    if (hReconBuf) {
        Buffer_delete(hReconBuf);
    }

    if (hVe1) {
        Venc1_delete(hVe1);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    if (hEngine) {
        Engine_close(hEngine);
    }

    if (inFile) {
        fclose(inFile);
    }

    if (outFile) {
        fclose(outFile);
    }

    if (reconFile) {
        fclose(reconFile);
    }

    if (hTime) {
        Time_delete(hTime);
    }

    printf("End of application.\n");

    if (ret == Dmai_EFAIL)
        return 1;
    else
        return 0;
}
Example #2
Void *videoThrFxn(Void *arg)
{
	VideoEnv *envp = (VideoEnv *) arg;

	Venc1_Handle hVe1 = NULL;
	VIDENC1_Params params = Venc1_Params_DEFAULT;
	VIDENC1_DynamicParams dynParams = Venc1_DynamicParams_DEFAULT;
	IH264VENC_Params h264Params = IH264VENC_PARAMS;
	IH264VENC_DynamicParams h264DynParams = H264VENC_TI_IH264VENC_DYNAMICPARAMS;
	VUIParamBuffer VUI_Buffer = H264VENC_TI_VUIPARAMBUFFER;

	BufTab_Handle hVidBufTab = NULL;
	Buffer_Handle hVInBuf, hWOutBuf;
	BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;

	ColorSpace_Type colorSpace = ColorSpace_YUV420PSEMI;

	Int bufSize = 0;

	Void *status = THREAD_SUCCESS;

	/* Initialization */
	params.maxWidth = envp->imageWidth;
	params.maxHeight = envp->imageHeight;
	params.inputChromaFormat = XDM_YUV_420SP;
	params.reconChromaFormat = XDM_YUV_420SP;
	params.maxFrameRate = envp->videoFrameRate;
	params.encodingPreset = XDM_USER_DEFINED;
	params.rateControlPreset = IVIDEO_USER_DEFINED;
	params.maxBitRate = 10000000;

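	/*
	 * Dynamic (runtime) parameters: target bit rate, input dimensions and
	 * frame rates. captureWidth carries the 32-byte-aligned line pitch of
	 * the capture buffers.
	 */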
	dynParams.targetBitRate = envp->videoBitRate * 0.9;
	dynParams.inputWidth = envp->imageWidth;
	dynParams.captureWidth = Dmai_roundUp(BufferGfx_calcLineLength(envp->imageWidth, colorSpace), 32);
	dynParams.inputHeight = envp->imageHeight;
	dynParams.refFrameRate = params.maxFrameRate;
	dynParams.targetFrameRate = params.maxFrameRate;
	dynParams.intraFrameInterval = 0;
	dynParams.interFrameInterval = 0;

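	/*
	 * Extended H.264 encoder parameters: the base VIDENC1 creation params
	 * are embedded in the codec-specific structure, and its size field
	 * must be set to the extended structure size.
	 */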
	h264Params.videncParams = params;
	h264Params.videncParams.size = sizeof(IH264VENC_Params);
	h264Params.encQuality = 1;
	h264Params.enableDDRbuff = 1; /* Uses DDR instead of VICP buffers */
	h264Params.enableARM926Tcm = 0;
	h264Params.enableVUIparams = (0x1 << 1);
	h264Params.videncParams.inputContentType = IVIDEO_PROGRESSIVE;

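	/*
	 * Extended H.264 dynamic parameters wrap the base dynamic parameters;
	 * again the size field must reflect the extended structure.
	 */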
	h264DynParams.videncDynamicParams = dynParams;
	h264DynParams.videncDynamicParams.size = sizeof(IH264VENC_DynamicParams);

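	/*
	 * VUI (Video Usability Information): signal aspect ratio, timing,
	 * NAL HRD and picture structure information in the bitstream.
	 * timeScale is derived from maxFrameRate, which XDM expresses in
	 * frames per second * 1000.
	 */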
	h264DynParams.VUI_Buffer = &VUI_Buffer;
	h264DynParams.VUI_Buffer->aspectRatioInfoPresentFlag = 1;
	h264DynParams.VUI_Buffer->overscanInfoPresentFlag = 0;
	h264DynParams.VUI_Buffer->videoSignalTypePresentFlag = 0;
	h264DynParams.VUI_Buffer->timingInfoPresentFlag = 1;
	h264DynParams.VUI_Buffer->numUnitsInTicks = 1;
	h264DynParams.VUI_Buffer->timeScale = params.maxFrameRate / 1000;
	h264DynParams.VUI_Buffer->fixedFrameRateFlag = 1; 
	h264DynParams.VUI_Buffer->nalHrdParameterspresentFlag = 1;
	h264DynParams.VUI_Buffer->picStructPresentFlag = 1;

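	/* Request an IDR frame every 15 frames */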
	h264DynParams.idrFrameInterval = 15;

	hVe1 = Venc1_create(envp->hEngine, envp->videoEncoder,
			(IVIDENC1_Params *) &h264Params,
			(IVIDENC1_DynamicParams *) &h264DynParams);
	if (hVe1 == NULL) {
		ERR("Failed to create video encoder: %s\n", envp->videoEncoder);
		cleanup(THREAD_FAILURE);
	}

	/* Store the output buffer size in the environment */
	envp->outBufSize = Venc1_getOutBufSize(hVe1);

	/* Signal that the codec is created and output buffer size available */
	Rendezvous_meet(envp->hRendezvousWriter);

	/* Video BufTab create */
	BufferGfx_calcDimensions(VideoStd_D1_PAL, colorSpace, &gfxAttrs.dim);
	gfxAttrs.dim.width = 704;
	gfxAttrs.dim.height = 576;
	gfxAttrs.dim.lineLength = Dmai_roundUp(BufferGfx_calcLineLength(gfxAttrs.dim.width, colorSpace), 32);
	gfxAttrs.colorSpace = colorSpace;
	bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
	hVidBufTab = BufTab_create(NUM_VIDEO_BUFS, bufSize, BufferGfx_getBufferAttrs(&gfxAttrs));
	if (hVidBufTab == NULL) {
		ERR("Failed to create video buftab\n");
		cleanup(THREAD_FAILURE);
	}

	/* Set input buffer table */
	Venc1_setBufTab(hVe1, hVidBufTab);

	/* Send video buffers to DEI */
	Int nBufId = 0;
	for (nBufId = 0; nBufId < NUM_VIDEO_BUFS; nBufId++) {
		hVInBuf = BufTab_getBuf(hVidBufTab, nBufId);
		if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}
	}

	/* Signal that initialization is done and wait for other threads */
	Rendezvous_meet(envp->hRendezvousInit);

	while(1) {

		/* Get buffer from DEI thread */
		if(Fifo_get(envp->hVideoInFifo, &hVInBuf) < 0) {
			ERR("Failed to get buffer from dei thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Get buffer from Writer thread */
		if(Fifo_get(envp->hWriterOutFifo, &hWOutBuf) < 0) {
			ERR("Failed to get buffer from writer thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Make sure the whole buffer is used for input */
		BufferGfx_resetDimensions(hVInBuf);

		/* Encode */
		if (Venc1_process(hVe1, hVInBuf, hWOutBuf) < 0) {
			ERR("Failed to encode video buffer\n");
			cleanup(THREAD_FAILURE);
		}

		/* Put buffer to dei thread */
		if (Fifo_put(envp->hVideoOutFifo, hVInBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}

		/* Put buffer to writer thread */
		if (Fifo_put(envp->hWriterInFifo, hWOutBuf) < 0) {
			ERR("Failed to send buffer to dei thread\n");
			cleanup(THREAD_FAILURE);
		}

	}

cleanup:

	/* Make sure the other threads aren't waiting for us */
	Rendezvous_force(envp->hRendezvousInit);
	Rendezvous_force(envp->hRendezvousWriter);

	/* Make sure the other threads aren't waiting for init to complete */
	Rendezvous_meet(envp->hRendezvousCleanup);

	if (hVidBufTab) {
		BufTab_delete(hVidBufTab);
	}

	if (hVe1) {
		Venc1_delete(hVe1);
	}

	return status;
}