Example #1
tResult cXtionCamera::Init(tInitStage eStage, __exception)
{
    RETURN_IF_FAILED(cFilter::Init(eStage, __exception_ptr));
    if (eStage == StageFirst)
    {
        //create the output pins of the filter
        RETURN_IF_FAILED(m_oRGBVideo.Create("Video_RGB", adtf::IPin::PD_Output, static_cast<IPinEventSink*> (this)));
        RETURN_IF_FAILED(RegisterPin(&m_oRGBVideo));

        RETURN_IF_FAILED(m_oDepthImage.Create("Depth_Image", adtf::IPin::PD_Output, static_cast<IPinEventSink*> (this)));
        RETURN_IF_FAILED(RegisterPin(&m_oDepthImage));
    }
    else if (eStage == StageNormal)
    {
        //load the configuration file with the settings for the Xtion camera
        tResult nResult;
        nResult = LoadConfiguration(); 
        if (IS_FAILED(nResult))
            THROW_ERROR_DESC(nResult,"Failed to load the configuration file for the xtion");          
        
        //set the video format of the RGB video pin
        m_sBitmapFormat.nWidth = m_nWidthVideo;
        m_sBitmapFormat.nHeight = m_nHeightVideo;
        m_sBitmapFormat.nBitsPerPixel = 24;
        m_sBitmapFormat.nPixelFormat = cImage::PF_RGB_888;
        m_sBitmapFormat.nBytesPerLine = m_nWidthVideo * 3;
        m_sBitmapFormat.nSize = m_sBitmapFormat.nBytesPerLine * m_nHeightVideo;
        m_sBitmapFormat.nPaletteSize = 0;
        m_oRGBVideo.SetFormat(&m_sBitmapFormat, NULL);
        
        //set the video format of the depth image pin
        m_sBitmapFormatDepth.nWidth = m_nWidthDepth;
        m_sBitmapFormatDepth.nHeight = m_nHeightDepth;
        m_sBitmapFormatDepth.nBitsPerPixel = 16;
        m_sBitmapFormatDepth.nPixelFormat = cImage::PF_GREYSCALE_16;
        m_sBitmapFormatDepth.nBytesPerLine = m_nWidthDepth * 2;
        m_sBitmapFormatDepth.nSize = m_sBitmapFormatDepth.nBytesPerLine * m_nHeightDepth;
        m_sBitmapFormatDepth.nPaletteSize = 0;
        m_oDepthImage.SetFormat(&m_sBitmapFormatDepth, NULL);    
    }
    else if (eStage == StageGraphReady)    
    {        
        //check which streams have to be enabled in the Xtion
        tBool videoEnabled = m_oRGBVideo.IsConnected();        
        tBool depthEnabled = m_oDepthImage.IsConnected();

        // if no output pin is connected, enable the depth stream because at least one stream must be enabled
        if (videoEnabled == tFalse && depthEnabled == tFalse) depthEnabled = tTrue;
            
        // initialize the grabber with the video formats, the streams that have to be enabled, the frame rates and some more options
        if (!(m_xtionGrabber.initialize(m_nWidthDepth,m_nHeightDepth,m_FPSDepth,m_nWidthVideo,m_nHeightVideo,m_FPSVideo,depthEnabled,videoEnabled,m_setDepthInMillimeter,m_setRegistration,m_setDepthColorSync,m_setAutoExposure,m_setAutoWhiteBalance)))
            THROW_ERROR_DESC(ERR_DEVICE_NOT_READY,"Failed to initialize Xtion!")
        else
            LOG_INFO(cString::Format("Xtion Capture Device: Grabber is now initialized. Depth mode: %d Video mode: %d",depthEnabled,videoEnabled));
        // create the thread that reads the data from the Xtion
        tResult nResult;
        nResult = m_oWorker.Create(cKernelThread::TF_Suspended, static_cast<adtf::IKernelThreadFunc*>(this));
        if (IS_FAILED(nResult))
            THROW_ERROR_DESC(nResult,"Failed to create thread for xtion");
    }

    RETURN_NOERROR;
}
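
The thread created in StageGraphReady runs the filter's own ThreadFunc, since the filter registers itself as the IKernelThreadFunc. The grabber loop is not part of this example, so the following is only a minimal sketch of what such a loop could look like: m_xtionGrabber.getRGBData() is a hypothetical placeholder for the real grabber accessor, while the media sample handling follows the usual ADTF 2 pattern (AllocMediaSample, Update, Transmit).

// Minimal sketch only - not the original implementation.
// m_xtionGrabber.getRGBData() is a hypothetical accessor standing in for the real grabber API.
tResult cXtionCamera::ThreadFunc(cKernelThread* pThread, tVoid* pvUserData, tSize szUserData)
{
    // fetch the latest RGB frame from the grabber (hypothetical call)
    const tVoid* pFrameData = m_xtionGrabber.getRGBData();
    if (pFrameData == NULL)
        RETURN_NOERROR;

    // wrap the frame in a media sample and transmit it on the RGB output pin
    cObjectPtr<IMediaSample> pSample;
    RETURN_IF_FAILED(AllocMediaSample((tVoid**)&pSample));
    RETURN_IF_FAILED(pSample->Update(_clock->GetStreamTime(), pFrameData,
                                     m_sBitmapFormat.nSize, IMediaSample::MSF_None));
    RETURN_IF_FAILED(m_oRGBVideo.Transmit(pSample));

    RETURN_NOERROR;
}
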
Example #2
tResult ROI::Init(tInitStage eStage, __exception)
{
    RETURN_IF_FAILED(cFilter::Init(eStage, __exception_ptr));

    if (eStage == StageFirst)
    {
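        // create the media type descriptions and the filter's input and output pins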
        RETURN_IF_FAILED(InitDescriptions(__exception_ptr));
        RETURN_IF_FAILED(CreateInputPins(__exception_ptr));
        RETURN_IF_FAILED(CreateOutputPins(__exception_ptr));
    }
    else if (eStage == StageNormal)
    {
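        // read the ROI configuration from the filter properties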
        hoodScanLineNumber = GetPropertyInt(HOOD_SCANLINE_NUMBER_PROPERTY);
        roomScanLineNumber = GetPropertyInt(ROOM_SCANLINE_NUMBER_PROPERTY);
        maxHoodDetectionCount = GetPropertyInt(MAX_HOOD_DETECTION_COUNT_PROPERTY);
        rgbVideoManipulation = GetPropertyInt(RGB_VIDEO_MANIPULATION_PROPERTY);
        depthVideoManipulation = GetPropertyInt(DEPTH_VIDEO_MANIPULATION_PROPERTY);
        isHoodDetectionEnabled = GetPropertyBool(DETECT_HOOD_PROPERTY);
        isRoomDetectionEnabled = GetPropertyBool(DETECT_ROOM_PROPERTY);
        roomHeightManipulation = GetPropertyFloat(ROOM_HEIGHT_MANIPULATION_PROPERTY);

        logger.Log(cString::Format("roomHeightManipulation: %d", roomHeightManipulation).GetPtr(), false);
        logger.Log(cString::Format("hoodScanLineNumber: %d", hoodScanLineNumber).GetPtr(), false);
        logger.Log(cString::Format("roomScanLineNumber: %d", roomScanLineNumber).GetPtr(), false);
        logger.Log(cString::Format("processingWidthPercentage: %d", processingWidthPercentage).GetPtr(), false);
        logger.Log(cString::Format("maxHoodDetectionCount: %d", maxHoodDetectionCount).GetPtr(), false);
        logger.Log(cString::Format("rgbVideoManipulation: %d", rgbVideoManipulation).GetPtr(), false);
        logger.Log(cString::Format("depthVideoManipulation: %d", depthVideoManipulation).GetPtr(), false);
        logger.Log(cString::Format("isHoodDetectionEnabled: %d", isHoodDetectionEnabled).GetPtr(), false);
        logger.Log(cString::Format("isRoomDetectionEnabled: %d", isRoomDetectionEnabled).GetPtr(), false);
    }
    else if (eStage == StageGraphReady)
    {
        // init RGB Video
        cObjectPtr<IMediaType> rgbMediaType;
        RETURN_IF_FAILED(rgbVideoInputPin.GetMediaType(&rgbMediaType));

        cObjectPtr<IMediaTypeVideo> rgbVideoType;
        RETURN_IF_FAILED(rgbMediaType->GetInterface(IID_ADTF_MEDIA_TYPE_VIDEO, (tVoid **) &rgbVideoType));

        rgbVideoInputFormat = *(rgbVideoType->GetFormat());
        rgbVideoOutputFormat = *(rgbVideoType->GetFormat());
        rgbVideoOutputPin.SetFormat(&rgbVideoOutputFormat, NULL);

        // init Depth Video
        cObjectPtr<IMediaType> depthMediaType;
        RETURN_IF_FAILED(depthVideoInputPin.GetMediaType(&depthMediaType));

        cObjectPtr<IMediaTypeVideo> depthVideoType;
        RETURN_IF_FAILED(depthMediaType->GetInterface(IID_ADTF_MEDIA_TYPE_VIDEO, (tVoid **) &depthVideoType));

        depthVideoInputFormat = *(depthVideoType->GetFormat());
        depthVideoOutputFormat = *(depthVideoType->GetFormat());
        depthVideoOutputPin.SetFormat(&depthVideoOutputFormat, NULL);

        logger.Log(cString::Format("RGB Input format: %d x %d @ %d Bit", rgbVideoInputFormat.nWidth, rgbVideoInputFormat.nHeight,
                                   rgbVideoInputFormat.nBitsPerPixel).GetPtr(), false);
        logger.Log(cString::Format("RGB Output format: %d x %d @ %d Bit", rgbVideoOutputFormat.nWidth, rgbVideoOutputFormat.nHeight,
                                   rgbVideoOutputFormat.nBitsPerPixel).GetPtr(), false);

        logger.Log(cString::Format("Depth Input format: %d x %d @ %d Bit", depthVideoInputFormat.nWidth, depthVideoInputFormat.nHeight,
                                   depthVideoInputFormat.nBitsPerPixel).GetPtr(), false);
        logger.Log(cString::Format("Depth Output format: %d x %d @ %d Bit", depthVideoOutputFormat.nWidth, depthVideoOutputFormat.nHeight,
                                   depthVideoOutputFormat.nBitsPerPixel).GetPtr(), false);

        if (depthVideoOutputFormat.nBitsPerPixel != 8)
        {
            THROW_ERROR_DESC(depthVideoOutputFormat.nBitsPerPixel, "Wrong depth video format. Use HTWK_Grayscale in front of this filter.");
        }

        // init processing parameters
        processingData.processingWidth = depthVideoInputFormat.nWidth * (processingWidthPercentage / 100.0);
        processingData.startOffset = (depthVideoInputFormat.nWidth - processingData.processingWidth) / 2;
        processingData.hoodScanLineStepWidth = processingData.processingWidth / (hoodScanLineNumber - 1);
        processingData.roomScanLineStepWidth = processingData.processingWidth / (roomScanLineNumber - 1);

        logger.Log(cString::Format("hoodScanLineNumber: %d", hoodScanLineNumber).GetPtr(), false);
        logger.Log(cString::Format("processingWidthPercentage: %d", processingWidthPercentage).GetPtr(), false);
        logger.Log(cString::Format("processingWidth: %d", processingData.processingWidth).GetPtr(), false);
        logger.Log(cString::Format("startOffset: %d", processingData.startOffset).GetPtr(), false);
        logger.Log(cString::Format("hoodScanLineStepWidth: %d", processingData.hoodScanLineStepWidth).GetPtr(), false);
        logger.Log(cString::Format("roomScanLineStepWidth: %d", processingData.roomScanLineStepWidth).GetPtr(), false);
    }

    RETURN_NOERROR;
}
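
As a side note, the processing parameters computed in StageGraphReady are plain arithmetic. The short sketch below works through them with assumed values (a 640 pixel wide depth image, a processingWidthPercentage of 80 and a hoodScanLineNumber of 10; none of these values are taken from the example above).

// Worked example with assumed values (not taken from the filter above)
int nWidth = 640;                       // depth input width in pixels
int processingWidthPercentage = 80;     // portion of the image width to process
int hoodScanLineNumber = 10;            // number of hood scan lines

int processingWidth = nWidth * (processingWidthPercentage / 100.0);       // 640 * 0.8 = 512
int startOffset = (nWidth - processingWidth) / 2;                         // (640 - 512) / 2 = 64
int hoodScanLineStepWidth = processingWidth / (hoodScanLineNumber - 1);   // 512 / 9 = 56 (integer division)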