Example #1
void Xtion::update()
{
    int changedIndex = -1;
    openni::Status rc = openni::OpenNI::waitForAnyStream(&(streams[0]), (int)streams.size(), &changedIndex);
    if (rc != openni::STATUS_OK)
    {
        return;
    }
    switch (changedIndex)
    {
        case 0:
            updateColorImage();
            // Intentional fall-through: the break and "case 1" below were commented out
            // so the depth image is refreshed together with the color image.
        //    break;
        //case 1:
            updateDepthImage();
            break;
        default:
            break;
    }
}
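For context, a minimal sketch of how such a streams vector is typically prepared with OpenNI 2 and what the updateDepthImage() it dispatches to might read. The member names and the color/depth ordering are assumptions, not part of the example above.

// Hypothetical setup sketch (not from the project above). Index 0 of the
// streams vector is assumed to be the color stream and index 1 the depth stream.
#include <OpenNI.h>
#include <vector>

openni::Device device;
openni::VideoStream colorStream;
openni::VideoStream depthStream;
std::vector<openni::VideoStream*> streams;

bool setupStreams()
{
    if (openni::OpenNI::initialize() != openni::STATUS_OK) return false;
    if (device.open(openni::ANY_DEVICE) != openni::STATUS_OK) return false;

    colorStream.create(device, openni::SENSOR_COLOR);
    depthStream.create(device, openni::SENSOR_DEPTH);
    colorStream.start();
    depthStream.start();

    streams.push_back(&colorStream);   // index 0 -> updateColorImage()
    streams.push_back(&depthStream);   // index 1 -> updateDepthImage()
    return true;
}

// Assumed shape of updateDepthImage(): grab the latest depth frame from the stream.
void updateDepthImage()
{
    openni::VideoFrameRef frame;
    if (depthStream.readFrame(&frame) != openni::STATUS_OK) return;
    const openni::DepthPixel* pixels =
        static_cast<const openni::DepthPixel*>(frame.getData());
    // ... convert the 16-bit depth values (millimetres) to a displayable image ...
    (void)pixels;
}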
Example #2
int TrackingWindow::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    _id = QLabel::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        switch (_id) {
        case 0: updateDepthImage((*reinterpret_cast< unsigned char*(*)>(_a[1]))); break;
        case 1: updateMaskImage((*reinterpret_cast< unsigned char*(*)>(_a[1]))); break;
        case 2: addPoint((*reinterpret_cast< uint(*)>(_a[1])),(*reinterpret_cast< uint(*)>(_a[2])),(*reinterpret_cast< JointID(*)>(_a[3]))); break;
        default: ;
        }
        _id -= 3;
    }
    return _id;
}
Example #3
int Tracking::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    _id = QObject::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        switch (_id) {
        case 0: updateDepthImage((*reinterpret_cast< unsigned char*(*)>(_a[1]))); break;
        case 1: updateMaskImage((*reinterpret_cast< unsigned char*(*)>(_a[1]))); break;
        case 2: addPoint((*reinterpret_cast< uint(*)>(_a[1])),(*reinterpret_cast< uint(*)>(_a[2])),(*reinterpret_cast< JointID(*)>(_a[3]))); break;
        case 3: shutdown(); break;
        case 4: updateFrame(); break;
        case 5: recordOrStop((*reinterpret_cast< bool(*)>(_a[1]))); break;
        default: ;
        }
        _id -= 6;
    }
    return _id;
}
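Both qt_metacall() bodies above are generated by Qt's moc: each one forwards to the base class first, then dispatches an InvokeMetaMethod call to the method whose index matches its declaration order, and finally subtracts the number of local methods before returning. A sketch of the declarations the second dispatcher implies; the parameter names, the JointID placeholder, and the choice of slots rather than Q_INVOKABLE methods are assumptions read off the casts.

#include <QObject>

enum JointID { /* project-specific joint identifiers (assumed placeholder) */ };

class Tracking : public QObject
{
    Q_OBJECT

public slots:
    void updateDepthImage(unsigned char* image);    // _id == 0
    void updateMaskImage(unsigned char* image);     // _id == 1
    void addPoint(uint x, uint y, JointID joint);   // _id == 2
    void shutdown();                                // _id == 3
    void updateFrame();                             // _id == 4
    void recordOrStop(bool record);                 // _id == 5
};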
Example #4
void ofxRGBDCaptureGui::update(ofEventArgs& args){
    
	if(!providerSet || !depthImageProvider->deviceFound()){
		return;
	}
	
	//JG conv -- recordContext.update();
	depthImageProvider->update();
	if(depthImageProvider->isFrameNew()){
		if(currentTab == TabRecord){
			updateDepthImage(depthImageProvider->getRawDepth());
		}
		else if(currentTab == TabCalibrate){
			calibrationPreview.setTestImage( depthImageProvider->getRawIRImage() );
		}
		
		if(recorder.isRecording()){
			//JG conv -- recorder.addImage( (unsigned short*)recordDepth.getRawDepthPixels());
			recorder.addImage(depthImageProvider->getRawDepth());
		}
	}
}
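The raw depth handed to updateDepthImage() and recorder.addImage() here is a buffer of 16-bit millimetre values. A minimal sketch of the kind of 8-bit preview conversion updateDepthImage() performs, assuming a simple grayscale mapping with a 10 m clamp; the actual ofxRGBDepth implementation may map depth differently.

#include "ofMain.h"
#include <vector>

// Hypothetical helper, not the addon's actual code: turn 16-bit depth
// (millimetres) into an 8-bit grayscale preview stored in depthImage.
void convertDepthToPreview(const unsigned short* rawDepth, int w, int h, ofImage& depthImage){
	std::vector<unsigned char> preview(w * h);
	for(int i = 0; i < w * h; i++){
		unsigned short mm = rawDepth[i];
		// 0 means "no reading"; otherwise map 0-10000 mm to 255-0 so near is bright.
		preview[i] = (mm == 0) ? 0 : 255 - (unsigned char)ofMap(mm, 0, 10000, 0, 255, true);
	}
	depthImage.setFromPixels(preview.data(), w, h, OF_IMAGE_GRAYSCALE);
}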
Example #5
    void updateFrame()
    {
        // Acquire the frame
        pxcStatus sts = senseManager->AcquireFrame( true );
        if ( sts < PXC_STATUS_NO_ERROR ) {
            return;
        }

        // Initialize the image
        handImage = cv::Mat::zeros( DEPTH_HEIGHT, DEPTH_WIDTH, CV_8UC4 );

        // Get the frame data
        const PXCCapture::Sample *sample = senseManager->QuerySample();
        if ( sample ) {
            // Display each stream's data
            updateDepthImage( sample->depth );
        }

        // Update the hand data
        updateHandFrame();

        // Release the frame
        senseManager->ReleaseFrame();
    }
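In this sample, updateDepthImage( sample->depth ) presumably locks the PXCImage depth plane and paints it into the CV_8UC4 handImage initialized above. A rough sketch of that conversion follows; handImage is passed in as a parameter here only for self-containment, and the 4000 mm display range is an assumption.

#include <algorithm>
#include <opencv2/opencv.hpp>
#include "pxcimage.h"

// Hypothetical sketch, not the sample's actual implementation.
void updateDepthImage( PXCImage* depthFrame, cv::Mat& handImage )
{
    if ( depthFrame == nullptr ) {
        return;
    }

    // Lock the depth plane for reading (16-bit values in millimetres).
    PXCImage::ImageData data;
    pxcStatus sts = depthFrame->AcquireAccess(
        PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &data );
    if ( sts < PXC_STATUS_NO_ERROR ) {
        return;
    }

    for ( int y = 0; y < handImage.rows; y++ ) {
        // pitches[0] is the row stride in bytes.
        const unsigned short* row = reinterpret_cast<const unsigned short*>(
            data.planes[0] + y * data.pitches[0] );
        for ( int x = 0; x < handImage.cols; x++ ) {
            unsigned short d = row[x];
            // Map 0-4000 mm (assumed) to a gray value; 0 means no depth reading.
            unsigned char gray = (d == 0) ? 0
                : (unsigned char)( 255 - std::min<int>( d, 4000 ) * 255 / 4000 );
            handImage.at<cv::Vec4b>( y, x ) = cv::Vec4b( gray, gray, gray, 255 );
        }
    }

    depthFrame->ReleaseAccess( &data );
}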
Example #6
void PCSDKImage::paint(QPainter *painter)
{
    updateDepthImage();
    OpenCVImage::paint(painter);
}
Example #7
void ofxRGBDCaptureGui::draw(ofEventArgs& args){
    
	if(fullscreenPoints && currentTab == TabPlayback){
		drawPointcloud(depthSequence.currentDepthRaw, true);
		return;
	}
    
    bool drawCamera = providerSet && depthImageProvider->deviceFound();    
	if(currentTab == TabCalibrate){
        if(!drawCamera){
            ofPushStyle();
            ofSetColor(255, 0, 0);
            ofDrawBitmapString("Camera not found. Plug and unplug the device and restart the application.", previewRect.x + 30, previewRect.y + 30);
            ofPopStyle();
        }
        else{
            depthImageProvider->getRawIRImage().draw(previewRect);
            calibrationPreview.draw(0, btnheight*2);
            alignment.drawDepthImages();            
        }
	}
	else if(currentTab == TabRecord){

        if(!drawCamera){
            ofPushStyle();
            ofSetColor(255, 0, 0);
            ofDrawBitmapString("Camera not found. Plug and unplug the device and restart the application.", previewRect.x + 30, previewRect.y + 30);
            ofPopStyle();
        }
        else{
            if( currentRenderMode == RenderPointCloud){
                drawPointcloud(depthImageProvider->getRawDepth(), false);
            }
            else{
                ofPushStyle();
                ofSetColor(255, 255, 255, 60);
                ofLine(320, btnheight*2, 320, btnheight*2+480);
                ofLine(0, btnheight*2+240, 640, btnheight*2+240);
                ofPopStyle();
                //depthImageProvider->getDepthImage().draw(previewRect);        
                depthImage.draw(previewRect);
            }
        }
	}
	else if(currentTab == TabPlayback) {
        if(currentRenderMode == RenderPointCloud){
            drawPointcloud(depthSequence.currentDepthRaw, false);            
        }
        else {
            updateDepthImage(depthSequence.currentDepthRaw);
            depthImage.draw(previewRect);
        }
    
		//draw timeline
		timeline.draw();
	}
    
    if(currentTabObject != NULL){
        ofPushStyle();
        ofRectangle highlightRect = ofRectangle(currentTabObject->x,currentTabObject->y+currentTabObject->height*.75,
                                                currentTabObject->width,currentTabObject->height*.25);
        ofSetColor(timeline.getColors().highlightColor);
        ofRect(highlightRect);        
        ofPopStyle();    	
    }

    if(currentRenderModeObject != NULL){
        ofPushStyle();
        ofRectangle highlightRect = ofRectangle(currentRenderModeObject->x,currentRenderModeObject->y+currentRenderModeObject->height*.75,
                                                currentRenderModeObject->width,currentRenderModeObject->height*.25);
        ofSetColor(timeline.getColors().highlightColor);
        ofRect(highlightRect);        
        ofPopStyle();    	    
    }
    
    for(int i = 0; i < btnTakes.size(); i++){
    	if(btnTakes[i].isSelected){
        	ofPushStyle();
            ofSetColor(timeline.getColors().highlightColor);
            ofRectangle highlightRect(btnTakes[i].button->x,btnTakes[i].button->y,
                                      btnTakes[i].button->width, btnTakes[i].button->height*.25);

            ofRect(highlightRect);
            ofPopStyle();
        }
    	
        ofPushStyle();
        ofSetColor(timeline.getColors().disabledColor);
        float percentComplete = float(btnTakes[i].takeRef->framesConverted) / float(btnTakes[i].takeRef->numFrames);
        float processedWidth = btnTakes[i].button->width*percentComplete;
        ofRectangle highlightRect(btnTakes[i].button->x + processedWidth,
                                  btnTakes[i].button->y,
                                  btnTakes[i].button->width-processedWidth, btnTakes[i].button->height);
        ofRect(highlightRect);
        ofPopStyle();
    }
    
	//draw save meter if buffer is getting full
	if(recorder.isRecording()){
		ofPushStyle();
		ofSetColor(255, 0, 0);
		ofNoFill();
		ofSetLineWidth(5);
		
		ofRect(previewRect);
		ofPopStyle();
	}
	
	if(recorder.numFramesWaitingSave() > 0){
		ofPushStyle();
		float width = recorder.numFramesWaitingSave()/2000.0 * btnRecordBtn->width;
		ofFill();
		ofSetColor(255,0, 0);
		ofRect(btnRecordBtn->x,btnRecordBtn->y,width,btnRecordBtn->height);
		
		if(ofGetFrameNum() % 30 < 15){
			ofSetColor(255, 0, 0, 40);
			ofRect(*btnRecordBtn);
		}
		ofPopStyle();
	}
}