Example #1
void CUIDesignerView::OnInitialUpdate()
{
	__super::OnInitialUpdate();

	// TODO: Add your specialized code here and/or call the base class
	CUIDesignerDoc* pDoc=GetDocument();

	m_LayoutManager.Init(this->GetSafeHwnd(),pDoc->GetPathName());
	CFormUI* pForm=m_LayoutManager.GetForm();

	g_pClassView->InsertUITreeItem(pForm,pDoc->GetTitle());
	if(pForm->GetRowCount()>0)
	{
		InitUI(pForm->GetItemAt(0), 1);
		m_LayoutManager.GetManager()->InitControls(pForm->GetItemAt(0));
	}
	g_pClassView->SelectUITreeItem(pForm);
	m_bInit=true;

	m_MultiTracker.SetHandleSize(TRACKER_HANDLE_SIZE);
	m_MultiTracker.SetStyle(dottedLine|resizeOutside);
	m_MultiTracker.SetFormSize(pForm->GetInitSize());
	m_MultiTracker.Add(CreateTracker(pForm));

	SetScrollSizes(MM_TEXT,CSize(FORM_INIT_WIDTH+80,FORM_INIT_HEIGHT+80));
}
Example #2
void CUIDesignerView::RedoUI(CControlUI* pControl, CControlUI* pParent)
{
	ASSERT(pControl && pParent);
	if(!pControl || !pParent)
		return;

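	// Re-attach the control to its parent container, recreate its tracker, and force the container to re-layout.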
	CContainerUI* pContainer = static_cast<CContainerUI*>(pParent->GetInterface(_T("Container")));
	ExtendedAttributes* pExtended = (ExtendedAttributes*)pContainer->GetTag();
	pContainer->Add(pControl);
	m_MultiTracker.Add(CreateTracker(pControl));
	InitUI(pControl, pExtended->nDepth + 1, TRUE);
	pContainer->SetPos(pContainer->GetPos());
}
Example #3
File: main.cpp Project: Barbakas/windage
int main()
{
	// connect camera
	capture = cvCaptureFromCAM(CV_CAP_ANY);
	if(!capture)
	{
		std::cout << "cannot connect to any camera" << std::endl;
		exit(0);
	}

	cvNamedWindow("tracking information window");

	resizeImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	grayImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1);
	resultImage = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);

	logging = new windage::Logger(&std::cout);
	logging->updateTickCount();

	// create tracker
	tracker = CreateTracker();

#if USE_TEMPLATE_IMAEG
	IplImage* sampleImage = cvLoadImage(TEMPLATE_IMAGE, 0);

	double threshold = tracker->GetDetector()->GetThreshold();
	tracker->GetDetector()->SetThreshold(30.0);
	tracker->AttatchReferenceImage(sampleImage);
	tracker->TrainingReference(SCALE_FACTOR, SCALE_STEP);
	tracker->GetDetector()->SetThreshold(threshold);
#endif

	// initialize rendering engine using GLUT
	renderer = new OpenGLRenderer();
	renderer->Initialize(RENDERING_WIDTH, RENDERING_HEIGHT, "windage Camera Trajectory");
	renderer->SetCameraSize(WIDTH, HEIGHT);
	
	glutDisplayFunc(display);
	glutIdleFunc(idle);
	glutKeyboardFunc(keyboard);
	glutMouseFunc(mouseClick);
	glutMotionFunc(mouseMove);

	glutMainLoop();

	cvReleaseCapture(&capture);
	cvDestroyAllWindows();
}
Example #4
void CUIDesignerView::SelectUI(CControlUI* pControl)
{
	if(pControl==NULL||pControl==m_MultiTracker.GetFocused())
		return;

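	// Walk up to the top-level ancestor; only controls that belong to the current form can be selected.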
	CControlUI* pParent=pControl;
	CControlUI* pForm=pControl;
	while(pParent=pParent->GetParent())
		pForm=pParent;
	if(pForm!=m_LayoutManager.GetForm())
		return;

	m_MultiTracker.RemoveAll();
	if(pControl->IsVisible())
		m_MultiTracker.Add(CreateTracker(pControl));
	g_pPropertiesWnd->ShowProperty(pControl);

	this->Invalidate(FALSE);
}
Example #5
void CUIDesignerView::PasteUI(LPCTSTR xml)
{
	CDialogBuilder builder;
	CControlUI* pRoot=builder.Create(xml, (UINT)0, NULL, m_LayoutManager.GetManager());
	if(pRoot)
	{
		CControlUI* pParent = m_MultiTracker.GetFocused();
		if(pParent->GetInterface(_T("Container")) == NULL)
			pParent = pParent->GetParent();
		if(pParent == NULL)
			pParent = m_LayoutManager.GetForm();

		m_MultiTracker.RemoveAll();
		CContainerUI* pContainer = static_cast<CContainerUI*>(pParent->GetInterface(_T("Container")));
		CContainerUI* pRootContainer = static_cast<CContainerUI*>(pRoot->GetInterface(_T("Container")));
		ExtendedAttributes* pExtended = (ExtendedAttributes*)pContainer->GetTag();
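		// Move each pasted control into the target container, offsetting floating controls so the copy does not sit exactly on top of the original.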
		for(int i=0; i<pRootContainer->GetCount(); i++)
		{
			CControlUI* pControl = pRootContainer->GetItemAt(i);
			if(pControl->IsFloat())
			{
				SIZE sz = pControl->GetFixedXY();
				sz.cx += COPY_OFFSET_XY;
				sz.cy += COPY_OFFSET_XY;
				pControl->SetFixedXY(sz);
			}
			pContainer->Add(pControl);
			m_MultiTracker.Add(CreateTracker(pControl));
			InitUI(pControl, pExtended->nDepth + 1);
		}
		CArray<CControlUI*,CControlUI*> arrSelected;
		m_MultiTracker.GetSelected(arrSelected);
		m_UICommandHistory.Begin(arrSelected, actionAdd);
		m_UICommandHistory.End();

		pContainer->SetPos(pContainer->GetPos());

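		// The moved children now belong to the target container; disable auto-destroy on the temporary root before deleting it.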
		pRootContainer->SetAutoDestroy(false);
		delete pRootContainer;
		this->GetDocument()->SetModifiedFlag();
	}
}
Example #6
File: main.cpp Project: Barbakas/windage
int main()
{
	windage::Logger* log = new windage::Logger(&std::cout);

	char message[100];
	IplImage* inputImage1 = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* undistImage1 = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* grayImage1 = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1);
	IplImage* inputImage2 = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* undistImage2 = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	IplImage* grayImage2 = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1);

	// Multiple tracker initialization
	IplImage* trainingImage = cvLoadImage("reference1_320.png", 0);
	IplImage* referenceImage = cvLoadImage("reference1.png");
	
	windage::ModifiedSURFTracker* tracker1 = CreateTracker(trainingImage, 0);
	windage::ModifiedSURFTracker* tracker2 = CreateTracker(trainingImage, 5);

	// for undistortion and coordinate calculation
	windage::Calibration* calibration = new windage::Calibration();
	calibration->Initialize(intrinsicValues[0], intrinsicValues[1], intrinsicValues[2], intrinsicValues[3], intrinsicValues[4], intrinsicValues[5], intrinsicValues[6], intrinsicValues[7]);
	calibration->InitUndistortionMap(WIDTH, HEIGHT);

	windage::Matrix3 rotation12;
	windage::Matrix3 rotation21;
	windage::Vector3 translation12;
	windage::Vector3 translation21;	

	// adaptive threshold
	int fastThreshold = 30;
	const int MAX_FAST_THRESHOLD = 80;
	const int MIN_FAST_THRESHOLD = 40;
	const int ADAPTIVE_THRESHOLD_VALUE = 500;
	const int THRESHOLD_STEP = 1;

	bool updating = true;
	
	bool processing = true;
	cvNamedWindow("result1");
	cvNamedWindow("result2");
	while(processing)
	{
		// load input images and convert to grayscale
		log->updateTickCount();

		// load image -> undistImage
		undistImage1 = cvLoadImage("picture 8.jpg");
		undistImage2 = cvLoadImage("picture 9.jpg");

		calibration->Undistortion(undistImage1, inputImage1);
		calibration->Undistortion(undistImage2, inputImage2);
		cvCvtColor(inputImage1, grayImage1, CV_BGR2GRAY);
		cvCvtColor(inputImage2, grayImage2, CV_BGR2GRAY);
		log->log("capture", log->calculateProcessTime());

		// call tracking algorithm
		log->updateTickCount();
		tracker1->SetFeatureExtractThreshold(fastThreshold);
		tracker2->SetFeatureExtractThreshold(fastThreshold);
		tracker1->UpdateCameraPose(grayImage1);
		tracker2->UpdateCameraPose(grayImage2);

		double trackingTime = log->calculateProcessTime();
		log->log("tracking", trackingTime);
		log->logNewLine();

		// adaptively update the FAST threshold
		int featureCount1 = tracker1->GetFeatureCount();
		int featureCount2 = tracker2->GetFeatureCount();
#ifdef ADAPTIVE_THRESHOLD
		int featureCount = (featureCount1 + featureCount2) / 2.0;
		if(featureCount > ADAPTIVE_THRESHOLD_VALUE )	fastThreshold = MIN(MAX_FAST_THRESHOLD, fastThreshold+THRESHOLD_STEP);
		else											fastThreshold = MAX(MIN_FAST_THRESHOLD, fastThreshold-THRESHOLD_STEP);
#endif

		// draw tracking result
		int matchedCount1 = tracker1->GetMatchedCount();
		if(matchedCount1 > FIND_FEATURE_COUNT)
		{
			tracker1->DrawOutLine(inputImage1, true);
			tracker1->DrawInfomation(inputImage1, 100.0);
			tracker1->DrawDebugInfo(inputImage1);
		}
		int matchedCount2 = tracker2->GetMatchedCount();
		if(matchedCount2 > FIND_FEATURE_COUNT)
		{
			tracker2->DrawOutLine(inputImage2, true);
			tracker2->DrawInfomation(inputImage2, 100.0);
			tracker2->DrawDebugInfo(inputImage2);
		}

		std::cout << std::endl;
		CvScalar pos1 = tracker1->GetCameraParameter()->GetCameraPosition();
		std::cout << pos1.val[0] << ", " << pos1.val[1] << ", " << pos1.val[2] << std::endl;
		CvScalar pos2 = tracker2->GetCameraParameter()->GetCameraPosition();
		std::cout << pos2.val[0] << ", " << pos2.val[1] << ", " << pos2.val[2] << std::endl;
		std::cout << std::endl;
		

/*
		// update
		if(matchedCount1 > FIND_FEATURE_COUNT && matchedCount2 > FIND_FEATURE_COUNT && updating)
		{
			
			rotation12 = windage::MultiCameraCoordinate::GetRotation(tracker1->GetCameraParameter(), tracker2->GetCameraParameter());
			translation12 = windage::MultiCameraCoordinate::GetTranslation(tracker1->GetCameraParameter(), tracker2->GetCameraParameter());
			rotation21 = windage::MultiCameraCoordinate::GetRotation(tracker2->GetCameraParameter(), tracker1->GetCameraParameter());
			translation21 = windage::MultiCameraCoordinate::GetTranslation(tracker2->GetCameraParameter(), tracker1->GetCameraParameter());
			updating = false;
		}

		double tempValue;
		tempValue = cvGetReal2D(tracker1->GetCameraParameter()->GetExtrinsicMatrix(), 1, 3);
		cvSetReal2D(tracker1->GetCameraParameter()->GetExtrinsicMatrix(), 1, 3, tempValue+50);
		tempValue = cvGetReal2D(tracker2->GetCameraParameter()->GetExtrinsicMatrix(), 1, 3);
		cvSetReal2D(tracker2->GetCameraParameter()->GetExtrinsicMatrix(), 1, 3, tempValue+50);

		if(matchedCount1 > FIND_FEATURE_COUNT)
		{
			windage::Matrix4 extrinsic = windage::MultiCameraCoordinate::CalculateExtrinsic(tracker1->GetCameraParameter(), rotation12.Transpose(), translation12);
			calibration->SetExtrinsicMatrix(extrinsic.m1);

			windage::Vector3 temp = windage::Vector3(0.0, 0.0, 0.0);
			CvPoint center = calibration->ConvertWorld2Image(temp.x, temp.y, temp.z);
			CvPoint centerX = calibration->ConvertWorld2Image(temp.x + 50.0, temp.y, temp.z);
			CvPoint centerY = calibration->ConvertWorld2Image(temp.x, temp.y + 50.0, temp.z);
			CvPoint centerZ = calibration->ConvertWorld2Image(temp.x, temp.y, temp.z + 50.0);

			// draw outline
			CvScalar color = CV_RGB(255, 255, 0);
			cvLine(inputImage2, calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), color, 5);
			cvLine(inputImage2, calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), color, 5);
			cvLine(inputImage2, calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), color, 5);
			cvLine(inputImage2, calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), color, 5);

			cvLine(inputImage2, center, centerX, CV_RGB(255, 0, 0), 10);
			cvLine(inputImage2, center, centerY, CV_RGB(0, 255, 0), 10);
			cvLine(inputImage2, center, centerZ, CV_RGB(0, 0, 255), 10);
		}
		if(matchedCount2 > FIND_FEATURE_COUNT)
		{
			windage::Matrix4 extrinsic = windage::MultiCameraCoordinate::CalculateExtrinsic(tracker2->GetCameraParameter(), rotation21.Transpose(), translation21);
			calibration->SetExtrinsicMatrix(extrinsic.m1);

			windage::Vector3 temp = windage::Vector3(0.0, 0.0, 0.0);
			CvPoint center = calibration->ConvertWorld2Image(temp.x, temp.y, temp.z);
			CvPoint centerX = calibration->ConvertWorld2Image(temp.x + 50.0, temp.y, temp.z);
			CvPoint centerY = calibration->ConvertWorld2Image(temp.x, temp.y + 50.0, temp.z);
			CvPoint centerZ = calibration->ConvertWorld2Image(temp.x, temp.y, temp.z + 50.0);

			// draw outline
			CvScalar color = CV_RGB(255, 255, 0);
			cvLine(inputImage1, calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), color, 5);
			cvLine(inputImage1, calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), color, 5);
			cvLine(inputImage1, calibration->ConvertWorld2Image(+REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), color, 5);
			cvLine(inputImage1, calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, +REAL_HEIGHT/4.0, 0.0), 
								calibration->ConvertWorld2Image(-REAL_WIDTH/4.0, -REAL_HEIGHT/4.0, 0.0), color, 5);

			cvLine(inputImage1, center, centerX, CV_RGB(255, 0, 0), 10);
			cvLine(inputImage1, center, centerY, CV_RGB(0, 255, 0), 10);
			cvLine(inputImage1, center, centerZ, CV_RGB(0, 0, 255), 10);
		}
//*/
		sprintf(message, "Tracking Time : %.2f(ms)", trackingTime);
		windage::Utils::DrawTextToImage(inputImage1, cvPoint(20, 30), message);
		windage::Utils::DrawTextToImage(inputImage2, cvPoint(20, 30), message);
		sprintf(message, "FAST feature count : %d, threshold : %d", featureCount1, fastThreshold);
		windage::Utils::DrawTextToImage(inputImage1, cvPoint(20, 50), message);
		sprintf(message, "FAST feature count : %d, threshold : %d", featureCount2, fastThreshold);
		windage::Utils::DrawTextToImage(inputImage2, cvPoint(20, 50), message);
		sprintf(message, "Match count : %d", matchedCount1);
		windage::Utils::DrawTextToImage(inputImage1, cvPoint(20, 70), message);
		sprintf(message, "Match count : %d", matchedCount2);
		windage::Utils::DrawTextToImage(inputImage2, cvPoint(20, 70), message);

		cvShowImage("result1", inputImage1);
		cvShowImage("result2", inputImage2);

		char ch = cvWaitKey();
		switch(ch)
		{
		case 'u':
		case 'U':
			updating = true;
			break;
		case 'q':
		case 'Q':
			processing = false;
			break;
		}
	}

	cvDestroyAllWindows();
}
Example #7
void CUIDesignerView::OnLButtonDown(UINT nFlags, CPoint point)
{
	// TODO: Add your message handler code here and/or call default
	CClientDC dc(this);
	OnPrepareDC(&dc);//Device coordinates to Logical coordinates
	dc.SetWindowOrg(-FORM_OFFSET_X,-FORM_OFFSET_Y);//Logical coordinates to Form coordinates

	CPoint ptLogical=point-m_ptDPtoLP;//Device coordinates to Logical coordinates
	ptLogical.Offset(-FORM_OFFSET_X,-FORM_OFFSET_Y);//Logical coordinates to Form coordinates

	CControlUI* pControl=m_LayoutManager.FindControl(ptLogical);
	CTrackerElement* pTracker=NULL;
	if(pControl==NULL)
		pControl=m_LayoutManager.GetForm();

	int nHit=m_MultiTracker.HitTest(ptLogical);
	int nType=GetControlType(pControl);
	if((nFlags&MK_CONTROL)==0&&nHit==hitNothing)
		m_MultiTracker.RemoveAll();
	if(nHit==hitNothing)
		m_MultiTracker.Add(CreateTracker(pControl));
	else
		m_MultiTracker.SetFocus(ptLogical);

	if(nHit>=0||nType==typeControl)
	{
		m_MultiTracker.Track(this, ptLogical, FALSE,&dc);
	}
	else
	{
		CUITracker tracker;
		int nClass=g_pToolBoxWnd->GetCurSel()->GetClass();
		CRect rect;
		if (tracker.TrackRubberBand(this, point, TRUE))
		{
			rect=tracker.GetRect();
			rect.NormalizeRect();
			rect.OffsetRect(-FORM_OFFSET_X,-FORM_OFFSET_Y);
			if(rect.Width()<10&&rect.Height()<10)
				rect.SetRect(ptLogical.x,ptLogical.y,ptLogical.x+UI_DEFAULT_WIDTH,ptLogical.y+UI_DEFAULT_HEIGHT);
		}
		else
		{
			rect.SetRect(ptLogical.x,ptLogical.y,ptLogical.x+UI_DEFAULT_WIDTH,ptLogical.y+UI_DEFAULT_HEIGHT);
		}

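		// A control class is selected in the toolbox: create the new control, record an undoable add action, and insert it into the UI tree.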
		if(nClass>classPointer)
		{
			CControlUI* pNewControl=m_LayoutManager.NewUI(nClass,rect,pControl);

		
			CArray<CControlUI*,CControlUI*> arrSelected;
			arrSelected.Add(pNewControl);
			m_UICommandHistory.Begin(arrSelected, actionAdd);
			m_UICommandHistory.End();
			g_pClassView->InsertUITreeItem(pNewControl);

			CContainerUI *pContainer = (CContainerUI *)pNewControl->GetInterface(_T("Container"));
			if (pContainer != NULL)
			{
				for (int it = 0; it < pContainer->GetRowCount(); it++)
				{
					g_pClassView->InsertUITreeItem(pContainer->GetItemAt(it));
				}
			}

			g_pToolBoxWnd->SetCurSel(classPointer);

			m_MultiTracker.RemoveAll();
			m_MultiTracker.Add(CreateTracker(pNewControl));
		}
	}

	g_pClassView->SelectUITreeItem(m_MultiTracker.GetFocused());
	if(m_MultiTracker.GetSize()==1)
		g_pPropertiesWnd->ShowProperty(m_MultiTracker.GetFocused());
	else
		g_pPropertiesWnd->HideAllProperties(TRUE,TRUE);
	
	this->Invalidate(FALSE);

// 	__super::OnLButtonDown(nFlags, point);
}
Example #8
File: main.cpp Project: Barbakas/windage
int main(int argc, char ** argv )
{
    osg::ArgumentParser arguments(&argc, argv);
    osg::ref_ptr<osg::Group> root = new osg::Group();
	osg::ref_ptr<osg::Projection>		projectionMatrix;
	osg::ref_ptr<osg::MatrixTransform>	modelViewMatrix;

	osg::ref_ptr<osg::Group>	sceneGroup = new osg::Group();
	osg::ref_ptr<osg::Group>	foregroundGroup = new osg::Group();
	osg::ref_ptr<CNVideoLayer>	videoBackground;
	osg::ref_ptr<osg::Image>	cameraImage = new osg::Image();

	// for checking FPS
	logging = new windage::Logger(&std::cout);
	logging->updateTickCount();

	// initialize tracker
	capture = cvCaptureFromCAM(CV_CAP_ANY);
	input = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 3);
	gray = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1);

	tracker = CreateTracker();
	tracker->GetDetector()->SetThreshold(15.0);

	localCoordinators.resize(TEMPLATE_IMAGE_COUNT);
	for(int i=0; i<TEMPLATE_IMAGE_COUNT; i++)
	{
		char filename[100];
		sprintf_s(filename, TEMPLATE_IMAGE, i);
		IplImage* gray = cvLoadImage(filename, 0);
		tracker->AttatchReferenceImage(gray);
		tracker->TrainingReference(1.0, 1);

		localCoordinators[i] = new osg::MatrixTransform();
	}

	// initialize ar tool
	arTool = new windage::Coordinator::ARForOSG();
	arTool->Initialize(WIDTH, HEIGHT);
	arTool->AttatchCameraParameter(tracker->GetCameraParameter(0));

	// initialize OSG
	// create projection matrix
	arTool->SetProjectionMatrix();
	osg::Matrixd _proj = windageARTool::ForOSG::ConvertOSGMatrix(arTool->GetProjectionMatrix());
	projectionMatrix = new osg::Projection(_proj);
	modelViewMatrix  = new osg::MatrixTransform();

	// setup scene graph
	root->addChild(projectionMatrix.get());
	projectionMatrix->addChild(modelViewMatrix.get());
	modelViewMatrix->addChild(sceneGroup.get());

	// create background scene
	cameraImage->setImage(WIDTH, HEIGHT, 1, GL_BGRA, GL_BGR, GL_UNSIGNED_BYTE, (unsigned char*)input->imageData, osg::Image::NO_DELETE);	
	videoBackground = new CNVideoLayer(cameraImage.get(), 1);
	videoBackground->init();
	sceneGroup->addChild(videoBackground.get());

	// create foreground scene
	foregroundGroup->getOrCreateStateSet()->setRenderBinDetails(100, "RenderBin");
	foregroundGroup->getOrCreateStateSet()->setMode(GL_NORMALIZE, osg::StateAttribute::ON);
	sceneGroup->addChild(foregroundGroup.get());

	// tracker transform
	osg::ref_ptr<osg::MatrixTransform> localCoordinates;
	localCoordinates = new osg::MatrixTransform();
	foregroundGroup->addChild(localCoordinates.get());
	
	// create viewer
	// for OSG
	osgViewer::Viewer* viewer;
	viewer = new osgViewer::Viewer();
	viewer->addEventHandler(new osgViewer::StatsHandler);

	viewer->setUpViewInWindow(10, 40, 640, 480 );
	viewer->addEventHandler(new PickHandler());
	viewer->setSceneData(root.get());
	viewer->setThreadingModel(osgViewer::ViewerBase::ThreadingModel::ThreadPerContext);
	viewer->realize();
	
	// attach osg model
	osg::ref_ptr<osg::MatrixTransform> objectCoordinate = new osg::MatrixTransform();
//	localCoordinates->addChild(objectCoordinate);
	for(int i=0; i<TEMPLATE_IMAGE_COUNT; i++)
	{
		foregroundGroup->addChild(localCoordinators[i]);
		localCoordinators[i]->addChild(objectCoordinate);
	}

	double scaleFactor = 50;
	double x = 0.0;
	double y = 0.0;
	double z = 3.0;
	osg::Matrixd scale;	scale.makeScale(scaleFactor, scaleFactor, scaleFactor);
	osg::Matrixd translate;	translate.makeTranslate(x, y, z);
	objectCoordinate->postMult(translate);
	objectCoordinate->postMult(scale);
	objectCoordinate->addChild(LoadModel("model/cow.osg"));

#ifdef SAVE_RENDERING_IMAGE
    IplImage* saveImage = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);
    IplImage* saveTempImage = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 4);
    bool saving = false;
    writer = cvCreateVideoWriter("rendering.avi", CV_FOURCC_DEFAULT, 30, cvSize(saveImage->width, saveImage->height), 1);

    osg::Image* osgImage = new osg::Image();
    osgImage->allocateImage(saveImage->width, saveImage->height, 1, GL_RGBA, GL_UNSIGNED_BYTE);
    viewer->getCamera()->attach(osg::Camera::COLOR_BUFFER, osgImage);
#endif

	while (!viewer->done())
    {
		GetTrackerCoordinate();
		viewer->frame();

#ifdef SAVE_RENDERING_IMAGE
        std::cout << "read buffer" << std::endl;
        osgImage->readPixels(0, 0, saveImage->width, saveImage->height, GL_RGBA, GL_UNSIGNED_BYTE);
        memcpy(saveTempImage->imageData, osgImage->data(), sizeof(char)*saveImage->width*saveImage->height*4);
        cvCvtColor(saveTempImage, saveImage, CV_RGBA2BGR);
        cvFlip(saveImage, saveImage);
        
        if(writer) cvWriteFrame(writer, saveImage);
#endif
    }

	cvReleaseCapture(&capture);
    return 0;
}
Example #9
File: ctlinplc.cpp Project: Rupan/winscp
HRESULT COleControl::OnActivateInPlace(BOOL bUIActivate, LPMSG pMsg)
{
#ifdef _AFXDLL
	if (m_bOpen)
	{
		m_pWndOpenFrame->SetActiveWindow();
		SendAdvise(OBJECTCODE_SHOWWINDOW);
		return S_OK;
	}
#endif

	// Initialize pointer to in-place site, if necessary.
	if (m_pInPlaceSite == NULL)
	{
		if (m_pClientSite == NULL)
			return E_UNEXPECTED;

		if ((GetControlFlags() & windowlessActivate) &&
			SUCCEEDED(m_pClientSite->QueryInterface(IID_IOleInPlaceSiteWindowless,
			reinterpret_cast<void**>(&m_pInPlaceSiteWndless))))
		{
			m_bInPlaceSiteWndless = m_bInPlaceSiteEx = TRUE;
		}
		else if ((GetControlFlags() & noFlickerActivate) &&
			SUCCEEDED(m_pClientSite->QueryInterface(IID_IOleInPlaceSiteEx,
			reinterpret_cast<void**>(&m_pInPlaceSiteEx))))
		{
			m_bInPlaceSiteEx = TRUE;
		}
		else if (SUCCEEDED(m_pClientSite->QueryInterface(IID_IOleInPlaceSite,
			reinterpret_cast<void**>(&m_pInPlaceSite))))
		{
			m_bInPlaceSiteEx = FALSE;
		}
		else
		{
			m_pInPlaceSite = NULL;
			return E_FAIL;
		}
	}

	ASSERT(m_pInPlaceSite != NULL);

	if ((m_bInPlaceActive && !bUIActivate) || m_bUIActive)
	{
		CWnd* pWndOuter = GetOuterWindow();
		HWND hwndParent;
		if ((pWndOuter != NULL) &&
			SUCCEEDED(m_pInPlaceSite->GetWindow(&hwndParent)) &&
			(hwndParent == ::GetParent(pWndOuter->m_hWnd)))
		{
			::SetWindowPos(pWndOuter->m_hWnd, NULL, 0, 0, 0, 0,
				SWP_NOZORDER|SWP_NOMOVE|SWP_NOSIZE|SWP_NOACTIVATE|
				SWP_SHOWWINDOW);
			OnSetObjectRects(m_rcPos, NULL);
			return S_OK;
		}
	}

	// Check if container allows windowless activation.
	if (m_bInPlaceSiteWndless)
	{
		if (m_pInPlaceSiteWndless->CanWindowlessActivate() != S_OK)
			m_bInPlaceSiteWndless = FALSE;
	}

	HRESULT hr = E_FAIL;
	if (m_pInPlaceSite != NULL)
		hr = m_pInPlaceSite->CanInPlaceActivate();

	if (hr != NOERROR)
	{
		// Site doesn't allow in-place activation.
		return OnOpen(FALSE, pMsg);
	}

	if (!m_bInPlaceActive)
	{
		if (m_bInPlaceSiteEx)
		{
			// flicker-free and/or windowless activation
			BOOL bNoRedraw;
			m_pInPlaceSiteEx->OnInPlaceActivateEx(&bNoRedraw,
				m_bInPlaceSiteWndless ? ACTIVATE_WINDOWLESS : 0);
			if (GetControlFlags() & noFlickerActivate)
				m_bNoRedraw = bNoRedraw;
		}
		else
		{
			// old-style activation
			m_pInPlaceSite->OnInPlaceActivate();
		}
	}

	HWND hwndParent = NULL;

	if (SUCCEEDED(m_pInPlaceSite->GetWindow(&hwndParent)))
	{
		CRect rcClip;
		m_frameInfo.cb = sizeof(OLEINPLACEFRAMEINFO);

		RELEASE(m_pInPlaceFrame);
		RELEASE(m_pInPlaceDoc);

		if (SUCCEEDED(m_pInPlaceSite->GetWindowContext(
						&m_pInPlaceFrame, &m_pInPlaceDoc,
						&m_rcPos, &rcClip, &m_frameInfo)))
		{
			ASSERT(m_pInPlaceFrame != NULL);

			CRect rectClip;
			if (!m_bInPlaceSiteWndless)
			{
				_GetClippingCoordinates(&m_rcPos, &rcClip, rectClip,
					&m_ptOffset);
				m_bInPlaceActive = CreateControlWindow(hwndParent, m_rcPos,
					rectClip);
			}
			else
			{
				m_bInPlaceActive = TRUE;
			}

			if (m_bInPlaceActive)
			{
				if (bUIActivate)
				{
					if (m_bInPlaceSiteEx)
					{
						if (m_pInPlaceSiteEx->RequestUIActivate() != S_OK)
							m_pInPlaceSite->OnUIDeactivate(FALSE);
					}

					BuildSharedMenu();

					m_bUIActive = TRUE;
					m_pInPlaceSite->OnUIActivate();

					m_pInPlaceFrame->SetActiveObject(
						&m_xOleInPlaceActiveObject, NULL);
					if (m_pInPlaceDoc != NULL)
						m_pInPlaceDoc->SetActiveObject(
							&m_xOleInPlaceActiveObject, NULL);

					if (m_hWnd != NULL)
					{
						BOOL bHandles = AmbientShowGrabHandles();
						BOOL bHatching = AmbientShowHatching();

						if (bHandles || bHatching)
							CreateTracker(bHandles, bHatching, rcClip);
					}

					AddFrameLevelUI();

					if (bUIActivate != -1 &&
						(m_hWnd != NULL) && !IsChild(GetFocus()))
					{
						SetFocus();
					}
				}

				// Pass thru the window message that caused us to be activated
				if ((m_hWnd != NULL || m_bInPlaceSiteWndless) && (pMsg != NULL))
					ForwardActivationMsg(pMsg);

				// Send appropriate notifications...
				SendAdvise(OBJECTCODE_SHOWOBJECT);

				return S_OK;
			}
		}
	}

	RELEASE(m_pInPlaceFrame);
	RELEASE(m_pInPlaceDoc);

	return E_FAIL;
}