예제 #1
0
파일: SPED3.cpp 프로젝트: srjek/dcpu16
 void OnReshape(int w, int h) {
     // Guard against a zero-height window so the aspect-ratio
     // division below can never divide by zero.
     if (h == 0)
         h = 1;

     // Cover the entire window with the GL viewport.
     glViewport(0, 0, w, h);

     // Rebuild the projection for the new aspect ratio
     // (fov 30, near plane 1, far plane 10).
     const float aspect = static_cast<float>(w) / static_cast<float>(h);
     buildProjectionMatrix(30, aspect, 1, 10);
 }
// Draws the 3D augmentation over the current background frame: loads a
// camera-calibration-derived projection, then (if a pattern was detected)
// applies the pattern pose and renders the axis + cube model.
void ARDrawingContext::drawAugmentedScene()
{
  // Init augmentation projection from the calibration and the size of the
  // current background image (so GL geometry lines up with the camera view).
  Matrix44 projectionMatrix;
  int w = m_backgroundImage.cols;
  int h = m_backgroundImage.rows;
  buildProjectionMatrix(m_calibration, w, h, projectionMatrix);

  glMatrixMode(GL_PROJECTION);
  glLoadMatrixf(projectionMatrix.data);

  // Reset the modelview matrix before loading the pattern pose below.
  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();

  if (isPatternPresent)
  {
    // Set the pattern transformation: load the detected pose directly as
    // the modelview matrix (Matrix44 data is assumed GL-compatible
    // column-major here — matches getMat44()'s contract elsewhere).
    Matrix44 glMatrix = patternPose.getMat44();
    glLoadMatrixf(reinterpret_cast<const GLfloat*>(&glMatrix.data[0]));

    // Render model on top of the pattern.
    drawCoordinateAxis();
    drawCubeModel();
  }
}
예제 #3
0
// Resizes the back buffer, rebuilds the projection matrix to match, and
// resets the device so the new parameters take effect. Degenerate
// (zero-sized) requests are ignored.
void Render2DDeviceImpl::resize(u32 width, u32 height)
{
	// A zero-sized back buffer is invalid; ignore such requests outright.
	if (width == 0 || height == 0)
		return;

	// Record the new back-buffer dimensions for the device reset.
	m_deviceParameters.BackBufferWidth = width;
	m_deviceParameters.BackBufferHeight = height;

	// Match the projection to the new render-target size, then reset the
	// device so the updated parameters are actually applied.
	buildProjectionMatrix(width, height, m_projectionMatrix);
	resetDevice();
}
예제 #4
0
파일: test.cpp 프로젝트: mkkellogg/GTE
// GLUT reshape callback: keeps the viewport and projection in sync with
// the window size.
void changeSize(int w, int h) {

    // Prevent a divide by zero when the window is too short
    // (you can't make a window of zero width).
    if (h == 0) {
        h = 1;
    }

    // Set the viewport to be the entire window.
    glViewport(0, 0, w, h);

    // Rebuild the projection: 53.13 degree fov, near plane 1, far plane 30.
    const float ratio = static_cast<float>(w) / static_cast<float>(h);
    buildProjectionMatrix(53.13f, ratio, 1.0f, 30.0f);
}
예제 #5
0
// --------------------------------------------------------------------------
// display (no arguments)
// Called whenever the screen needs to be redrawn.
// Return value: none
// --------------------------------------------------------------------------
void display(void)
{
    // Clear the colour and depth buffers for this frame
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    if (cap.isOpened()) {
        // Grab a frame from the camera
        cv::Mat image_raw;
        cap >> image_raw;

        // Undistort using the precomputed remap tables (mapx/mapy)
        cv::Mat image;
        cv::remap(image_raw, image, mapx, mapy, cv::INTER_LINEAR);

        // Show the camera image: convert to RGB and flip vertically so it
        // matches OpenGL's bottom-left origin, then blit as the background.
        cv::Mat rgb;
        cv::cvtColor(image, rgb, cv::COLOR_BGR2RGB);
        cv::flip(rgb, rgb, 0);
        glDepthMask(GL_FALSE);
        glDrawPixels(rgb.cols, rgb.rows, GL_RGB, GL_UNSIGNED_BYTE, rgb.data);

        // BGRA copy of the frame for the marker detector
        cv::Mat bgra;
        cv::cvtColor(image, bgra, cv::COLOR_BGR2BGRA);

        // Hand the frame data over (frame borrows bgra's pixel buffer —
        // bgra must outlive it, which it does in this scope)
        BGRAVideoFrame frame;
        frame.width = bgra.cols;
        frame.height = bgra.rows;
        frame.data = bgra.data;
        frame.stride = bgra.step;

        // Detect markers and collect one pose per detected marker
        MarkerDetector detector(calibration);
        detector.processFrame(frame);
        std::vector<Transformation> transformations = detector.getTransformations();

        // Compute the projection matrix from the camera intrinsics
        Matrix44 projectionMatrix = buildProjectionMatrix(calibration.getIntrinsic(), frame.width, frame.height);

        // Apply the projection matrix
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Reset the modelview matrix
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();

        // Re-enable depth writes for the 3D overlay
        glDepthMask(GL_TRUE);

        // Enable vertex and colour arrays
        glEnableClientState(GL_VERTEX_ARRAY);
        glEnableClientState(GL_COLOR_ARRAY);

        // Save the view matrix
        glPushMatrix();

        // Line width for the coordinate-axis lines
        glLineWidth(3.0f);

        // Axis line vertex arrays: each is a segment from the origin
        // along one unit axis (x,y,z triplets)
        float lineX[] = { 0, 0, 0, 1, 0, 0 };
        float lineY[] = { 0, 0, 0, 0, 1, 0 };
        float lineZ[] = { 0, 0, 0, 0, 0, 1 };

        // 2D quad covering the marker (triangle-strip order)
        const GLfloat squareVertices[] = { -0.5f, -0.5f,
                                            0.5f, -0.5f,
                                           -0.5f,  0.5f,
                                            0.5f,  0.5f };

        // 2D quad colours (RGBA per vertex)
        const GLubyte squareColors[] = { 255, 255,   0, 255,
                                           0, 255, 255, 255,
                                           0,   0,   0,   0,
                                         255,   0, 255, 255 };

        // Draw the AR overlay for every detected marker
        for (size_t i = 0; i < transformations.size(); i++) {
            // Marker pose as a 4x4 transform
            Matrix44 glMatrix = transformations[i].getMat44();

            // Load it as the modelview matrix
            glLoadMatrixf(reinterpret_cast<const GLfloat*>(&glMatrix.data[0]));

            // Draw the 2D quad over the marker
            glEnableClientState(GL_COLOR_ARRAY);
            glVertexPointer(2, GL_FLOAT, 0, squareVertices);
            glColorPointer(4, GL_UNSIGNED_BYTE, 0, squareColors);
            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
            glDisableClientState(GL_COLOR_ARRAY);

            // Scale for the coordinate axes
            float scale = 0.5;
            glScalef(scale, scale, scale);

            // Nudge slightly towards the camera so the axes stay visible
            glTranslatef(0, 0, 0.1f);

            // X axis (red)
            glColor4f(1.0f, 0.0f, 0.0f, 1.0f);
            glVertexPointer(3, GL_FLOAT, 0, lineX);
            glDrawArrays(GL_LINES, 0, 2);

            // Y axis (green)
            glColor4f(0.0f, 1.0f, 0.0f, 1.0f);
            glVertexPointer(3, GL_FLOAT, 0, lineY);
            glDrawArrays(GL_LINES, 0, 2);

            // Z axis (blue)
            glColor4f(0.0f, 0.0f, 1.0f, 1.0f);
            glVertexPointer(3, GL_FLOAT, 0, lineZ);
            glDrawArrays(GL_LINES, 0, 2);
        }

        // Disable vertex arrays
        glDisableClientState(GL_VERTEX_ARRAY);

        // Restore the view matrix
        glPopMatrix();
    }
    // NOTE(review): this excerpt is truncated here — the function's closing
    // brace (and any buffer-swap call) is missing from this snippet.
예제 #6
0
파일: Camera.cpp 프로젝트: fobnn/Banzai
// Per-frame refresh: rebuilds the camera's derived matrices from its
// current state (view first, then projection).
void Camera::update()
{
	buildViewMatrix();
	buildProjectionMatrix();
}
예제 #7
0
// Creates the D3D9 device, compiles the embedded vertex/pixel shaders,
// and sets up the vertex declaration, fallback texture, and CPU-side
// geometry buffers for the 2D renderer.
//
// window: native HWND the device presents into.
// width/height: initial back-buffer size.
// Returns false on any failure (runtime init, device creation, or shader
// compilation), leaving m_device null in that case.
bool Render2DDeviceImpl::create(void *window, u32 width, u32 height)
{
	// Bring up the D3D9 runtime.
	IDirect3D9 *d3d9 = ::Direct3DCreate9(D3D_SDK_VERSION);
	if (!d3d9)
	{
		printf("d3d9 init failed");
		return false;
	}

	// Prefer hardware vertex processing when the adapter supports T&L.
	D3DCAPS9 caps;
	d3d9->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, &caps);

	DWORD devBehaviorFlags = D3DCREATE_MULTITHREADED;
	if (caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT) {
		devBehaviorFlags |= D3DCREATE_HARDWARE_VERTEXPROCESSING;
	}
	else {
		devBehaviorFlags |= D3DCREATE_SOFTWARE_VERTEXPROCESSING;
	}

	// Windowed swap chain, single back buffer, no depth/stencil (this is a
	// 2D renderer), present immediately (no vsync).
	m_deviceParameters.BackBufferWidth = (UINT)width;
	m_deviceParameters.BackBufferHeight = (UINT)height;
	m_deviceParameters.BackBufferFormat = D3DFMT_X8R8G8B8;
	m_deviceParameters.BackBufferCount = 1;
	m_deviceParameters.MultiSampleType = D3DMULTISAMPLE_NONE;
	m_deviceParameters.MultiSampleQuality = 0;
	m_deviceParameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
	m_deviceParameters.hDeviceWindow = (HWND)window;
	m_deviceParameters.Windowed = TRUE;
	m_deviceParameters.EnableAutoDepthStencil = FALSE;
	m_deviceParameters.AutoDepthStencilFormat = D3DFMT_UNKNOWN;
	m_deviceParameters.Flags = 0;
	m_deviceParameters.FullScreen_RefreshRateInHz = D3DPRESENT_RATE_DEFAULT;
	m_deviceParameters.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;

	HRESULT hr = d3d9->CreateDevice(
		D3DADAPTER_DEFAULT,		// primary adapter
		D3DDEVTYPE_HAL,         // device type
		(HWND)window,			// window associated with device
		devBehaviorFlags,		// hw/sw vertex processing chosen above
		&m_deviceParameters,    // present parameters
		&m_device			    // return created device
		);
	if (FAILED(hr))
	{
		d3d9->Release();
		return false;
	}
	// The device holds its own reference to the runtime from here on.
	d3d9->Release();

	if (m_device == 0)
	{
		printf("device create failed");
		return false;
	}

	// Compile the vertex shader; on diagnostics, dump them in debug builds.
	ID3DXBuffer* sbuf = 0;
	ID3DXBuffer* errorMsg = 0;
	D3DXCompileShader(s_shader, s_shaderLength, NULL, NULL, "vsMain", "vs_2_0", 0, &sbuf, &errorMsg, &m_vsConstants);
	if (errorMsg) {
#ifdef _DEBUG
		std::cout << (const char *)errorMsg->GetBufferPointer() << std::endl;
#endif
		errorMsg->Release();
		// BUGFIX: null the pointer after Release — it was previously left
		// dangling and re-tested after the pixel-shader compile below.
		errorMsg = 0;
	}
	if (!sbuf) {
		m_device->Release();
		m_device = 0;
		return false;
	}

	m_device->CreateVertexShader((DWORD *)sbuf->GetBufferPointer(), &m_vertexShader);
	sbuf->Release();
	// BUGFIX: reset sbuf so a failed pixel-shader compile (which may leave
	// the out-parameter untouched) is not mistaken for success.
	sbuf = 0;

	// Compile the pixel shader.
	// BUGFIX: pass &errorMsg for ppErrorMsgs — the original passed NULL and
	// then dereferenced the stale, already-released errorMsg pointer.
	D3DXCompileShader(s_shader, s_shaderLength, NULL, NULL, "psMain", "ps_2_0", 0, &sbuf, &errorMsg, NULL);
	if (errorMsg) {
#ifdef _DEBUG
		std::cout << (const char *)errorMsg->GetBufferPointer() << std::endl;
#endif
		errorMsg->Release();
		errorMsg = 0;
	}
	if (!sbuf) {
		m_vertexShader->Release();
		m_vertexShader = 0;
		m_device->Release();
		m_device = 0;
		return false;
	}
	m_device->CreatePixelShader((DWORD *)sbuf->GetBufferPointer(), &m_pixelShader);
	sbuf->Release();

	// Handles to the two transform-matrix shader constants.
	m_hProjMat = m_vsConstants->GetConstantByName(0, "projMat");
	m_hViewMat = m_vsConstants->GetConstantByName(0, "viewMat");

	// Vertex layout: 2D position (8 bytes), packed colour (4 bytes), UV.
	D3DVERTEXELEMENT9 elements[] = {
		{ 0, 0, D3DDECLTYPE_FLOAT2,		D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION },
		{ 0, 8, D3DDECLTYPE_D3DCOLOR,	D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_COLOR },
		{ 0, 12, D3DDECLTYPE_FLOAT2,	D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD },
		D3DDECL_END()
	};
	hr = m_device->CreateVertexDeclaration(elements, &m_vertexFormat);

	// 1x1 opaque white fallback texture for untextured draws.
	m_nullTexture = vnnew Texture2DImpl();
	m_nullTexture->createTexture(m_device, 1, 1);
	D3DLOCKED_RECT ret;
	m_nullTexture->m_texture->LockRect(0, &ret, NULL, 0);
	*(u32 *)ret.pBits = 0xFFFFFFFF;
	m_nullTexture->m_texture->UnlockRect(0);

	// CPU-side staging buffers for batched geometry.
	m_vertices = vnmalloc(Vertex, kMaxVertices);
	m_indices = vnmalloc(u16, kMaxIndices);

	TextureManager::instance().setDelegate(this);

	buildProjectionMatrix(width, height, m_projectionMatrix);

	applyRenderStates();

	return true;
}