Пример #1
0
	// Creation handler: builds the quad's vertex buffers and assembles the
	// per-object draw data (textures, world transform, fog) for the renderer.
	void SquareObject::OnCreate() {

		CreateBuffers(m_Scale.x, m_Scale.y);

		// Define the world matrix (scale / rotate / translate)
		Mat4x4 World;
		World.affineTransformation(
			m_Scale,
			Vec3(0, 0, 0),
			m_Qt,
			m_Pos
		);

		// Diffuse and normal-map textures, looked up by resource name
		auto TexPtr = App::GetApp()->GetResource<TextureResource>(m_TextureResName);
		auto NormTexPtr = App::GetApp()->GetResource<TextureResource>(m_NormalTextureResName);

		// Build the draw object consumed by the rendering pass
		m_PtrObj = make_shared<BcDrawObject>();
		m_PtrObj->m_MeshRes = m_SquareMesh;
		m_PtrObj->m_TextureRes = TexPtr;
		m_PtrObj->m_NormalTextureRes = NormTexPtr;
		m_PtrObj->m_WorldMatrix = World;
		m_PtrObj->m_Camera = GetStage<Stage>()->GetCamera();
		m_PtrObj->m_OwnShadowmapActive = true;	// receives shadows
		m_PtrObj->m_SamplerState = SamplerState::LinearWrap;
		m_PtrObj->m_ShadowmapUse = false;	// but does not cast into the shadow map
		m_PtrObj->m_FogEnabled = true;
		// Keep the fog fairly strong
		m_PtrObj->m_FogColor = Col4(0.3f, 0.3f, 0.3f, 1.0f);
		m_PtrObj->m_FogStart = -10.0f;	// NOTE(review): negative start/end — presumably view-space Z; confirm
		m_PtrObj->m_FogEnd = -30.0f;


	}
// One-time emitter setup: seeds simulation parameters, allocates the CPU-side
// particle pool, builds GPU buffers, and compiles the draw/update shader
// programs (the update program captures output via transform feedback).
void ParticleEmitter::Initialize()
{
	activeBuffer = 0;

	// Simulation parameters
	startSize = 0.01;	// NOTE(review): double literals — use 0.01f if these members are float
	endSize = 0.01;
	minVelocity = 0;
	maxVelocity = 5;
	minLifeSpan = 1;
	maxLifeSpan = 3;

	maxParticles = 100000;

	// NOTE(review): raw new with no matching delete[] visible here — leaks if
	// Initialize() is ever called twice; confirm ownership/teardown elsewhere.
	particles = new Particle[maxParticles];

	CreateBuffers();

	drawShader.CreateShaderProgram("ParticleDraw.vert", "ParticleDraw.geom", "ParticleDraw.frag");
	drawShader.setFloat("startSize", startSize);
	drawShader.setFloat("endSize", endSize);
	
	// Varyings captured by transform feedback during the update pass
	const char* varyings[] = { "vPosition", "vVelocity", "vLifetime", "vLifespan" };
	updateShader.CreateShaderProgram("ParticleUpdate.vert", varyings, 4);
	updateShader.setFloat("minLife", minLifeSpan);
	updateShader.setFloat("maxLife", maxLifeSpan);
	updateShader.setFloat("minVelocity", minVelocity);
	updateShader.setFloat("maxVelocity", maxVelocity);
}
Пример #3
0
// Called when a remote computer wants to connect to us
// When WSAAccept accepted the connection, it created a new socket hSocket for it and wrote the remote IP in pHost
void CConnection::AcceptFrom(SOCKET hSocket, SOCKADDR_IN* pHost)
{
	// Make sure the newly accepted socket is valid
	ASSERT( ! IsValid() );

	// Record the connection information here
	m_hSocket		= hSocket;							// Keep the socket here
	m_pHost			= *pHost;							// Copy the remote IP address into this object
	m_sAddress		= inet_ntoa( m_pHost.sin_addr );	// Store it as a string also
	UpdateCountry();

	// Make new input and output buffer objects
	ASSERT( m_pInput == NULL );
	ASSERT( m_pOutput == NULL );
	CreateBuffers();

	// Facts about the connection
	m_bInitiated	= FALSE;			// We didn't initiate this connection
	m_bConnected	= TRUE;				// We're connected right now
	m_tConnected	= GetTickCount();	// Record the time this happened

	// Choose asynchronous, non-blocking reading and writing on the new socket
	// NOTE(review): ioctlsocket's return value is ignored — failure would leave
	// the socket blocking; confirm this is acceptable here.
	DWORD dwValue = 1;
	ioctlsocket( m_hSocket, FIONBIO, &dwValue );

	// Record one more incoming connection in the statistics
	Statistics.Current.Connections.Incoming++;
}
Пример #4
0
//Main function
// Creates a GLUT window, initializes GLEW, builds GPU buffers, then hands
// control to the GLUT main loop. Returns 1 if GLEW initialization fails.
int main(int argc, char** argv)
{
	glutInit(&argc, argv);
	glutInitWindowSize(400, 400);
	glutInitWindowPosition(50, 50);
	glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA);
	glutCreateWindow("OpenGL Tutorial");
	//glutFullScreen();
	
	InitGlutCallbacks();
	
	// GLEW must be initialized after a GL context exists (window created above)
	glewExperimental = GL_TRUE;
	GLenum res = glewInit();
	if(res != GLEW_OK)
	{
		fprintf(stderr, "Error: '%s'\n", glewGetErrorString(res));
		return 1;
	}
	
	InitializeProgram();
	CreateBuffers();
	
	glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
	glutMainLoop();
	return 0; // not reached: glutMainLoop() never returns; explicit for clarity
}
// Rotates one frame by 180 degrees on the OpenCL device.
// Uploads the Y and UV planes, runs one kernel per plane, then reads the
// rotated planes back into the caller's output buffers (synchronous overall:
// finish() at the end blocks until everything completes).
void OpenCLRotator180Context::Rotate(size_t width, size_t height,
                                     size_t pitchIn, size_t pitchOut,
                                     void *pInY,  void *pInUV,
                                     void *pOutY, void *pOutUV)
{
    if (!pInY || !pInUV || !pOutY || !pOutUV)
        throw cl::Error(CL_INVALID_VALUE);

    // Image regions. width/4: each image texel presumably packs 4 bytes of the
    // plane — confirm against the image format chosen in CreateBuffers.
    cl::size_t<3> origin  = make_size_t(0, 0, 0);
    cl::size_t<3> Y_size  = make_size_t(width / 4, height, 1);
    cl::size_t<3> UV_size = make_size_t(width / 4, height/2, 1);

    CreateBuffers(Y_size, UV_size);

    SetKernelArgs();

    // Non-blocking writes (blocking flag = 0); ordering relies on the
    // in-order command queue and the finish() below.
    m_queue.enqueueWriteImage(m_InY, 0, origin, Y_size, pitchIn, 0, pInY);
    m_queue.enqueueWriteImage(m_InUV, 0, origin, UV_size, pitchIn, 0, pInUV);

    // One work-item per texel, 1x1 work-groups
    m_queue.enqueueNDRangeKernel(m_kernelY, cl::NullRange,
            cl::NDRange(Y_size[0],Y_size[1]),
            cl::NDRange(1,1));
    m_queue.enqueueNDRangeKernel(m_kernelUV, cl::NullRange,
            cl::NDRange(UV_size[0],UV_size[1]),
            cl::NDRange(1,1));

    m_queue.enqueueReadImage(m_OutY, 0, origin, Y_size, pitchOut, 0, pOutY);
    m_queue.enqueueReadImage(m_OutUV, 0, origin, UV_size, pitchOut, 0, pOutUV);

    m_queue.finish();
}
Пример #6
0
// vout display Control callback for the XCB RENDER display module.
// Geometry-affecting requests resize the destination window and rebuild the
// picture buffers; anything else is rejected.
static int Control(vout_display_t *vd, int query, va_list ap)
{
    vout_display_sys_t *sys = vd->sys;

    switch (query) {
        case VOUT_DISPLAY_CHANGE_DISPLAY_SIZE:
        case VOUT_DISPLAY_CHANGE_DISPLAY_FILLED:
        case VOUT_DISPLAY_CHANGE_ZOOM:
        case VOUT_DISPLAY_CHANGE_SOURCE_ASPECT:
        case VOUT_DISPLAY_CHANGE_SOURCE_CROP: {
            const vout_display_cfg_t *cfg = va_arg(ap,
                                                   const vout_display_cfg_t *);

            /* Update the window size */
            uint32_t mask = XCB_CONFIG_WINDOW_WIDTH | XCB_CONFIG_WINDOW_HEIGHT;
            const uint32_t values[] = {
                cfg->display.width, cfg->display.height
            };

            xcb_configure_window(sys->conn, sys->drawable.dest, mask, values);
            /* Old buffers no longer match the new geometry: recreate them */
            DeleteBuffers(vd);
            CreateBuffers(vd, cfg);
            xcb_flush(sys->conn);
            return VLC_SUCCESS;
        }

        case VOUT_DISPLAY_RESET_PICTURES:
            vlc_assert_unreachable();
        default:
            msg_Err(vd, "Unknown request in XCB RENDER display");
            return VLC_EGENERIC;
    }
}
Пример #7
0
// (Re)initializes the renderer: frees any previously allocated color/depth
// buffers, rebuilds the projection matrix from the new parameters, then
// allocates buffers for the new size. Always returns true (kept for
// interface compatibility with existing callers).
const bool AsciiRenderer::Init(const unsigned int width, const unsigned int height, float fov, float nearClip, float farClip)
{
	// Free the old row-of-rows buffers using the OLD m_Height (m_Height is
	// only updated later by CreateBuffers).
	if(m_ColorBuffer != NULL)
	{
		for(unsigned int y = 0; y < m_Height; ++y)
		{
			delete[] m_ColorBuffer[y];
		}
		delete[] m_ColorBuffer;
		m_ColorBuffer = NULL; // fix: don't leave a dangling pointer if a later step fails
	}
	if(m_DepthBuffer != NULL)
	{
		for(unsigned int y = 0; y < m_Height; ++y)
		{
			delete[] m_DepthBuffer[y];
		}
		delete[] m_DepthBuffer;
		m_DepthBuffer = NULL; // fix: same — re-entering Init() must not double-delete
	}
	assert(width > 0);
	assert(height > 0);
	m_ProjectionMatrix = Matrix4x4f::PerspectiveProjection(fov, nearClip, farClip, float(width)/float(height));
	m_NearClip = nearClip;
	m_FarClip = farClip;
	CreateBuffers(width, height); //calls clearDepth which needs farClip, thus last
	return true;
}
Пример #8
0
// Initializes the OpenGL video output: base-class init, GL context, video
// buffers, pause frame, OpenGL state and OSD. Failures are accumulated in
// `success` (&=), and any partially created state is torn down before
// returning false.
bool VideoOutputOpenGL::Init(int width, int height, float aspect, WId winid,
                             const QRect &win_rect, MythCodecID codec_id)
{
    QMutexLocker locker(&gl_context_lock);

    bool success = true;
    // FIXME Mac OS X overlay does not work with preview
    window.SetAllowPreviewEPG(true);
    gl_parent_win = winid;

    VideoOutput::Init(width, height, aspect, winid, win_rect, codec_id);

    SetProfile();
    InitPictureAttributes();
    success &= SetupContext();
    InitDisplayMeasurements(width, height, false);
    success &= CreateBuffers();
    success &= CreatePauseFrame();
    success &= SetupOpenGL();
    InitOSD();
    MoveResize();

    if (!success)
        TearDown();

    return success;
}
// Constructs the Perlin-noise ocean renderer: loads textures, creates the
// patch quad VAO/VBO (rendered as GL_PATCHES with 4 control points), uploads
// the per-map model matrices, and compiles the shaders.
Renderer::Renderer(const Desc & desc)
	: RendererHelper<1>("PerlinNoiseOceanRenderer", "PerlinNoiseOceanWireFrameRenderer", Renderer::ERenderPass::Deferred_Pass)
	, mHeightMapCS(nullptr)
	, mCubeMapTexture(Engine::GetInstance()->GetTextureManager()->LoadTextureCubeMap(desc.mSkyboxCubeMapTextureFilename))
	, mOceanColorTexture(Engine::GetInstance()->GetTextureManager()->LoadTexture2D("medias/textures/OceanColor256.tif", GL_REPEAT, GL_REPEAT))
	//, mOceanColorTexture(Engine::GetInstance()->GetTextureManager()->GetDefaultTexture2D())
	, mHeightMapTextureSize(desc.mHeightMapTextureSize)
	, mMapSize(desc.mMapWidth, desc.mMapDepth)
	, mPatchCount(desc.mMapWidth / 64, desc.mMapDepth / 64)
	, mMapCount(0)
	, mDrawNormalShader("TerrainDrawNormals")
{
	PRINT_BEGIN_SECTION;
	PRINT_MESSAGE("Initialize PerlinNoiseOceanRenderer.....");

	// A single unit quad on the XZ plane; tessellation expands it per patch
	const glm::vec3 vertices[] =
	{
		glm::vec3(0.0f,	0.0f, 0.0f),
		glm::vec3(1.0f,	0.0f, 0.0f),
		glm::vec3(0.0f,	0.0f, 1.0f),
		glm::vec3(1.0f,	0.0f, 1.0f)
	};
		

	//setup vao and vbo stuff
	CreateBuffers();

	glBindVertexArray(mVaoID);

	glBindBuffer(GL_ARRAY_BUFFER, mVboIDs[VertexArrayBufferIndex]);
	glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
	GL_CHECK_ERRORS;

	glEnableVertexAttribArray(Shader::POSITION_ATTRIBUTE);
	glVertexAttribPointer(Shader::POSITION_ATTRIBUTE, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), 0);
	GL_CHECK_ERRORS;

	// Each patch fed to the tessellator has 4 control points (the quad above)
	glPatchParameteri(GL_PATCH_VERTICES, 4);

	glBindVertexArray(0);

	GL_CHECK_ERRORS;

	mMapCount = (GLint)desc.mMaps.size();

	// Stage the per-map model matrices for upload.
	// NOTE(review): modelMatrixBuffer is never delete[]d in this scope — if
	// CreateResource copies the data this leaks; confirm ownership semantics.
	PerMapData * modelMatrixBuffer = new PerMapData[mMapCount];
	for (int i = 0; i < mMapCount; ++i)
	{
		modelMatrixBuffer[i].mModelDQ = desc.mMaps[i].mModelDQ;
	}
	mModelMatrixBuffer.CreateResource(GL_STATIC_DRAW, GL_RGBA32F, (mMapCount * sizeof(PerMapData)), modelMatrixBuffer);

	LoadShaders(desc);

	mIsInitialized = true;

	PRINT_MESSAGE(".....PerlinNoiseOceanRenderer initialized!");
	PRINT_END_SECTION;
}
// Constructs the height-map compute-shader helper and immediately creates its
// GPU buffers. `precomputeNormals` selects whether normals are generated in
// the compute pass.
HeightMapCS::HeightMapCS(bool precomputeNormals)
	: ComputeShaderHelper<1>("DeepOceanComputeShader")
	, mWaveCount(0)
	, mHeightMapTextureId(0)
	, mPrecomputeNormals(precomputeNormals)
{
	CreateBuffers();
}
Пример #11
0
// Top-level game bootstrap: creates the window, camera and D3D11 swap chain,
// starts the deferred-rendering pipeline, loads scene assets (heightmap,
// skull mesh), and sets up SSAO and shadow mapping.
// NOTE(review): parameters gametime, fps are unused — confirm intent.
void GameSystem::StartGame(float gametime, float fps,HINSTANCE hinstance)
{
	//window
	mainHwnd.CreateHwnd(hinstance,WndProc);
	//time
	gameTime.Reset();
	gameTime.Update();	

	//Camera
	cam.SetProj(0.35*XM_PI, ScreenWidth / (ScreenHeight*1.0f), 0.5f, 20000.0f);

	cam.Update();
	// dx
	// NOTE(review): GetBuffer/CreateRenderTargetView HRESULTs are unchecked.
	if (SUCCEEDED(CreateSwapChain()))
	{
		ID3D11Texture2D* pSwapChainBuffer = 0;
		swapChain->GetBuffer(0, _uuidof(ID3D11Texture2D), (void**)&pSwapChainBuffer);

		device->CreateRenderTargetView(pSwapChainBuffer, NULL, &Backbuffer);

		directX.init(device, deviceContext);

		pSwapChainBuffer->Release();
	}
	//create shaders

	//TEST
	//createShaders();

	//Deferred rendering pipeline
	DeferedRendering.StartUp(device, deviceContext,swapChain);

	//Create Objects etc....

	//TEST
	//setShaders();

	//store matrices (transposed for HLSL column-major consumption)
	XMStoreFloat4x4(&matrix.World, XMMatrixTranspose(XMMatrixScaling(1.0, 1.0, 1.0)));
	XMStoreFloat4x4(&matrix.View, XMMatrixTranspose(cam.GetViewMa()));
	XMStoreFloat4x4(&matrix.Proj, XMMatrixTranspose(cam.GetProjMa()));
	//CreateBuffer
	CreateBuffers();

	

	hMap.CreateMap(256,256,256,256,device,deviceContext);
	hMap.setupFrust(256, 256, device);
	obj.LoadObjFile(L"skull.obj");
	obj.createTexture(device, deviceContext, L"teapot.png");
	obj.createbuff(device);

	Ssao.startUp(device, deviceContext);

	//View projection from sunlight
	shadow.StartUp(device,deviceContext,sunMatrix);
}
 // Creates the renderer's GPU buffers, then performs one-time shader and
 // texture setup when none of them exist yet.
 // NOTE(review): the && means setup is skipped if ANY of the three already
 // exists — confirm || (any-missing) was not intended.
 void CS_Renderer::Initialize()
 {
   CreateBuffers();
   if (!shader && !computeshader && !texture)
   {
     InitShaders();
     LoadTexture();
   }
 }
Пример #13
0
// Loads the simplified Sponza scene used for navigation testing and prepares
// its shader and GPU buffers.
AdvancedNav::AdvancedNav()
{
	// NOTE(review): load()'s return value is ignored — a missing/corrupt file
	// goes undetected here; confirm FBXFile reports errors elsewhere.
	m_sponza = new FBXFile();
	
	m_sponza->load("./Resources/SponzaSimple.fbx", FBXFile::UNITS_CENTIMETER);

	CreateShader();
	CreateBuffers();

}
Пример #14
0
	// Builds a fixed 200x200 terrain grid: generates the vertex/index lists on
	// the CPU, then uploads them into RHI buffers on the given device.
	Grid::Grid(shared_ptr<RHI_Device> rhiDevice)
	{
		m_indexCount	= 0;
		m_terrainWidth	= 200;
		m_terrainHeight = 200;

		// CPU-side geometry, filled in by BuildGrid
		vector<RHI_Vertex_PosCol> gridVertices;
		vector<unsigned> gridIndices;
		BuildGrid(&gridVertices, &gridIndices);

		// Hand the geometry to the GPU
		CreateBuffers(gridVertices, gridIndices, rhiDevice);
	}
Пример #15
0
	// Builds a renderable entity: looks up the named mesh from the asset
	// importer, places it with a translation matrix, and creates its buffers.
	ViewEntity::ViewEntity(Framework::D3DWrapper* wrapper, Framework::AssetImporter* assetImporter, std::string meshName, D3DXVECTOR3 pos)				
	{
		m_D3dwrapper = wrapper;
		m_assetImporter = assetImporter; 
		// NOTE(review): GetMeshes() result is not null-checked, and m_mesh is
		// left unassigned when the mesh list is empty — confirm callers cope.
		std::vector<Framework::WSMesh> *vMesh = m_assetImporter->GetMeshes(meshName); 
		if(vMesh->size() > 0) 
			m_mesh = &vMesh->at(0); 
		
		D3DXMatrixTranslation(&this->m_worldMatrix, pos.x, pos.y, pos.z); 

		CreateBuffers(); 
	}	
Пример #16
0
// Constructs a UV sphere description and builds its GPU-side geometry.
// `rings`/`sectors` control the latitude/longitude tessellation density.
Sphere::Sphere(glm::vec3 pos, float radius, unsigned int rings, unsigned int sectors)
{
    // Cache the description; the center starts at the given position.
    m_pos = pos;
    m_centerPos = pos;
    m_radius = radius;
    m_rings = rings;
    m_sectors = sectors;

    // Allocate GPU-side storage first, then fill in the geometry.
    CreateBuffers();
    GenerateVertices();
    GenerateIndexes();
    GenerateNormals();
}
Пример #17
0
// Constructs the renderer against an existing D3D11 device.
// Every owned D3D resource pointer starts cleared; the buffers themselves
// are created immediately below.
BasicRenderer::BasicRenderer(ID3D11Device* pd3dDevice)
{
	m_pd3dDevice = pd3dDevice;
	m_context = NULL;
	m_pVertexShaderP = NULL;
	m_pPixelShaderP = NULL;
	m_pVertexLayout = NULL;
	m_pConstantBufferPerFrame = NULL;
	m_pConstantBufferPerDraw = NULL;
	m_pWireFrameRS = NULL;
	CreateBuffers();
}
//--------------------------------------------------------------------------------------
// Loads a wave file, normalizes it, and uploads the samples as complex pairs
// (real = sample, imaginary = 0) into a uiTexSizeX x g_uiTexY texture used as
// FFT input for the spectrogram. Returns E_FAIL on load/size problems, or the
// failing HRESULT from buffer creation.
HRESULT LoadAudioIntoBuffer( ID3D10Device* pd3dDevice, UINT uiTexSizeX, LPCTSTR szFileName )
{
    HRESULT hr = S_OK;

    // Load the wave file
    CAudioData audioData;
    if( !audioData.LoadWaveFile( ( TCHAR* )szFileName ) )
        return E_FAIL;

    // Normalize the data
    audioData.NormalizeData();

    // If we have data, get the number of samples
    unsigned long ulNumSamples = audioData.GetNumSamples();

    // Guard against degenerate input: the expression below,
    // ( ulNumSamples / uiTexSizeX ) - 1, underflows to a huge unsigned value
    // when the file holds fewer than 2*uiTexSizeX samples (or uiTexSizeX is 0).
    if( uiTexSizeX == 0 || ulNumSamples / uiTexSizeX < 2 )
        return E_FAIL;

    // Find out how much Y space (time) our spectogram will need
    g_uiTexY = ( ulNumSamples / uiTexSizeX ) - 1;

    // Create a texture large enough to hold our data
    hr = CreateBuffers( pd3dDevice, uiTexSizeX, g_uiTexY );
    if( FAILED( hr ) )
        return hr;

    // Create temp storage with space for imaginary data
    unsigned long size = uiTexSizeX * g_uiTexY;
    D3DXVECTOR2* pvData = new D3DXVECTOR2[ size ];
    if( !pvData )    // NOTE: plain new throws on failure, so this is defensive only
        return E_OUTOFMEMORY;

    // Real part = sample value; imaginary part = 0; zero-pad past the end
    float* pDataPtr = audioData.GetChannelPtr( 0 );
    for( unsigned long s = 0; s < size; s++ )
    {
        if( s < ulNumSamples )
        {
            pvData[s].x = pDataPtr[ s ];
            pvData[s].y = 0.0f;
        }
        else
        {
            pvData[s] = D3DXVECTOR2( 0.0f, 0.0f );
        }
    }

    // Update the texture with this information
    pd3dDevice->UpdateSubresource( g_pSourceTexture, D3D10CalcSubresource( 0, 0, 1 ), NULL,
                                   pvData, uiTexSizeX * sizeof( D3DXVECTOR2 ), 0 );

    SAFE_DELETE_ARRAY( pvData );

    return hr;
}
Пример #19
0
// Initializes the hash-based SDF scene representation: stores the hash/SDF
// sizing parameters and creates the GPU buffers (V_RETURN propagates a
// failing HRESULT). `justHash` skips SDF block storage.
HRESULT DX11SceneRepHashSDF::Init( ID3D11Device* pd3dDevice, bool justHash /*= false*/, unsigned int hashNumBuckets /*= 300000*/, unsigned int hashBucketSize /*= 10*/, unsigned int numSDFBlocks /*= 100000*/, float voxelSize /*= 0.005f*/ )
{
	HRESULT hr = S_OK;

	m_JustHashAndNoSDFBlocks = justHash;
	m_HashNumBuckets = hashNumBuckets;
	m_HashBucketSize = hashBucketSize;
	m_VirtualVoxelSize = voxelSize;
	m_SDFNumBlocks = numSDFBlocks;

	V_RETURN(CreateBuffers(pd3dDevice));

	return hr;
}
Пример #20
0
    // Constructs the example: verifies the renderer supports compute shaders,
    // then creates the buffers and both pipelines.
    Example_ComputeShader() :
        ExampleBase { L"LLGL Example: Compute Shader", { 800, 800 } }
    {
        // Check if compute shaders are supported
        const auto& renderCaps = renderer->GetRenderingCaps();

        if (!renderCaps.features.hasComputeShaders)
            throw std::runtime_error("compute shaders are not supported by this renderer");

        // Create all graphics objects
        CreateBuffers();
        CreateComputePipeline();
        CreateGraphicsPipeline();
    }
Пример #21
0
// BBufferConsumer hook: the producer changed the connection's format.
// Rebuilds the video buffers for the new frame size/colorspace, preferring an
// overlay buffer when overlays were in use, and falling back to a plain
// bitmap buffer if overlay creation fails.
status_t
VideoNode::FormatChanged(const media_source &src,
						 const media_destination &dst,
						 int32 from_change_count,
						 const media_format &format)
{
	printf("VideoNode::FormatChanged enter\n");
	if (src != fInput.source)
		return B_MEDIA_BAD_SOURCE;
	if (dst != fInput.destination)
		return B_MEDIA_BAD_DESTINATION;

	color_space colorspace = format.u.raw_video.display.format;
	BRect		frame(0, 0, format.u.raw_video.display.line_width - 1, format.u.raw_video.display.line_count - 1);
	status_t	err;

	// Old buffers never match a new format: drop them before recreating
	DeleteBuffers();
	if (fOverlayEnabled) {
		fVideoView->RemoveOverlay();
		err = CreateBuffers(frame, colorspace, true); // try overlay
		if (err) {
			printf("VideoNode::FormatChanged creating overlay buffer failed\n");
			err = CreateBuffers(frame, colorspace, false); // no overlay
		}
	} else {
		err = CreateBuffers(frame, colorspace, false); // no overlay
	}
	if (err) {
		printf("VideoNode::FormatChanged failed (lost buffer group!)\n");
		return B_MEDIA_BAD_FORMAT;
	}	

	fInput.format = format;

	printf("VideoNode::FormatChanged leave\n");
	return B_OK;	
}
Пример #22
0
	// Creation handler: builds the sphere mesh buffers, registers a fixed
	// sphere rigid body with the stage, and assembles the draw and shadow-map
	// draw data for the renderer.
	void BcSphereObject::OnCreate() {
		CreateBuffers();

		// Initialize the rigid body
		auto PtrGameStage = GetStage<GameStage>();
		Rigidbody body;
		body.m_Owner = GetThis<GameObject>();
		body.m_Mass = 1.0f;
		body.m_Scale = m_Scale;
		body.m_Quat = m_Qt;
		body.m_Pos = m_Pos;
		body.m_CollType = CollType::typeSPHERE;
		body.m_IsFixed = true;
//		body.m_IsDrawActive = true;
		body.SetToBefore();
		PtrGameStage->AddRigidbody(body);


		// Define the world matrix (scale / rotate / translate)
		Mat4x4 World;
		World.affineTransformation(
			m_Scale,
			Vec3(0, 0, 0),
			m_Qt,
			m_Pos
		);
		auto TexPtr = App::GetApp()->GetResource<TextureResource>(m_TextureResName);
		// Build the draw data
		m_PtrObj = make_shared<BcDrawObject>();
		m_PtrObj->m_MeshRes = m_SphereMesh;
		m_PtrObj->m_TextureRes = TexPtr;
		m_PtrObj->m_WorldMatrix = World;
		m_PtrObj->m_Camera = GetStage<Stage>()->GetCamera();
		m_PtrObj->m_OwnShadowmapActive = false;
		m_PtrObj->m_ShadowmapUse = true;
		m_PtrObj->m_FogEnabled = true;
		// Give the fog a bluish tint
		m_PtrObj->m_FogColor = Col4(0.4f, 0.4f, 0.8f, 1.0f);
		m_PtrObj->m_FogStart = -10.0f;
		m_PtrObj->m_FogEnd = -30.0f;

		// Build the shadow-map draw data
		m_PtrShadowmapObj = make_shared<ShadowmapObject>();
		m_PtrShadowmapObj->m_MeshRes = m_SphereMesh;
		// Copy the world matrix from the draw data
		m_PtrShadowmapObj->m_WorldMatrix = World;
		m_PtrShadowmapObj->m_Camera = GetStage<Stage>()->GetCamera();
	}
Пример #23
0
      // Validates and stores LZHAM decoder properties supplied by the host,
      // then (re)creates the decode buffers. Returns E_FAIL on a size or
      // version mismatch, otherwise the result of CreateBuffers().
      STDMETHODIMP CDecoder::SetDecoderProperties2(const Byte *prop, UInt32 size)
      {
         // Reject before interpreting the blob at all.
         if (size != sizeof(CProps))
            return E_FAIL;

         // Const-correct view of the caller's blob (the original C-style cast
         // silently dropped const).
         const CProps *pProps = reinterpret_cast<const CProps*>(prop);

         if (pProps->_ver != LZHAM_PROPS_VER)
            return E_FAIL;

         memcpy(&_props, pProps, sizeof(CProps));

         _propsWereSet = true;

         return CreateBuffers();
      }
Пример #24
0
// BBufferConsumer hook: the producer changed the connection's format.
// Validates that the notification is for our connection, stores the new
// format, and rebuilds the buffers for it (returning CreateBuffers' status).
status_t
VideoConsumer::FormatChanged(const media_source& producer,
	const media_destination& consumer, int32 fromChangeCount,
	const media_format& format)
{
	FUNCTION("VideoConsumer::FormatChanged\n");
	
	if (consumer != fIn.destination)
		return B_MEDIA_BAD_DESTINATION;

	if (producer != fIn.source)
		return B_MEDIA_BAD_SOURCE;

	fIn.format = format;
	
	return CreateBuffers(format);
}
Пример #25
0
//===============================================================================================================================
//GrassFieldMesh::GrassFieldMesh(D3D* d3d, int fieldSize, int width, vector<float> heightmap, XMFLOAT3 Vertex, XMFLOAT3 rot, XMFLOAT3 scale, string textureName)
// Builds a heightmap-driven grass-field custom mesh, wiring in its dedicated
// grass-field shader before geometry creation.
GrassFieldMesh::GrassFieldMesh(D3D* d3d, int fieldSize, int width, vector<float> heightmap, ZShadeSandboxMesh::MeshParameters mp)
    :	ZShadeSandboxMesh::CustomMesh( d3d, mp )
    ,	m_RenderShader(true)
    ,	m_FieldSize(fieldSize)
    ,	m_Width(width)
    ,	m_Heightmap(heightmap)
{
    mMeshType = ZShadeSandboxMesh::EMeshType::CUSTOM;

    m_pGrassFieldShader = new GrassFieldShader(mD3DSystem);//, "Vegitation\\Grass\\GrassField.fxo");

    // NOTE(review): mp is a local copy mutated AFTER the base class already
    // consumed it; this only matters if Initialize()/CreateBuffers() re-read
    // mp through some path — confirm.
    mp.shader = m_pGrassFieldShader;
    mp.useCustomShader = true;

    Initialize();
    CreateBuffers();
}
Пример #26
0
	// Creation handler: builds the cube's vertex buffers, resets its transform,
	// and sets up the full D3D12 drawing pipeline for this object.
	void CubeObject::OnCreate() {
		CreateBuffers();
		m_Scale = Vec3(1.0f, 1.0f, 1.0f);
		m_Qt.identity();
		m_Pos = Vec3(0, 0, 0.0);
		/// Create the root signature
		CreateRootSignature();
		/// Create the descriptor heap
		CreateDescriptorHeap();
		/// Create the constant buffer
		CreateConstantBuffer();
		/// Create the pipeline state
		CreatePipelineState();
		/// Create the command list
		CreateCommandList();
		// Push the initial transform into the constant buffer
		UpdateConstantBuffer();
	}
Пример #27
0
// Constructs a GPU fire particle system using ping-pong buffers updated via
// transform feedback: allocates GL buffer/texture names, compiles the render
// and animate programs, and loads the fire texture.
FireParticleSystem::FireParticleSystem(int a_particlesNum)
{
  // Seed every ping-pong handle with the -1 sentinel.
  // NOTE(review): -1 stored into (presumably) GLuint wraps to 0xFFFFFFFF —
  // confirm the sentinel comparison elsewhere accounts for that.
  for(int i=0;i<2;i++)
  {
    m_posAndSizeBuffers[i] = -1;
    m_velAndHPBuffers[i]   = -1;
    m_drawVAOs[i]          = -1;
  }
  m_vertexPosTBO = -1;

  m_particlesNum = 0;
  m_currPinPongId = 0;

  m_pFogTexture = NULL;

  // NOTE(review): m_randBuffers is not pre-set to -1 like its siblings above;
  // harmless since glGenBuffers overwrites it, but confirm it was intentional.
  glGenBuffers(2, m_posAndSizeBuffers); CHECK_GL_ERRORS;
  glGenBuffers(2, m_velAndHPBuffers);   CHECK_GL_ERRORS;
  glGenBuffers(2, m_randBuffers);       CHECK_GL_ERRORS;

  glGenTextures(1, &m_vertexPosTBO);

  SetParticlesNum(a_particlesNum);
  Init(); // dispatching call

  m_renderProgram  = ShaderProgram("../ParticleSystem/Particle.vert", "../ParticleSystem/FireParticle.geom", "../ParticleSystem/FireParticle.frag");
  m_animateProgram = ShaderProgram("../ParticleSystem/FireParticlePhysics.vert");

  // Transform-feedback outputs of the physics pass; the program must be
  // relinked after declaring the varyings.
  const GLchar* names[3] = {"newPosAndSize", "newVelAndHp", "outRndSeed"};
  glTransformFeedbackVaryings (m_animateProgram.program, 3, names, GL_SEPARATE_ATTRIBS); CHECK_GL_ERRORS;
  if(!m_animateProgram.Link())
    throw std::runtime_error("can not relink program after glTransformFeedbackVaryings");

  CreateBuffers(m_renderProgram.program, m_animateProgram.program);

  //m_pFogTexture = new Texture2D("../data/fog.bmp");
  //m_pFogTexture = new Texture2D("../data/particle.tga");
  m_pFogTexture = new Texture2D("../data/fire_texturemap_small_grey.bmp");

  m_pFullScreenQuad = new FullScreenQuad();
  m_lastDeltaTime = 0.0f;
  m_windChangeTime = 0.0f;
}
Пример #28
0
// BBufferConsumer hook: a producer connected to our input.
// Records the connection, creates the video buffers, optionally shares the
// buffer group with the producer, and reports the actual bytes-per-row back
// through outInput.
status_t
VideoConsumer::Connected(const media_source& producer,
	const media_destination& where, const media_format& format,
	media_input* outInput)
{
	FUNCTION("VideoConsumer::Connected\n");
	
	fIn.source = producer;
	fIn.format = format;
	fIn.node = Node();
	sprintf(fIn.name, "Video Consumer");
	*outInput = fIn;

	uint32 userData = 0;
	int32 changeTag = 1;
	status_t ret = CreateBuffers(format);
	if (ret == B_OK) {
		// TODO: With overlay bitmaps, there seems to be a problem with
		// mapping the BBitmap areas into the BBuffers. Until that is fixed,
		// don't enable a shared BBufferGroup.
		if (!fTryOverlay) {
			ret = SetOutputBuffersFor(producer, fIn.destination, 
				fBuffers, &userData, &changeTag, true);
			if (ret != B_OK)
				ERROR("SetOutputBuffersFor() failed: %s\n", strerror(ret));
		}
		fIn.format.u.raw_video.display.bytes_per_row
			= fBitmap[0]->BytesPerRow();
	} else {
		ERROR("VideoConsumer::Connected - COULDN'T CREATE BUFFERS\n");
		return ret;
	}

	*outInput = fIn;
		// bytes per row might have changed
	fConnectionActive = true;

	FUNCTION("VideoConsumer::Connected - EXIT\n");
	return B_OK;
}
Пример #29
0
// BBufferConsumer hook: a producer connected to our input.
// Stores the negotiated format (defaulting a bogus field rate to 25 fps) and
// creates the video buffers for it.
status_t
VideoNode::Connected(const media_source &src,
					 const media_destination &dst,
					 const media_format &format,
					 media_input *out_input)
{
	/* The connection process:
	 *                BBufferProducer::FormatProposal
	 *                BBufferConsumer::AcceptFormat
	 *                BBufferProducer::PrepareToConnect
	 * we are here => BBufferConsumer::Connected
	 *                BBufferProducer::Connect
	 */

	if (dst != fInput.destination)
		return B_MEDIA_BAD_DESTINATION;

	fInput.source = src;
	fInput.format = format;

	// Guard against a missing/absurd field rate from the producer
	if (fInput.format.u.raw_video.field_rate < 1.0)
		fInput.format.u.raw_video.field_rate = 25.0;

	color_space colorspace = format.u.raw_video.display.format;
	BRect		frame(0, 0, format.u.raw_video.display.line_width - 1, format.u.raw_video.display.line_count - 1);
	status_t	err;

	DeleteBuffers();
	err = CreateBuffers(frame, colorspace, fOverlayEnabled);
	if (err) {
		printf("VideoNode::Connected failed, fOverlayEnabled = %d\n", fOverlayEnabled);
		return err;
	}	

	*out_input = fInput;

	return B_OK;
	
}
Пример #30
0
// (Re)configures the GPU particle emitter: stores the simulation parameters,
// allocates the CPU-side particle array, and builds the GPU buffers plus the
// update and draw shader programs.
void GPUParticleEmitter::Initialize(unsigned int maxParticles, float lifespanMin,
									float lifespanMax, float velocityMin, float velocityMax, float startSize,
									float endSize, const vec4& startColor, const vec4& endColor)
{
	this->maxParticles = maxParticles;
	this->lifespanMin = lifespanMin;
	this->lifespanMax = lifespanMax;
	this->velocityMin = velocityMin;
	this->velocityMax = velocityMax;
	this->startSize = startSize;
	this->endSize = endSize;
	this->startColor = startColor;
	this->endColor = endColor;

	// NOTE(review): new[] without freeing any previous allocation — calling
	// Initialize() twice leaks the old array; confirm single-call usage.
	particles = new GPUParticle::Particle[maxParticles];

	activeBuffer = 0;

	CreateBuffers();
	CreateUpdateProgram();
	CreateDrawProgram();
}