Example no. 1
Delaunay3DWindow::Delaunay3DWindow(Parameters& parameters)
    :
    Window3(parameters),
    mLightGray({ 0.75f, 0.75f, 0.75f, 1.0f })
{
    if (!SetEnvironment() || !CreateScene())
    {
        parameters.created = false;
        return;
    }

    InitializeCamera();

    mNoCullState = std::make_shared<RasterizerState>();
    mNoCullState->cullMode = RasterizerState::CULL_NONE;

    mNoCullWireState = std::make_shared<RasterizerState>();
    mNoCullWireState->cullMode = RasterizerState::CULL_NONE;
    mNoCullWireState->fillMode = RasterizerState::FILL_WIREFRAME;

    mBlendState = std::make_shared<BlendState>();
    mBlendState->target[0].enable = true;
    mBlendState->target[0].srcColor = BlendState::BM_SRC_ALPHA;
    mBlendState->target[0].dstColor = BlendState::BM_INV_SRC_ALPHA;
    mBlendState->target[0].srcAlpha = BlendState::BM_SRC_ALPHA;
    mBlendState->target[0].dstAlpha = BlendState::BM_INV_SRC_ALPHA;
}
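A minimal sketch of how the states created above are typically bound at draw time (the mesh member and wireframe flag are placeholders, not taken from this sample; SetRasterizerState, SetBlendState, and Draw mirror the engine calls seen in the other examples in this listing):

// Hypothetical display-time fragment: choose solid or wireframe rasterization,
// both with culling disabled, then blend the translucent mesh.
mEngine->SetRasterizerState(mWireframeEnabled ? mNoCullWireState : mNoCullState);
mEngine->SetBlendState(mBlendState);   // src-alpha / inv-src-alpha blending set up above
mEngine->Draw(mTetrahedraMesh);        // mTetrahedraMesh is a placeholder name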
Example no. 2
Fluids3DWindow::Fluids3DWindow(Parameters& parameters)
    :
    Window3(parameters),
    mFluid(mEngine, mProgramFactory, GRID_SIZE, GRID_SIZE, GRID_SIZE, 0.002f)
{
    if (!SetEnvironment() || !CreateNestedBoxes())
    {
        parameters.created = false;
        return;
    }

    // Use blending for the visualization.
    mAlphaState = std::make_shared<BlendState>();
    mAlphaState->target[0].enable = true;
    mAlphaState->target[0].srcColor = BlendState::BM_SRC_ALPHA;
    mAlphaState->target[0].dstColor = BlendState::BM_INV_SRC_ALPHA;
    mAlphaState->target[0].srcAlpha = BlendState::BM_SRC_ALPHA;
    mAlphaState->target[0].dstAlpha = BlendState::BM_INV_SRC_ALPHA;
    mEngine->SetBlendState(mAlphaState);

    // The alpha channel must be zero for the blending of density to work
    // correctly through the fluid region.
    mEngine->SetClearColor({ 1.0f, 1.0f, 1.0f, 0.0f });

    // The geometric proxies for volume rendering are concentric boxes.  They
    // are drawn from inside to outside for correctly sorted drawing, so depth
    // buffering is not needed.
    mNoDepthState = std::make_shared<DepthStencilState>();
    mNoDepthState->depthEnable = false;
    mEngine->SetDepthStencilState(mNoDepthState);

    mFluid.Initialize();
    InitializeCamera();
    UpdateConstants();
}
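A minimal sketch of the inside-to-outside proxy drawing described in the comment above (the container name is an assumption, not one of the sample's actual members):

// Hypothetical draw loop: the innermost box is drawn first so that each
// successive, larger box alpha-blends over it, which is why the depth test
// can stay disabled.
for (auto const& box : mNestedBoxes)   // mNestedBoxes assumed ordered inner to outer
{
    mEngine->Draw(box);
}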
Example no. 3
GeometryShadersWindow::GeometryShadersWindow(Parameters& parameters)
    :
    Window3(parameters)
{
    if (!SetEnvironment() || !CreateScene())
    {
        parameters.created = false;
        return;
    }

    mEngine->SetClearColor({ 1.0f, 1.0f, 1.0f, 1.0f });
    InitializeCamera();

    mCamera->SetFrustum(60.0f, GetAspectRatio(), 0.1f, 100.0f);
    Vector4<float> camPosition{ 2.8f, 0.0f, 0.0f, 1.0f };
    Vector4<float> camDVector{ -1.0f, 0.0f, 0.0f, 0.0f };
    Vector4<float> camUVector{ 0.0f, 0.0f, 1.0f, 0.0f };
    Vector4<float> camRVector = Cross(camDVector, camUVector);
    mCamera->SetFrame(camPosition, camDVector, camUVector, camRVector);

#if defined(SAVE_RENDERING_TO_DISK)
    mTarget = std::make_shared<DrawTarget>(1, DF_R8G8B8A8_UNORM, mXSize,
        mYSize);
    mTarget->GetRTTexture(0)->SetCopyType(Resource::COPY_STAGING_TO_CPU);
#endif
}
Example no. 4
BSplineSurfaceFitterWindow::BSplineSurfaceFitterWindow(Parameters& parameters)
    :
    Window3(parameters)
{
    if (!SetEnvironment())
    {
        parameters.created = false;
        return;
    }

    mNoCullState = std::make_shared<RasterizerState>();
    mNoCullState->cullMode = RasterizerState::CULL_NONE;

    mNoCullWireState = std::make_shared<RasterizerState>();
    mNoCullWireState->cullMode = RasterizerState::CULL_NONE;
    mNoCullWireState->fillMode = RasterizerState::FILL_WIREFRAME;

    mBlendState = std::make_shared<BlendState>();
    mBlendState->target[0].enable = true;
    mBlendState->target[0].srcColor = BlendState::BM_SRC_ALPHA;
    mBlendState->target[0].dstColor = BlendState::BM_INV_SRC_ALPHA;
    mBlendState->target[0].srcAlpha = BlendState::BM_SRC_ALPHA;
    mBlendState->target[0].dstAlpha = BlendState::BM_INV_SRC_ALPHA;

    mEngine->SetRasterizerState(mNoCullState);
    mEngine->SetClearColor({ 0.0f, 0.5f, 0.75f, 1.0f });

    CreateScene();
    InitializeCamera();
}
Example no. 5
int InitializeAllCameras()
{
    int err = 0;  // error code returned by InitializeCamera (type assumed)
    for (int i = 0; i < ccd_ncam; i++){
        err = InitializeCamera(i);
        check_sbig_error(err, "Error initializing camera\n");
    }
    return 0;
}
Example no. 6
LightsWindow::LightsWindow(Parameters& parameters)
    :
    Window3(parameters)
{
    mEngine->SetClearColor({ 0.0f, 0.25f, 0.75f, 1.0f });
    mWireState = std::make_shared<RasterizerState>();
    mWireState->fillMode = RasterizerState::FILL_WIREFRAME;

    CreateScene();
    InitializeCamera();
}
Example no. 7
StructuredBuffersWindow::StructuredBuffersWindow(Parameters& parameters)
    :
    Window3(parameters)
{
    if (!SetEnvironment() || !CreateScene())
    {
        parameters.created = false;
        return;
    }

    InitializeCamera();
}
Example no. 8
MinimumVolumeBox3DWindow::MinimumVolumeBox3DWindow(Parameters& parameters)
    :
    Window3(parameters),
    mVertices(NUM_POINTS)
{
    CreateScene();
    InitializeCamera();

    mWireState = std::make_shared<RasterizerState>();
    mWireState->cullMode = RasterizerState::CULL_NONE;
    mWireState->fillMode = RasterizerState::FILL_WIREFRAME;
    mEngine->SetRasterizerState(mWireState);
}
Example no. 9
TexturingWindow::TexturingWindow(Parameters& parameters)
    :
    Window3(parameters)
{
    if (!SetEnvironment())
    {
        parameters.created = false;
        return;
    }

    CreateScene();
    InitializeCamera();
}
Example no. 10
bool SpotlightSample::Init()
{
	if (!DirectXGame::Init())
		return false;

	InitializeCamera();
	InitializeLighting();
	InitializeGeometry();
	InitializeShaders();
	InitializeShaderResources();

	return true;
}
Example no. 11
BlendedAnimationsWindow::BlendedAnimationsWindow(Parameters& parameters)
    :
    Window3(parameters),
    mUpArrowPressed(false),
    mShiftPressed(false)
{
    if (!SetEnvironment())
    {
        parameters.created = false;
        return;
    }

    std::string gtePath = mEnvironment.GetVariable("GTE_PATH");
    std::string rootPath = gtePath + "/Samples/Graphics/BlendedAnimations/Data/";
    SkinController::Updater postUpdate =
        [this](std::shared_ptr<VertexBuffer> const& vbuffer)
        {
            mEngine->Update(vbuffer);
        };
    mManager = std::make_unique<BipedManager>(rootPath, "Biped", mProgramFactory, postUpdate);

    // Set animation information.  The counts differ in debug and release
    // builds because of the differing frame rates of those builds.
#if defined(_DEBUG)
    int idleWalkCount = 100;
    int walkCount = 10;
    int walkRunCount = 100;
    mApplicationTime = 0.0;
    mApplicationTimeDelta = 0.01;
#else
    int idleWalkCount = 1000;
    int walkCount = 100;
    int walkRunCount = 1000;
    mApplicationTime = 0.0;
    mApplicationTimeDelta = 0.001;
#endif

    // The idle head turning occurs too frequently (frequency = 1 in the
    // original model).  Reduce the turning by half.
    mManager->SetIdle(0.5, 0.0);

    // The walk and run cycles must be aligned properly for blending.  A
    // phase of 0.2 for the run cycle aligns the biped feet.
    mManager->SetRun(1.0, 0.2);

    // The initial state is 'idle'.
    mManager->Initialize(idleWalkCount, walkCount, walkRunCount);

    CreateScene();
    InitializeCamera();
}
Example no. 12
PickingWindow::PickingWindow(Parameters& parameters)
    :
    Window3(parameters),
    mNumActiveSpheres(0)
{
    if (!SetEnvironment())
    {
        parameters.created = false;
        return;
    }

    CreateScene();
    InitializeCamera();
}
Example no. 13
MultipleRenderTargetsWindow::MultipleRenderTargetsWindow(Parameters& parameters)
    :
    Window3(parameters),
    mActiveOverlay(0)
{
    if (!SetEnvironment() || !CreateScene())
    {
        parameters.created = false;
        return;
    }

    mEngine->SetClearColor({ 0.75f, 0.75f, 0.75f, 1.0f });
    InitializeCamera();
    CreateOverlays();
}
Example no. 14
void Level00::VInitialize()
{
	mState = BASE_SCENE_STATE_INITIALIZING;

	VOnResize();

	InitializeLevel();
	InitializeGrid();
	InitializeRobots();
	InitializeGeometry();
	InitializeWallShaders();
	InitializeLightShaders();
	InitializePlayerShaders();
	InitializeCamera();

	mState = BASE_SCENE_STATE_RUNNING;
}
Example no. 15
MinimumVolumeSphere3DWindow::MinimumVolumeSphere3DWindow(Parameters& parameters)
    :
    Window3(parameters),
    mNumActive(2),
    mVertices(NUM_POINTS)
{
    mNoCullWireState = std::make_shared<RasterizerState>();
    mNoCullWireState->cullMode = RasterizerState::CULL_NONE;
    mNoCullWireState->fillMode = RasterizerState::FILL_WIREFRAME;
    mEngine->SetRasterizerState(mNoCullWireState);

    CreateScene();
    InitializeCamera();

    mMVS3(mNumActive, &mVertices[0], mMinimalSphere);
    UpdateScene();
}
Example no. 16
MassSprings3DWindow::MassSprings3DWindow(Parameters& parameters)
    :
    Window3(parameters),
    mSimulationTime(0.0f),
    mSimulationDelta(0.001f)
{
    if (!SetEnvironment() || !CreateMassSpringSystem())
    {
        parameters.created = false;
        return;
    }

    mWireState = std::make_shared<RasterizerState>();
    mWireState->fillMode = RasterizerState::FILL_WIREFRAME;

    CreateBoxFaces();
    InitializeCamera();
}
Example no. 17
void ANDOR885_Camera::setCameraStat(int cStat) throw(std::exception)
{
	bool error = false;	// stays false if the camera is already off

	if (cStat == ANDOR_ON){

		// Turn on camera, if it's not already on
		if (!initialized){
//			std::cerr << "ANDOR885_Camera: Initializing..." << std::endl;
			initialized = !InitializeCamera();
		}

		if (!initialized){
			throw ANDOR885_Exception("Error initializing camera");
		} else {
//			std::cerr << "ANDOR885_Camera: Camera on" << std::endl;
			cameraStat = ANDOR_ON;
		}
	} 
	else if (cStat == ANDOR_OFF) {

		// Turn off camera, if it's not already off.
		if (initialized) {
			error = deviceExit();
		}

		if (error) {
			throw ANDOR885_Exception("Error shutting down camera");
		} else {
		//	std::cerr << "ANDOR885_Camera: Camera off" << std::endl;
			cameraStat = ANDOR_OFF;
		}
	} 
	else {
		throw ANDOR885_Exception("Unrecognized camera status requested");
	}
}
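A usage sketch for the setter above (the camera instance and the call site are assumptions; printMessage() appears in the ANDOR885_Camera constructor later in this listing):

// Hypothetical call site: switch the camera on, acquire, then shut it down,
// reporting any ANDOR885_Exception thrown by setCameraStat.
try {
	camera.setCameraStat(ANDOR_ON);
	// ... acquisition ...
	camera.setCameraStat(ANDOR_OFF);
} catch (ANDOR885_Exception& e) {
	std::cerr << e.printMessage() << std::endl;
}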
Example no. 18
//-----------------------------------------------------------------------------
//!
//-----------------------------------------------------------------------------
tSonar3D::tSonar3D(
    const boost::shared_ptr<tStructureOverlayLayer>& xSonarImageryLayer,
    const boost::shared_ptr<tLayer>& xWaypointLayer,
    const int cameraDepthLevel,
    QWidget* pParent) 
    : QWidget(pParent)
    , m_CameraDistanceMin(10.0)
    , m_CameraDistanceMax(250.0)
    , m_pSonarDataEngine(0)
    , m_pNativeWindow(0)
    , m_CursorEnabled(false)
    , m_pCameraInteractor(0)
    , m_TrackBallInteractor()
    , m_FPSInteractor(&m_SceneSettings.camera)
    , m_CameraDistanceMeters(cDefaultCameraDistanceMeters)
    , m_ZoomStepMeters(10)
    , m_CameraRelativeAngleDegrees(0)
    , m_CameraDepthLevel(cameraDepthLevel)
    , m_NewSonarDataAvailable(false)
    , m_SceneUpdated(false)
    , m_RenderingInProgress(false)
    , m_PreviewMode(false)
    , m_VesselMode(true)
    , m_FollowVessel(true)
    , m_NightMode(false)
    , m_CameraRotating(false)
    , m_RelativeAzimuth(false)
    , m_FirstVesselUpdate(true)
    , m_LastHeading(0)
    , m_HeadSequence(0)
    , m_Target(tVector3d::ZERO)
{
    setFocusPolicy(Qt::StrongFocus);

    m_pNativeWindow = new tNativeWindow(this);

    QVBoxLayout* pLayout = new QVBoxLayout(this);
    pLayout->addWidget(m_pNativeWindow);

    pLayout->setSpacing(0);
    pLayout->setContentsMargins(0, 0, 0, 0);

    setLayout(pLayout);

    m_SceneSettings.exaggeration = 1;
    m_SceneSettings.wireframe = false;

    boost::shared_ptr<tSonarPointCloudManager> xManager(new tSonarPointCloudManager(xSonarImageryLayer));
    Connect(xManager.get(), SIGNAL(DataUpdated(unsigned int)), this, SLOT(OnUpdatedSonar3DData(unsigned int)));
    Connect(xManager.get(), SIGNAL(DataAdded(unsigned int)), this, SLOT(OnNewSonar3DData(unsigned int)));
    Connect(xManager.get(), SIGNAL(DataRemoved(unsigned int)), this, SLOT(OnRemovedSonar3DData(unsigned int)));

    m_pSonarDataEngine = new tSonar3DDataEngine(xManager);
    CreateRenderer(xWaypointLayer, xManager);

    m_MaxNumberOfSonarColumns = xManager->MaxNumberOfSonarColumns();

    InitializeCamera();

    m_pCameraInteractor = &m_TrackBallInteractor;
    Connect(m_pSonarDataEngine, SIGNAL(DataSourceChanged(bool)), m_xSonar3DRenderer.get(), SLOT(OnDataSourceChanged(bool)), Qt::QueuedConnection);
}
Example no. 19
HRESULT CDimap::Initialize(char* pszAuxFile,
						   char* pszEphFile,
						   char* pszAttFile,
						   char* pszCamFile)
{
	FILE* fpAux=fopen(pszAuxFile,"rt");
	if(fpAux==NULL)
	{
		COutput::OutputFileOpenFailed(pszAuxFile);
		
		return S_FALSE;
	}

	FILE* fpEph=fopen(pszEphFile,"rt");
	if(fpEph==NULL)
	{
		COutput::OutputFileOpenFailed(pszEphFile);

		fclose(fpAux);

		return S_FALSE;
	}

	FILE* fpAtt=fopen(pszAttFile,"rt");
	if(fpAtt==NULL)
	{
		COutput::OutputFileOpenFailed(pszAttFile);

		fclose(fpAux);
		fclose(fpEph);

		return S_FALSE;
	}

	char szBuffer[512];
	char szTag[256];
	char szMean[10];
	char szValue[256];

	double lfT0 = 0.0, lfTn = 0.0;	// first/last line time codes read from the aux file
	while(!feof(fpAux))
	{
		fgets(szBuffer,511,fpAux);
		
		sscanf(szBuffer,"%s%s%s",szTag,szMean,szValue);
		if(strcmp(szTag,"satelliteID")==0)
		{
			if(strcmp(szValue,"\"HJ1A\"")==0)
			{
				m_nSatelliteID=HJ1A;
			}
			else if(strcmp(szValue,"\"HJ1B\"")==0)
			{
				m_nSatelliteID=HJ1B;
			}
		}
		else if(strcmp(szTag,"cameraID")==0)
		{
			if(strcmp(szValue,"\"CCD1\"")==0)
			{
				if(m_nSatelliteID==HJ1A)
				{
					m_nSensorType=HJ1A_CCD1;
				}
				else if(m_nSatelliteID==HJ1B)
				{
					m_nSensorType=HJ1B_CCD1;
				}
			}
			else if(strcmp(szValue,"\"CCD2\"")==0)
			{
				if(m_nSatelliteID==HJ1A)
				{
					m_nSensorType=HJ1A_CCD2;
				}
				else if(m_nSatelliteID==HJ1B)
				{
					m_nSensorType=HJ1B_CCD2;
				}
			}
			else if(strcmp(szValue,"\"CCD\"")==0)
			{
				if(m_nSatelliteID==HJ1A)
				{
					m_nSensorType=HJ1A_CCD;
				}
				else if(m_nSatelliteID==HJ1B)
				{
					m_nSensorType=HJ1B_CCD;
				}
			}
			else if(strcmp(szValue,"\"HSI\"")==0)
			{
				m_nSensorType=HJ1A_HSI;
			}
			else if(strcmp(szValue,"\"IRS\"")==0)
			{
				m_nSensorType=HJ1B_IRS;
			}
		}
		else if(strcmp(szTag,"firstLineTimeCode")==0)
		{
			lfT0=atof(szValue);
		}
		else if(strcmp(szTag,"lastLineTimeCode")==0)
		{
			lfTn=atof(szValue);
		}
		else if(strcmp(szTag,"imageWidth")==0)
		{
			m_nCols=atoi(szValue);
		}
		else if(strcmp(szTag,"imageHeight")==0)
		{
			m_nRows=atoi(szValue);
		}
	}
	m_fT0=lfT0;
	m_fLSP=(lfTn-lfT0)/(m_nRows-1);

	if(m_nSensorType==HJ1B_IRS)
	{
		int nFrame=m_nRows;
		m_nRows=m_nRows*m_ScanCamera.nScanFrame;

		fseek(fpAux,0,SEEK_SET);

		int i=0;
		for(i=0;i<nFrame;i++)
		{
			while(!feof(fpAux))
			{
				fgets(szBuffer,511,fpAux);
				
				if(szBuffer[0]=='{')
				{
					break;
				}
			}

			while(!feof(fpAux))
			{
				fgets(szBuffer,511,fpAux);
				if(szBuffer[0]=='}')
				{
					break;
				}
				
				sscanf(szBuffer,"%s%s%s",szTag,szMean,szValue);
				if(strcmp(szTag,"GpsTimeCode")==0)
				{
					m_ScanFrameTime.Add(atof(szValue));
				}
			}
		}
		if(m_ScanFrameTime.GetSize()!=nFrame)
		{
			COutput::Output("aux file format error");
			
			fclose(fpAux);
			fclose(fpEph);
			fclose(fpAtt);

			return S_FALSE;
		}
	}


	int nEphNum=0;
	while(!feof(fpEph))
	{
		fgets(szBuffer,511,fpEph);
		sscanf(szBuffer,"%s%s%s",szTag,szMean,szValue);
		if(strcmp(szTag,"groupNumber")==0)
		{
			nEphNum=atoi(szValue);
			break;
		}
	}
	int i=0;
	for(i=0;i<nEphNum;i++)
	{
		while(!feof(fpEph))
		{
			fgets(szBuffer,511,fpEph);
			
			if(szBuffer[0]=='{')
			{
				break;
			}
		}

		Ephemeris ep;
		double GX,GY,GZ;
		
		while(!feof(fpEph))
		{
			fgets(szBuffer,511,fpEph);
			if(szBuffer[0]=='}')
			{
				break;
			}
			
			sscanf(szBuffer,"%s%s%s",szTag,szMean,szValue);
			if(strcmp(szTag,"timeCode")==0)
			{
				ep.t=atof(szValue);
			}
			else if(strcmp(szTag,"PX")==0)
			{
				GX=atof(szValue);
			}
			else if(strcmp(szTag,"PY")==0)
			{
				GY=atof(szValue);
			}
			else if(strcmp(szTag,"PZ")==0)
			{
				GZ=atof(szValue);
			}
			else if(strcmp(szTag,"VX")==0)
			{
				ep.V.x=atof(szValue);
			}
			else if(strcmp(szTag,"VY")==0)
			{
				ep.V.y=atof(szValue);
			}
			else if(strcmp(szTag,"VZ")==0)
			{
				ep.V.z=atof(szValue);
			}
		}

		m_Projection.Geocentric2Geodetic(GX,GY,GZ,&ep.P.x,&ep.P.y,&ep.P.z);
		m_Ephemeris.Add(ep);
	}

	int nAttNum=0;
	while(!feof(fpAtt))
	{
		fgets(szBuffer,511,fpAtt);
		sscanf(szBuffer,"%s%s%s",szTag,szMean,szValue);
		if(strcmp(szTag,"groupNumber")==0)
		{
			nAttNum=atoi(szValue);
			break;
		}
	}
	for(i=0;i<nAttNum;i++)
	{
		while(!feof(fpAtt))
		{
			fgets(szBuffer,511,fpAtt);
			
			if(szBuffer[0]=='{')
			{
				break;
			}
		}

		Attitude at;
		
		while(!feof(fpAtt))
		{
			fgets(szBuffer,511,fpAtt);
			if(szBuffer[0]=='}')
			{
				break;
			}
			
			sscanf(szBuffer,"%s%s%s",szTag,szMean,szValue);
			if(strcmp(szTag,"timeCode")==0)
			{
				at.t=atof(szValue);
			}
			else if(strcmp(szTag,"roll")==0)
			{
				at.ROLL=atof(szValue);
				at.ROLL=at.ROLL*PI/180.0;
			}
			else if(strcmp(szTag,"pitch")==0)
			{
				at.PITCH=atof(szValue);
				at.PITCH=at.PITCH*PI/180.0;
			}
			else if(strcmp(szTag,"yaw")==0)
			{
				at.YAW=atof(szValue);
				at.YAW=at.YAW*PI/180.0;
			}
		}

		m_Attitude.Add(at);
	}

	InitializeCamera(pszCamFile);
	
	fclose(fpAux);
	fclose(fpEph);
	fclose(fpAtt);

	double lfTc=LineDate(m_nRows/2);
	Ephemeris epc=EphemerisDate(lfTc);
	double lfLatitude=epc.P.x;
	double lfLongitude=epc.P.y;
	long Zone;
	m_Projection.CalcZone(lfLatitude,lfLongitude,&Zone);

	m_Projection.SetZoneStep(6);
	m_Projection.SetZone(Zone);
	m_Projection.SetHemisphere(lfLatitude>0?'N':'S');

	double lfTempAltitude;
	Image2Geodetic(0,0,0,&m_fDatasetFrameLat[0],&m_fDatasetFrameLon[0],&lfTempAltitude);
	Image2Geodetic(0,m_nCols-1,0,&m_fDatasetFrameLat[1],&m_fDatasetFrameLon[1],&lfTempAltitude);
	Image2Geodetic(m_nRows-1,m_nCols-1,0,&m_fDatasetFrameLat[2],&m_fDatasetFrameLon[2],&lfTempAltitude);
	Image2Geodetic(m_nRows-1,0,0,&m_fDatasetFrameLat[3],&m_fDatasetFrameLon[3],&lfTempAltitude);
	Image2Geodetic(m_nRows/2,m_nCols/2,0,&m_fDatasetCenterLat,&m_fDatasetCenterLon,&lfTempAltitude);
	for(i=0;i<4;i++)
	{
		m_fDatasetFrameLat[i]=m_fDatasetFrameLat[i]*180.0/PI;
		m_fDatasetFrameLon[i]=m_fDatasetFrameLon[i]*180.0/PI;
	}

/*	double tempx0,tempy0,tempz0;
	Image2Geodetic(1000,23800/2-1,0,&tempx0,&tempy0,&tempz0);
	double tempx1,tempy1,tempz1;
	Image2Geodetic(1000,23800/2,0,&tempx1,&tempy1,&tempz1);
*/
	return S_OK;
}
Example no. 20
void VenomModuleStart(GameMemory* memory) {
  SystemInfo* sys = &memory->systemInfo;
  GameData* data = PushStruct(GameData, &memory->mainBlock);
  memory->userdata = data;
  RenderState* rs = &memory->renderState;
  
  BeginProfileEntry("Initalize Terrain Generator");
  InitalizeTerrainGenerator(&data->terrain, V3(0.0, 0.0, 0.0), &memory->mainBlock);
  EndProfileEntry();

  rs->terrain = &data->terrain;
  GetEngine()->physicsSimulation.terrain = &data->terrain;

#if 0
  {
    ModelData data = {};
    data = ImportExternalModelData(VENOM_ASSET_FILE("axis.obj"), 0);
    FILE* file = fopen("test.txt", "wb");
    assert(file != 0);
    for(size_t i = 0; i < data.meshData.vertexCount; i++) {
      fprintf(file, "V3{%ff, %ff, %ff},\n", data.meshData.vertices[i].position.x,
        data.meshData.vertices[i].position.y, data.meshData.vertices[i].position.z);
    }
    fprintf(file, "\n");
    for(size_t i = 0; i < data.meshData.indexCount; i++) {
      fprintf(file, "%u,", data.meshData.indices[i]);
      if(i % 3 == 0) fprintf(file, "\n");
    }
  }
#endif

 
  InitializeCamera(&data->camera, 45*DEG2RAD, 0.1f, 10000.0f, sys->screenWidth, sys->screenHeight);
  data->camera.position = {4, 10, 2};


  EntityContainerInit(&data->entityContainer, 1024, 8);

  {
    AssetManifest *assetManifest = &memory->assetManifest;
    EntityContainer *entityContainer = &data->entityContainer;
    EntityIndex player_index;
    Entity *player = CreateEntity(EntityType_Player, &player_index, entityContainer);
    assign_model_to_entity(player_index, GetModelID("player", assetManifest), assetManifest, entityContainer);
    ModelAsset *asset = GetModelAsset(player->modelID, assetManifest);
    player->animation_state.model_id = player->modelID;
    player->position += V3(1, 5.0f, 1);
    data->playerEntityIndex = player_index;

#if 0
    Orientation cameraOrientation = CalculateCameraOrientationForTrackTarget(player->position);
    data->camera.position = cameraOrientation.position;
    V3 eulerRotation = QuaternionToEuler(cameraOrientation.rotation);
    data->camera.pitch = eulerRotation.x;
    data->camera.yaw = eulerRotation.y;
#endif

    RNGSeed seed(15);
    ScatterInRectangle(&seed, -128, -128, 256, 256, 8, 8, [&](V2 point) {
      Entity *e = CreateEntity(EntityType_StaticObject, entityContainer);
      e->modelID = GetModelID("Tree", assetManifest);
      e->position.x = point.x;
      e->position.z = point.y;
    });

  }
}
Example no. 21
ANDOR885_Camera::ANDOR885_Camera()
{
	debugging = false;

	initialized = false;
	notDestructed = false;
	extension = ".tif";

	eventMetadata = NULL;

	pauseCameraMutex = new omni_mutex();
	pauseCameraCondition = new omni_condition(pauseCameraMutex);
	stopEventMutex = new omni_mutex();
	stopEventCondition = new omni_condition(stopEventMutex);
	stopEvent = false;
	numAcquiredMutex = new omni_mutex();
	numAcquiredCondition = new omni_condition(numAcquiredMutex);
	waitForEndOfAcquisitionMutex = new omni_mutex();
	waitForEndOfAcquisitionCondition = new omni_condition(waitForEndOfAcquisitionMutex);
	waitForCleanupEventMutex = new omni_mutex();
	waitForCleanupEventCondition = new omni_condition(waitForCleanupEventMutex);
	cleanupEvent = false;	// assumed to be a member flag, set like stopEvent above

	//Initialize necessary parameters
	readMode_t.name = "Read mode"; //If ever there is more than one read mode, be sure to properly initialize this for playing events!
	readMode_t.choices[READMODE_IMAGE] = "Image";

	shutterMode_t.name = "Shutter mode";
	shutterMode_t.choices[SHUTTERMODE_AUTO] = "Auto";
	shutterMode_t.choices[SHUTTERMODE_OPEN] = "Open";
	shutterMode_t.choices[SHUTTERMODE_CLOSE] = "Closed";

	acquisitionMode_t.name = "**Acquisition mode (RTA)";
	acquisitionMode_t.choices[ACQMODE_SINGLE_SCAN] = "Single scan";
	acquisitionMode_t.choices[ACQMODE_KINETIC_SERIES] = "Kinetic series";
	acquisitionMode_t.choices[ACQMODE_RUN_TILL_ABORT] = "Run 'til abort";

	triggerMode_t.name = "**Trigger mode (EE)";
	triggerMode_t.choices[TRIGGERMODE_EXTERNAL] = "External";
	triggerMode_t.choices[TRIGGERMODE_EXTERNAL_EXPOSURE] = "External exposure";
	triggerMode_t.choices[TRIGGERMODE_INTERNAL] = "Internal";

	preAmpGain_t.name = "*Preamp Gain";
//	preAmpGain_t.choices.push_back("");
//	preAmpGain_t.choiceFlags.push_back(PREAMP_BLANK);
	preAmpGain = NOT_AVAILABLE;
//	preAmpGainPos = PREAMP_BLANK;

	verticalShiftSpeed_t.name = "*Vertical Shift Speed (us/px)";
	verticalClockVoltage_t.name = "*Vertical Clock Voltage";
	horizontalShiftSpeed_t.name = "*Horizontal Shift Speed (us)";

//	pImageArray = NULL;

	cameraStat		=	ANDOR_ON;
	acquisitionMode	=	ACQMODE_RUN_TILL_ABORT;
	readMode		=	READMODE_IMAGE;
	exposureTime	=	(float) 0.05; // in seconds
	accumulateTime	=	0;
	kineticTime		=	0;
	ttl				=	TTL_OPEN_HIGH;
	shutterMode		=	SHUTTERMODE_OPEN;
	closeTime		=	SHUTTER_CLOSE_TIME;
	openTime		=	SHUTTER_OPEN_TIME;
//	triggerMode		=	TRIGGERMODE_EXTERNAL_EXPOSURE; //will be set by InitializeCamera
	frameTransfer	=	ANDOR_OFF;
//	spoolMode		=	ANDOR_OFF;				
	coolerSetpt		=  -90;
	coolerStat		=	ANDOR_ON;
	cameraTemp		=	20;
	EMCCDGain		=	NOT_AVAILABLE;

	verticalShiftSpeed = 0;
	verticalClockVoltage = 0;
	horizontalShiftSpeed = 0;

	readMode_t.initial = readMode_t.choices.find(readMode)->second;
	shutterMode_t.initial = shutterMode_t.choices.find(shutterMode)->second;
//	triggerMode_t.initial = triggerMode_t.choices.find(triggerMode)->second;
	acquisitionMode_t.initial = acquisitionMode_t.choices.find(acquisitionMode)->second;

	//Name of path to which files should be saved
	filePath		=	createFilePath();
	logPath			=	"C:\\Documents and Settings\\EP Lab\\Desktop\\";
	palPath			=	"C:\\Documents and Settings\\User\\My Documents\\My Pictures\\Andor_iXon\\GREY.PAL";

	initialized = !InitializeCamera();

	if (initialized){
		notDestructed = true;

		omni_thread::create(playCameraWrapper, (void*) this, omni_thread::PRIORITY_HIGH);
	

		try {
			setExposureTime(exposureTime);
			setTriggerMode(triggerMode);
		} catch (ANDOR885_Exception& e){
			std::cerr << e.printMessage() << std::endl;
			initialized = false;
		}
		


		if (debugging) {
			try {
				setAcquisitionMode(acquisitionMode);
				setReadMode(readMode);
			} catch (ANDOR885_Exception& e){
				std::cerr << e.printMessage() << std::endl;
				initialized = false;
			}
		} 
	}
}