Пример #1
0
// Draws the given model with texturing, shading and a normal map.
void Rasterizer::DrawSolidTexturedNormalMapped(Model3D& model, std::vector<DirectionalLight*> directionalLights, std::vector<AmbientLight*> ambientLights, std::vector<PointLight*> pointLights)
{
	// Local copies of the model's geometry lists.
	std::vector<Polygon3D> polygons = model.GetPolygonList();
	std::vector<Vertex> vertices = model.GetTransformedVertexList();
	std::vector<UVCoordinate> uvCoords = model.GetUVCoordinateList();

	// Render every front-facing polygon in the list.
	for (unsigned int polyIndex = 0; polyIndex < polygons.size(); polyIndex++)
	{
		Polygon3D& poly = polygons[polyIndex];
		if (poly.GetBackfacing())
			continue;

		Vertex v1 = vertices[poly.GetVertexIndex(0)];
		Vertex v2 = vertices[poly.GetVertexIndex(1)];
		Vertex v3 = vertices[poly.GetVertexIndex(2)];

		// Attach the polygon's UV coordinates to the vertex copies so the
		// fill routines can texture-map them.
		v1.SetUVCoordinate(uvCoords[poly.GetUVIndex(0)]);
		v2.SetUVCoordinate(uvCoords[poly.GetUVIndex(1)]);
		v3.SetUVCoordinate(uvCoords[poly.GetUVIndex(2)]);

		// Use the normal-mapped fill when the model has a normal map enabled,
		// otherwise fall back to the plain textured fill.
		if (model.GetNormalMapOn())
			FillPolygonTexturedNormalMapped(v1, v2, v3, v1.GetColor(), model, directionalLights, ambientLights, pointLights);
		else
			FillPolygonTextured(v1, v2, v3, v1.GetColor(), model);
	}
}
Пример #2
0
void MainWindow::OnOpenModel()
{
    const QString filtersAll(QString::fromUtf8(
                                 "Model files (*.mff);;All files (*.*)"));
    QString filterSel(QString::fromUtf8(
                          "Model files (*.mff)"));
    const QString sFilePath = QFileDialog::getOpenFileName(this,
                              QString::fromUtf8("Open 3D model"),
                              qApp->applicationDirPath(),
                              filtersAll, &filterSel);
    if(sFilePath.isEmpty())
    {
        return;
    }

    Model3D model;
    if(model.Load(sFilePath))
    {
        OnCloseModel();

        m_sFilePath = sFilePath;

        setWindowTitle(m_sFilePath);
        ui->openGLWidget->SetModel(model);
    }
}
Пример #3
0
    void Init()
    {
        Super::Init();

        mWorld = GD_NEW(World, this, "Launch::Gamedesk");
        mWorld->Init(&mOctree);

        Model3D* pModel = Cast<Model3D>(mWorld->SpawnEntity(Model3D::StaticClass()));
        pModel->SetMesh("link's house.ase");
        pModel->Select(true);

		Keyboard& keyboard = InputSubsystem::GetKeyboard();
		keyboard.AddKeyListener(this, Keyboard::Key_W, Keyboard::Key_Down);
		keyboard.AddKeyListener(this, Keyboard::Key_S, Keyboard::Key_Down);
		keyboard.AddKeyListener(this, Keyboard::Key_A, Keyboard::Key_Down);
		keyboard.AddKeyListener(this, Keyboard::Key_D, Keyboard::Key_Down);
        keyboard.AddKeyListener(this, Keyboard::Key_Escape, Keyboard::Key_Down);
	        
		Mouse& mouse = InputSubsystem::GetMouse();
		mouse.AddMoveListener(this);

        mMainWindow->AddListener(this);

        mFont.GetFont( "Data/Fonts/tahoma.ttf", 14 );
    }
Пример #4
0
// Spawns the mesh selected in the browser list as a new Model3D entity,
// placed two units in front of the current editor camera.
void ModelBrowserUI::InsertModelInWorld( Q3ListBoxItem* pItem )
{
    // Spawn position: camera position offset along the view direction.
    Camera* pCamera = mTool->GetEditor()->GetWorldManager().GetCurrentCamera();
    Vector3f spawnPos = pCamera->GetPosition() + (pCamera->GetView() * 2.0f);

    // Spawn the entity and give it the mesh named by the list item.
    Model3D* pModel = Cast<Model3D>( mTool->GetEditor()->GetWorldManager().SpawnEntity( Model3D::StaticClass(), spawnPos ) );
    pModel->SetMesh( String("Data/Meshes/") + String(pItem->text().ascii()) );
}
Пример #5
0
/* Renders the given model as a wireframe of the given color into the render
 * target, from the camera's point of view.
 *
 * GL calls go through the project's __() wrapper macro (defined elsewhere,
 * presumably for error checking - confirm against its definition).
 * Always returns true. */
bool OpenGLRenderer::renderModel3DWireframe(Model3D &model3D, const glm::vec4 &color, Camera &camera, RenderTarget &renderTarget)
{
    /* Map the depth range to the camera's near/far planes */
    __(glDepthRangef(camera.getNear(), camera.getFar()));

    /* Enable wireframe mode */
    __(glPolygonMode(GL_FRONT_AND_BACK, GL_LINE));
    __(glEnable(GL_LINE_SMOOTH));
    __(glDisable(GL_CULL_FACE));

    /* Calculate MVP matrix */
    glm::mat4 MVP = camera.getPerspectiveMatrix() * camera.getViewMatrix() * model3D.getModelMatrix();

    /* Cast the model into an internal type */
    OpenGLAsset3D *glObject = static_cast<OpenGLAsset3D *>(model3D.getAsset3D());

    /* Set the color for the wireframe shader */
    _wireframeShader->setColor(color);

    /* Bind the render target */
    renderTarget.bind();
    {
        __(glEnable(GL_MULTISAMPLE));

        /* Depth-test with LEQUAL and standard alpha blending */
        __(glEnable(GL_DEPTH_TEST));
        __(glDepthFunc(GL_LEQUAL));
        __(glBlendEquation(GL_FUNC_ADD));
        __(glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA));
        __(glEnable(GL_BLEND));

        /* Bind program to upload the uniform */
        _wireframeShader->attach();

        /* Send our transformation to the currently bound shader, in the "MVP" uniform */
        _wireframeShader->setUniformMat4("u_MVPMatrix", &MVP);

        /* Set the shader custom parameters */
        _wireframeShader->setCustomParams();

        /* Draw the model: one glDrawElements per index batch, using the
         * per-batch offsets/counts stored with the asset */
        __(glBindVertexArray(glObject->getVertexArrayID()));
        {
            std::vector<uint32_t> offset = glObject->getIndicesOffsets();
            std::vector<uint32_t> count = glObject->getIndicesCount();

            for (size_t i = 0; i < offset.size(); ++i) {
                __(glDrawElements(GL_TRIANGLES, count[i], GL_UNSIGNED_INT, (void *)(offset[i] * sizeof(GLuint))));
            }
        }
        __(glBindVertexArray(0));

        /* Unbind */
        _wireframeShader->detach();
    }
    renderTarget.unbind();

    return true;
}
Пример #6
0
  // Draws this entity's model. Two paths: when a transform-applying shader is
  // active, the transform and all uniforms/samplers are fed to the shader;
  // otherwise the transform is applied directly through Allegro's transform
  // state (saved and restored around the draw).
  void draw(vec3d camera_position, vec3d light_position)
  {
		ALLEGRO_STATE previous_state;
		ALLEGRO_TRANSFORM transform;

		
		if (shader && shader_applies_transform)
		{
			// construct our entity's transform
			place.build_transform(&transform);

			// Now apply it to the shader
			shader->use();
			Shader::set_vec3("camera_position", camera_position);
			Shader::set_vec3("light_position", light_position);
			Shader::set_mat4("position_transform", &transform);

			// Whether the cube maps contribute reflections this frame
			Shader::set_bool("reflecting", cube_map_reflecting);

			// Texture samplers are bound to fixed units 2..6
			Shader::set_sampler("diffuse_texture", diffuse_texture, 2);
			Shader::set_sampler("specular_texture", specular_texture, 3);
			Shader::set_sampler("normal_texture", normal_texture, 4);
			Shader::set_sampler_cube("cube_map_A", cube_map_A, 5);
			Shader::set_sampler_cube("cube_map_B", cube_map_B, 6);
		}
		else
		{
			// when not using the shader, we'll need to 
			// apply the transform directly here
			al_store_state(&previous_state, ALLEGRO_STATE_TRANSFORM);
			al_identity_transform(&transform);
			al_use_transform(&transform);
			place.start_transform();

			// also, we set the texture on this model directly
			if (diffuse_texture) model->set_texture(diffuse_texture);
		}



		// actually draw our model here
		if (model) model->draw();



		if (shader && shader_applies_transform)
		{
			Shader::stop();
		}
		else
		{
			// undo the direct transform and restore Allegro's saved state
			place.restore_transform();
			al_restore_state(&previous_state);
		}
  }
// Loads a 3D Studio Max (.3ds) model from Spec into Model.
// Returns true only if the file opens, starts with the MASTER chunk, the
// chunk tree parses, and both vertex positions and indices were produced.
// Diagnostics go to DBOut when it is non-null.
bool LoadModel_Studio(FileSpecifier& Spec, Model3D& Model)
{
	// File-scope pointer to the target model - presumably read by the chunk
	// reader callbacks (ReadMaster et al.); verify against their definitions.
	ModelPtr = &Model;
	Model.Clear();
	
	if (DBOut)
	{
		// Name buffer
		const int BufferSize = 256;
		char Buffer[BufferSize];
		Spec.GetName(Buffer);
		fprintf(DBOut,"Loading 3D Studio Max model file %s\n",Buffer);
	}
	
	OpenedFile OFile;
	if (!Spec.Open(OFile))
	{	
		if (DBOut) fprintf(DBOut,"ERROR opening the file\n");
		return false;
	}
	
	// The file must begin with the MASTER chunk to be a 3DS file.
	ChunkHeaderData ChunkHeader;
	if (!ReadChunkHeader(OFile,ChunkHeader)) return false;
	if (ChunkHeader.ID != MASTER)
	{
		if (DBOut) fprintf(DBOut,"ERROR: not a 3DS Max model file\n");
		return false;
	}
	
	// Parse everything under the master chunk.
	if (!ReadContainer(OFile,ChunkHeader,ReadMaster)) return false;
	
	// Success requires both geometry arrays to be non-empty.
	return (!Model.Positions.empty() && !Model.VertIndices.empty());
}
Пример #8
0
/**
   Remove the model with the given name from the argument list.

   @param modelName name of the model to remove.
   @param list      list to search and erase from.
   @return true if a matching model was found and removed, false otherwise.
  */
bool ShadowableScene::removeModelFromList(string modelName, vector<Model3D*> &list)
{
   // size_t avoids the signed/unsigned comparison against list.size().
   for (vector<Model3D*>::size_type index = 0; index < list.size(); index++)
   {
      Model3D *aModel = list[index];
      if (aModel->getName() == modelName)
      {
         // BUGFIX: erase the matching element from the list that was passed
         // in. The old code passed a raw element pointer (&list[index]) to
         // shadowCasterList.erase(), which is not a valid iterator for that
         // container and ignored the argument list entirely.
         list.erase(list.begin() + index);
         return true;
      }
   }

   return false;
}
Пример #9
0
// Adds a copy of the given model to the database under the given id.
// Returns true if the model was inserted, false if the id already existed
// (std::map::insert is a no-op for duplicate keys).
bool ModelDatabase::AddModel(Model3D model, string id)
{
	// Size before the insert, used to detect whether an entry was added.
	unsigned int sizeBefore = mModels.size();

	Model3D temp;
	temp.Copy(model);

	mModels.insert(pair<string, Model3D>(id, temp));

	// BUGFIX: the map grew if its size INCREASED. The original comparison
	// (sizeBefore > mModels.size()) was inverted and always returned false.
	return mModels.size() > sizeBefore;
}
Пример #10
0
/**
  Render the model list to the screen.

  Each model is drawn inside its own matrix-stack frame: translated to its
  position, oriented by its FTM, coloured (unlit) or given material
  properties (lit), then emitted via its precompiled display list.
*/
void ShadowableScene::renderModelList(const vector<Model3D*> &modelList)
{
   // size_t index avoids the signed/unsigned comparison against .size().
   for (size_t index = 0; index < modelList.size(); index++)
   {
      Model3D *aModel = modelList[index];
      glPushMatrix();

      // Position the model
      Vector3D position(aModel->getPosition());
      glTranslatef(position.x, position.y, position.z);
      
      // Orient the model using its full transform matrix
      FTM rotations(aModel->getFTM());
      float tempMatrix[] = 
      { 
         rotations._00,rotations._01,rotations._02,rotations._03,
         rotations._10,rotations._11,rotations._12,rotations._13,
         rotations._20,rotations._21,rotations._22,rotations._23,
         rotations._30,rotations._31,rotations._32,rotations._33,
      };
      glMultMatrixf(tempMatrix);

      if (!aModel->isLit())
      {
         // Set the color (for non lit scenes)
         glColor3f(aModel->getRed(),aModel->getGreen(),aModel->getBlue());
         glDisable(GL_LIGHTING);
      }
      else
      {
         // Set the material props (for lit scenes)
         Material tempMaterial = aModel->getMaterial();
         float matSpecular[] = {tempMaterial.specularRed, tempMaterial.specularGreen, tempMaterial.specularBlue, tempMaterial.specularAlpha};
         glMaterialfv(GL_FRONT, GL_SPECULAR, matSpecular);
         float matShininess[] = {tempMaterial.shininess};
         glMaterialfv(GL_FRONT, GL_SHININESS, matShininess);
         float matAmbDiff[] = { tempMaterial.ambientDiffuseRed, tempMaterial.ambientDiffuseGreen, tempMaterial.ambientDiffuseBlue, tempMaterial.ambientDiffuseAlpha };
         glMaterialfv(GL_FRONT, GL_AMBIENT_AND_DIFFUSE, matAmbDiff);
         glEnable(GL_LIGHTING);
      }

      // draw the model
      glCallList(aModel->getCallListId());
      
      glPopMatrix();
   }
}
Пример #11
0
/* Renders the model's depth into the light's shadow map using the given
 * shadow-map shader. The MVP is built from the light's point of view.
 * Always returns true.
 *
 * Changes: removed the unused normalMatrix computation, and wrapped the
 * depth-state GL calls in the __() macro for consistency with every other
 * render path in this renderer. */
bool OpenGLRenderer::renderToShadowMap(Model3D &model3D, Light &light, NormalShadowMapShader &shader)
{
    /* Calculate MVP matrix from the light's perspective */
    glm::mat4 MVP = light.getProjectionMatrix() * light.getViewMatrix() * model3D.getModelMatrix();

    /* Cast the model into an internal type */
    OpenGLAsset3D *glObject = static_cast<OpenGLAsset3D *>(model3D.getAsset3D());

    /* Depth-only pass */
    __(glEnable(GL_DEPTH_TEST));
    __(glDepthFunc(GL_LESS));

    /* Bind the render target */
    light.getShadowMap()->bind();
    {
        /* Bind program to upload the uniform */
        shader.attach();

        /* Send our transformation to the currently bound shader, in the "MVP" uniform */
        shader.setUniformMat4("u_MVPMatrix", &MVP);

        /* Draw the model: one glDrawElements per index batch */
        __(glBindVertexArray(glObject->getVertexArrayID()));
        {
            std::vector<uint32_t> offset = glObject->getIndicesOffsets();
            std::vector<uint32_t> count = glObject->getIndicesCount();

            for (size_t i = 0; i < count.size(); ++i) {
                __(glDrawElements(GL_TRIANGLES, count[i], GL_UNSIGNED_INT, (void *)(offset[i] * sizeof(GLuint))));
            }
        }
        __(glBindVertexArray(0));

        /* Unbind */
        shader.detach();
    }
    light.getShadowMap()->unbind();

    return true;
}
Пример #12
0
// Draws the given model with gouraud shading.
void Rasterizer::DrawSolidShaded(Model3D& model)
{
	// Local copies of the model's geometry lists.
	std::vector<Polygon3D> polygons = model.GetPolygonList();
	std::vector<Vertex> vertices = model.GetTransformedVertexList();

	// Shade every front-facing polygon with its own colour.
	for (unsigned int index = 0; index < polygons.size(); index++)
	{
		Polygon3D& poly = polygons[index];
		if (poly.GetBackfacing())
			continue;

		// Fill the triangle formed by the polygon's three vertices.
		FillPolygonShaded(vertices[poly.GetVertexIndex(0)],
						  vertices[poly.GetVertexIndex(1)],
						  vertices[poly.GetVertexIndex(2)],
						  poly.GetColor());
	}
}
Пример #13
0
// Galerkin constructor.
//
// Captures the mesh metrics (outflow, vertex/node/element counts) from the
// Model3D and copies the current solution vectors (u, v, w, c) and gradient
// matrices (gx, gy, gz).
//
// Uses the member initializer list rather than default-construct-then-assign,
// so each member is copy-constructed directly from its source.
Galerkin::Galerkin(Model3D &_m,clVector &_uSol,
                   clVector &_vSol,
                   clVector &_wSol,
                   clVector &_cSol,
                   clMatrix &_gx,
                   clMatrix &_gy,
                   clMatrix &_gz)
    : outflow(_m.getOutflow()),
      numVerts(_m.getNumVerts()),
      numNodes(_m.getNumNodes()),
      numElems(_m.getNumElems()),
      uSol(_uSol),
      vSol(_vSol),
      wSol(_wSol),
      cSol(_cSol),
      gx(_gx),
      gy(_gy),
      gz(_gz)
{
}
Пример #14
0
// Paints the viewport: background video frame first, then every model in
// the list drawn on top with depth testing enabled.
void ViewportWidget::paintGL(QGLPainter *painter) {
    // Nothing to do without a model list.
    if (!_models)
        return;

    // Blending and smooth lines for the whole pass.
    glEnable(GL_BLEND);
    glEnable(GL_LINE_SMOOTH);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE);

    /* Draw the background video */
    drawVideoFrame(painter);

    /* Draw the models on top of the video */
    glEnable(GL_DEPTH_TEST);
    for (QListIterator<Model3D*> it(*_models); it.hasNext(); ) {
        it.next()->draw(painter);
    }

    // Restore the state we enabled above.
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_LINE_SMOOTH);
    glDisable(GL_BLEND);
}
Пример #15
0
/* Renders the model's normals (scaled by normalSize) into the render target
 * using the _renderNormals shader. Always returns true. */
bool OpenGLRenderer::renderModelNormals(Model3D &model3D, Camera &camera, RenderTarget &renderTarget, float normalSize)
{
    /* Calculate MVP matrix */
    glm::mat4 MVP = camera.getPerspectiveMatrix() * camera.getViewMatrix() * model3D.getModelMatrix();

    /* Calculate normal matrix */
    /* NOTE(review): normalMatrix is computed but never used in this function -
     * confirm whether it should be uploaded to the shader or removed */
    glm::mat3 normalMatrix = glm::transpose(glm::inverse(glm::mat3(model3D.getModelMatrix())));

    /* Cast the model into an internal type */
    OpenGLAsset3D *glObject = static_cast<OpenGLAsset3D *>(model3D.getAsset3D());

    /* Bind the render target */
    renderTarget.bind();
    {
        /* Bind program to upload the uniform */
        _renderNormals.attach();

        _renderNormals.setUniformMat4("u_MVPMatrix", &MVP);
        _renderNormals.setUniformFloat("u_normalSize", normalSize);

        /* Draw the model: one glDrawElements per index batch, using the
         * per-batch offsets/counts stored with the asset */
        __(glBindVertexArray(glObject->getVertexArrayID()));
        {
            std::vector<uint32_t> offset = glObject->getIndicesOffsets();
            std::vector<uint32_t> count = glObject->getIndicesCount();

            for (size_t i = 0; i < offset.size(); ++i) {
                __(glDrawElements(GL_TRIANGLES, count[i], GL_UNSIGNED_INT, (void *)(offset[i] * sizeof(GLuint))));
            }
        }
        __(glBindVertexArray(0));

        /* Unbind */
        _renderNormals.detach();
    }
    renderTarget.unbind();

    return true;
}
Пример #16
0
// Draws the given model in wireframe mode.
void Rasterizer::DrawWireFrame(Model3D& model)
{
	// Local copies of the model's geometry lists.
	std::vector<Polygon3D> polygons = model.GetPolygonList();
	std::vector<Vertex> vertices = model.GetTransformedVertexList();

	// Outline every front-facing polygon.
	for (unsigned int index = 0; index < polygons.size(); index++)
	{
		Polygon3D& poly = polygons[index];
		if (poly.GetBackfacing())
			continue;

		Vertex& a = vertices[poly.GetVertexIndex(0)];
		Vertex& b = vertices[poly.GetVertexIndex(1)];
		Vertex& c = vertices[poly.GetVertexIndex(2)];

		// Connect the three corners with lines.
		DrawLine(a.GetX(), a.GetY(), b.GetX(), b.GetY());
		DrawLine(b.GetX(), b.GetY(), c.GetX(), c.GetY());
		DrawLine(a.GetX(), a.GetY(), c.GetX(), c.GetY());

		_polygonsRendered++;
	}
}
Пример #17
0
bool LoadModel_Studio(FileSpecifier& Spec, Model3D& Model)
{
	ModelPtr = &Model;
	Model.Clear();
	
	Path = Spec.GetPath();
	logNote1("Loading 3D Studio Max model file %s",Path);
	
	OpenedFile OFile;
	if (!Spec.Open(OFile))
	{	
		logError1("ERROR opening %s",Path);
		return false;
	}
	
	ChunkHeaderData ChunkHeader;
	if (!ReadChunkHeader(OFile,ChunkHeader)) return false;
	if (ChunkHeader.ID != MASTER)
	{
		logError1("ERROR: not a 3DS Max model file: %s",Path);
		return false;
	}
	
	if (!ReadContainer(OFile,ChunkHeader,ReadMaster)) return false;
	
	if (Model.Positions.empty())
	{
		logError1("ERROR: no vertices found in %s",Path);
		return false;
	}
	if (Model.VertIndices.empty())
	{
		logError1("ERROR: no faces found in %s",Path);
		return false;
	}
	return true;
}
Пример #18
0
// Fills a polygon using a texture, gouraud shading and a normal map given 3 points and a color.
//
// v1/v2/v3 carry positions, normals and UV coordinates; 'color' is kept for
// interface compatibility with the other fill routines; 'model' supplies the
// diffuse and normal-map textures; the light lists drive the per-pixel
// normal-map lighting term.
void Rasterizer::FillPolygonTexturedNormalMapped(Vertex v1, Vertex v2, Vertex v3, Gdiplus::Color color, Model3D& model, std::vector<DirectionalLight*> directionalLights, std::vector<AmbientLight*> ambientLights, std::vector<PointLight*> pointLights)
{
	// RAII scanline buffer: replaces the manual new[]/delete[] pair so the
	// memory is released on every exit path.
	std::vector<ScanLine> _scanlines(_height);

	BYTE* texture;
	Gdiplus::Color* palette;
	BYTE* normalTexture;
	Gdiplus::Color* normalPalette;
	int textureWidth;

	// Get the texture properties of the model (both textures are assumed to
	// share the same width - the second call overwrites textureWidth).
	model.GetTexture(&texture, &palette, &textureWidth);
	model.GetNormalMapTexture(&normalTexture, &normalPalette, &textureWidth);

	// Set the scanlines to very high and very low values so
	// they will be set on the first set of interpolation.
	for (unsigned int i = 0; i < _height; i++)
	{
		_scanlines[i].xStart = 99999;
		_scanlines[i].xEnd = -99999;
	}

	// Interpolates between each of the vertexs of the polygon and sets the start
	// and end values for each of the scanlines it comes in contact with.
	InterpolateScanline(&_scanlines[0], v1, v2);
	InterpolateScanline(&_scanlines[0], v2, v3);
	InterpolateScanline(&_scanlines[0], v3, v1);

	// Go through each scanline and each pixel in the scanline and set its color.
	for (unsigned int y = 0; y < _height; y++)
	{
		// Work out the color, UV, normal and position differences between the
		// start and end of the scanline.
		float redColorDiff = (_scanlines[y].redEnd - _scanlines[y].redStart);
		float greenColorDiff = (_scanlines[y].greenEnd - _scanlines[y].greenStart);
		float blueColorDiff = (_scanlines[y].blueEnd - _scanlines[y].blueStart);
		float uCoordDiff = _scanlines[y].uEnd - _scanlines[y].uStart;
		float vCoordDiff = _scanlines[y].vEnd - _scanlines[y].vStart;
		float zCoordDiff = _scanlines[y].zEnd - _scanlines[y].zStart;

		float xNormalDiff = (_scanlines[y].xNormalEnd - _scanlines[y].xNormalStart);
		float yNormalDiff = (_scanlines[y].yNormalEnd - _scanlines[y].yNormalStart);
		float zNormalDiff = (_scanlines[y].zNormalEnd - _scanlines[y].zNormalStart);

		float xDiff = (_scanlines[y].pixelXEnd - _scanlines[y].pixelXStart);
		float yDiff = (_scanlines[y].pixelYEnd - _scanlines[y].pixelYStart);
		float zDiff = (_scanlines[y].pixelZEnd - _scanlines[y].pixelZStart);

		float diff = (_scanlines[y].xEnd - _scanlines[y].xStart) + 1;

		for (int x = (int)_scanlines[y].xStart; x <= (int)_scanlines[y].xEnd; x++)
		{
			// Clip to the frame buffer horizontally.
			if (x < 0 || x >= (int)_width)
				continue;

			int offset = (int)(x - _scanlines[y].xStart);
			
			// Work out the (perspective-corrected) UV coordinate of the pixel.
			float uCoord = _scanlines[y].uStart + ((uCoordDiff / diff) * offset);
			float vCoord = _scanlines[y].vStart + ((vCoordDiff / diff) * offset);
			float zCoord = _scanlines[y].zStart + ((zCoordDiff / diff) * offset);

			uCoord /= zCoord;
			vCoord /= zCoord;

			// Work out the normal of the pixel.
			float xNormal = _scanlines[y].xNormalStart + ((xNormalDiff / diff) * offset);
			float yNormal = _scanlines[y].yNormalStart + ((yNormalDiff / diff) * offset);
			float zNormal = _scanlines[y].zNormalStart + ((zNormalDiff / diff) * offset);

			// Work out the position of the pixel.
			float pixelX = _scanlines[y].pixelXStart + ((xDiff / diff) * offset);
			float pixelY = _scanlines[y].pixelYStart + ((yDiff / diff) * offset);
			float pixelZ = _scanlines[y].pixelZStart + ((zDiff / diff) * offset);

			// Using the UV coordinate work out which pixel in the texture to
			// use, clamping out-of-range indices to the last texel.
			int pixelIndex = (int)vCoord * textureWidth + (int)uCoord;
			if (pixelIndex >= textureWidth * textureWidth || pixelIndex < 0)
			{
				pixelIndex = (textureWidth * textureWidth) - 1;
			}

			int paletteOffset = texture[pixelIndex]; 
			if (paletteOffset >= 255)
				paletteOffset = 255;

			Gdiplus::Color textureColor = palette[paletteOffset];

			// Work out the pixel colour of the normal map (same index/clamp).
			pixelIndex = (int)vCoord * textureWidth + (int)uCoord;
			if (pixelIndex >= textureWidth * textureWidth || pixelIndex < 0)
			{
				pixelIndex = (textureWidth * textureWidth) - 1;
			}

			paletteOffset = normalTexture[pixelIndex]; 
			if (paletteOffset >= 255)
				paletteOffset = 255;

			Gdiplus::Color normalTextureColor = normalPalette[paletteOffset];

			// Decode the normal-map texel into a signed vector.
			Vector3D heightMapVector = Vector3D(normalTextureColor.GetR() / 180.0f, normalTextureColor.GetG() / 180.0f, normalTextureColor.GetB() / 180.0f); 
			heightMapVector = Vector3D((heightMapVector.GetX() - 0.5f) * 2.0f, (heightMapVector.GetY() - 0.5f) * 2.0f, (heightMapVector.GetZ() - 0.5f) * 2.0f);

			// Work out the pixel's interpolated normal and position.
			Vector3D pixelNormal = Vector3D(xNormal, yNormal, zNormal);
			Vertex pixelPosition = Vertex(pixelX, pixelY, pixelZ, 1, Gdiplus::Color::White, Vector3D(0, 0, 0), 0);

			// Modulate the decoded normal by the interpolated surface normal.
			heightMapVector = Vector3D((pixelNormal.GetX() * heightMapVector.GetX()) , 
										(pixelNormal.GetY() * heightMapVector.GetY()) , 
										(pixelNormal.GetZ() * heightMapVector.GetZ()) );

			// Calculate the sum dot product of all enabled lighting vectors
			// for this pixel, then average over the number of enabled lights.
			float lightDot = 0.0f;
			int count = 0;
			for (unsigned int j = 0; j < pointLights.size(); j++)
			{
				PointLight* light = pointLights[j];
				if (light->GetEnabled() == false)
					continue;
			
				// Work out vector to light source.
				Vector3D lightVector = Vertex::GetVector(pixelPosition, light->GetPosition());
				lightVector.Normalize();

				// Work out dot product.
				lightDot += Vector3D::DotProduct(heightMapVector, lightVector);
				count++;
			}
			for (unsigned int j = 0; j < directionalLights.size(); j++)
			{
				DirectionalLight* light = directionalLights[j];
				if (light->GetEnabled() == false)
					continue;
			
				// Work out vector to light source.
				Vector3D lightVector = Vertex::GetVector(pixelPosition, light->GetPosition());
				lightVector.Normalize();

				// Work out dot product.
				lightDot += Vector3D::DotProduct(heightMapVector, lightVector);
				count++;
			}
			// BUGFIX: guard the average against division by zero when no
			// lights are enabled (the old code produced NaN/Inf here).
			if (count > 0)
				lightDot /= count;

			// Work out the gouraud lighting colour of the current pixel.
			float lightR = (_scanlines[y].redStart + ((redColorDiff / diff) * offset)) / 180.0f;
			float lightG = (_scanlines[y].greenStart + ((greenColorDiff / diff) * offset)) / 180.0f;
			float lightB = (_scanlines[y].blueStart + ((blueColorDiff / diff) * offset)) / 180.0f;	

			// Apply the lighting value to the texture colour and use the result to set the colour of the current pixel.
			int finalR = (int)max(0, min(255, (lightR * textureColor.GetR()) - ((lightR * textureColor.GetR()) * lightDot) ));
			int finalG = (int)max(0, min(255, (lightG * textureColor.GetG()) - ((lightG * textureColor.GetG()) * lightDot) ));
			int finalB = (int)max(0, min(255, (lightB * textureColor.GetB()) - ((lightB * textureColor.GetB()) * lightDot) ));

			WritePixel(x, y, Gdiplus::Color(finalR, finalG, finalB));
		}
	}

	_polygonsRendered++;
}
Пример #19
0
// Fills a polygon using a texture and gouraud shading given 3 points and a color.
//
// v1/v2/v3 carry positions, colours and UV coordinates; 'color' is kept for
// interface compatibility; 'model' supplies the diffuse texture and palette.
void Rasterizer::FillPolygonTextured(Vertex v1, Vertex v2, Vertex v3, Gdiplus::Color color, Model3D& model)
{
	// RAII scanline buffer: replaces the manual new[]/delete[] pair so the
	// memory is released on every exit path.
	std::vector<ScanLine> _scanlines(_height);

	BYTE* texture;
	Gdiplus::Color* palette;
	int textureWidth;

	// Get the texture properties of the model.
	model.GetTexture(&texture, &palette, &textureWidth);

	// Set the scanlines to very high and very low values so
	// they will be set on the first set of interpolation.
	for (unsigned int i = 0; i < _height; i++)
	{
		_scanlines[i].xStart = 99999;
		_scanlines[i].xEnd = -99999;
	}

	// Interpolates between each of the vertexs of the polygon and sets the start
	// and end values for each of the scanlines it comes in contact with.
	InterpolateScanline(&_scanlines[0], v1, v2);
	InterpolateScanline(&_scanlines[0], v2, v3);
	InterpolateScanline(&_scanlines[0], v3, v1);

	// Go through each scanline and each pixel in the scanline and set its color.
	for (unsigned int y = 0; y < _height; y++)
	{
		// Work out the color and UV differences between the start and end of the scanline.
		float redColorDiff = (_scanlines[y].redEnd - _scanlines[y].redStart);
		float greenColorDiff = (_scanlines[y].greenEnd - _scanlines[y].greenStart);
		float blueColorDiff = (_scanlines[y].blueEnd - _scanlines[y].blueStart);
		float uCoordDiff = _scanlines[y].uEnd - _scanlines[y].uStart;
		float vCoordDiff = _scanlines[y].vEnd - _scanlines[y].vStart;
		float zCoordDiff = _scanlines[y].zEnd - _scanlines[y].zStart;
		float diff = (_scanlines[y].xEnd - _scanlines[y].xStart) + 1;

		for (int x = (int)_scanlines[y].xStart; x <= (int)_scanlines[y].xEnd; x++)
		{
			// Clip to the frame buffer horizontally.
			if (x < 0 || x >= (int)_width)
				continue;

			int offset = (int)(x - _scanlines[y].xStart);
			
			// Work out the (perspective-corrected) UV coordinate of the pixel.
			float uCoord = _scanlines[y].uStart + ((uCoordDiff / diff) * offset);
			float vCoord = _scanlines[y].vStart + ((vCoordDiff / diff) * offset);
			float zCoord = _scanlines[y].zStart + ((zCoordDiff / diff) * offset);

			uCoord /= zCoord;
			vCoord /= zCoord;

			// Work out the gouraud lighting colour of the current pixel.
			float lightR = (_scanlines[y].redStart + ((redColorDiff / diff) * offset)) / 180.0f;
			float lightG = (_scanlines[y].greenStart + ((greenColorDiff / diff) * offset)) / 180.0f;
			float lightB = (_scanlines[y].blueStart + ((blueColorDiff / diff) * offset)) / 180.0f;	

			// Using the UV coordinate work out which pixel in the texture to
			// use, clamping out-of-range indices to the last texel.
			int pixelIndex = (int)vCoord * textureWidth + (int)uCoord;
			if (pixelIndex >= textureWidth * textureWidth || pixelIndex < 0)
			{
				pixelIndex = (textureWidth * textureWidth) - 1;
			}

			int paletteOffset = texture[pixelIndex]; 
			if (paletteOffset >= 255)
				paletteOffset = 255;

			Gdiplus::Color textureColor = palette[paletteOffset];

			// Apply the lighting value to the texture colour and use the result to set the colour of the current pixel.
			int finalR = (int)max(0, min(255, textureColor.GetR() * lightR));
			int finalG = (int)max(0, min(255, textureColor.GetG() * lightG));
			int finalB = (int)max(0, min(255, textureColor.GetB() * lightB));

			WritePixel(x, y, Gdiplus::Color(finalR, finalG, finalB));
		}
	}

	_polygonsRendered++;
}
Пример #20
0
// Loads an MD2 model (first frame only) from 'filename' into 'model',
// optionally loading a PCX diffuse texture and a PCX normal-map texture.
//
// Returns false if the file cannot be opened or is not a valid MD2 file.
// Texture load failures are not fatal: the model simply loads untextured
// (and UV coordinates are skipped when no diffuse texture was loaded).
bool MD2Loader::LoadModel(const char* filename, Model3D& model, const char* textureFilename, const char* normalMapTextureFilename)
{
	ifstream   file;           

	// Try to open MD2 file
	file.open( filename, ios::in | ios::binary );
	if(file.fail())
	{
		return false;
	}
	// Read file header
	Md2Header header;         
	file.read( reinterpret_cast<char*>(&header), sizeof( Md2Header ) );
		
	// Verify that this is a MD2 file (check the magic number and version).
	// BUGFIX: reject the file if EITHER check fails - the original '&&' only
	// rejected files where both the magic number AND the version were wrong.
	if( (header.indent != MD2_IDENT) || (header.version != MD2_VERSION) )
	{
		// This is not a MD2 model
		file.close();
		return false;
	}

	// Allocate the memory we need
	Md2Triangle* triangles = new Md2Triangle[header.numTriangles];
	// We are only interested in the first frame 
	BYTE* frameBuffer = new BYTE[ header.frameSize ];
	Md2Frame* frame = reinterpret_cast<Md2Frame*>(frameBuffer);
	Md2TexCoord* texCoords = new Md2TexCoord[header.numTexCoords];

	// Read polygon data...
	file.seekg( header.offsetTriangles, ios::beg );
	file.read( reinterpret_cast<char*>(triangles), sizeof(Md2Triangle) * header.numTriangles );	
		
	// Read frame data...
	file.seekg( header.offsetFrames, ios::beg );
	file.read( reinterpret_cast<char*>(frame), header.frameSize );	

	// Read texture coordinate data
	file.seekg( header.offsetTexCoords, std::ios::beg );
	file.read( reinterpret_cast<char*>(texCoords), sizeof(Md2TexCoord) * header.numTexCoords );

	// Close the file
	file.close();

	//----------------------------------------------------------------------------------------------

	// Initialize model textures.
	bool bHasTexture = false;

	// Attempt to load texture
	if ( textureFilename != 0 )
	{
		BYTE* pTexture = new BYTE[header.skinWidth * header.skinHeight];
		Gdiplus::Color* pPalette = new Gdiplus::Color[256];
		
		bHasTexture = LoadPCX(textureFilename, pTexture, pPalette, &header);
		if ( !bHasTexture )
		{
			// BUGFIX: these buffers were allocated with new[], so they must
			// be released with delete[] - plain delete was undefined behaviour.
			delete [] pTexture;
			delete [] pPalette;
		}
		else
		{
			// The model takes ownership of the texture and palette buffers.
			model.SetTexture(pTexture, pPalette, header.skinWidth);
		}
	}

	// Attempt to load normal map texture
	if ( normalMapTextureFilename != 0 )
	{
		BYTE* pTexture = new BYTE[header.skinWidth * header.skinHeight];
		Gdiplus::Color* pPalette = new Gdiplus::Color[256];
		
		bool valid = LoadPCX(normalMapTextureFilename, pTexture, pPalette, &header);
		if (!valid)
		{
			// BUGFIX: array delete[] to match the new[] allocations above.
			delete [] pTexture;
			delete [] pPalette;
		}
		else
		{
			model.SetNormalMapTexture(pTexture, pPalette, header.skinWidth);
		}
	}

	// Polygon array initialization: one Polygon3D per MD2 triangle, carrying
	// both the vertex indices and the UV indices.
	for ( int i = 0; i < header.numTriangles; ++i )
	{
		Polygon3D newPoly = Polygon3D(triangles[i].vertexIndex[0],
									  triangles[i].vertexIndex[1],
									  triangles[i].vertexIndex[2]);
		newPoly.SetUVIndex(0, triangles[i].uvIndex[0]);
		newPoly.SetUVIndex(1, triangles[i].uvIndex[1]);
		newPoly.SetUVIndex(2, triangles[i].uvIndex[2]);

		model.GetPolygonList().push_back(newPoly);
	}

	// Vertex array initialization. Y and Z are swapped because Z is the
	// up-axis in MD2 while this engine uses Y as the up-axis.
	for( int i = 0; i < header.numVertices; ++i )
	{
		float x = (frame->verts[i].v[0] * frame->scale[0]) + frame->translate[0];
		float y = (frame->verts[i].v[2] * frame->scale[2]) + frame->translate[2];
		float z = (frame->verts[i].v[1] * frame->scale[1]) + frame->translate[1];

		Vertex vert = Vertex(x, y, z, 1.0f, Gdiplus::Color::Black, Vector3D(0,0,0), 0);
		model.GetVertexList().push_back(vert);
	}

	// Load UV coordinates (only when a diffuse texture was loaded).
	if (bHasTexture)
	{
		for (int i = 0; i < header.numTexCoords; i++)
		{
			short u = texCoords[i].texCoord[0];			
			short v = texCoords[i].texCoord[1];

			UVCoordinate uvCoord;
			uvCoord.U = u;
			uvCoord.V = v;
			model.GetUVCoordinateList().push_back(uvCoord);
		}
	}

	// Rebuild model lists.
	model.RebuildTransformedVerticesList();

	// Free dynamically allocated memory (array delete to match new[]).
	delete [] triangles;
	triangles = 0;

	delete [] frameBuffer;
	frameBuffer = 0;
	frame = 0;

	delete [] texCoords;
	texCoords = 0;

	return true;
}
Пример #21
0
/**
 * Renders a Model3D into 'renderTarget' using the given lighting shader.
 *
 * Uploads the MVP and normal matrices, binds the diffuse texture unit and a
 * dummy texture, configures the directional/point/spot lights (including
 * their bias-adjusted shadow-map MVPs and depth textures), then draws the
 * asset's geometry one material group at a time.
 *
 * @param model3D       Model to render (its asset must be an OpenGLAsset3D)
 * @param camera        Camera providing the view/projection matrices and depth range
 * @param shader        Lighting shader used for the draw
 * @param sun           Optional directional light (may be NULL)
 * @param pointLights   Point lights to upload (clamped to MAX_LIGHTS)
 * @param spotLights    Spot lights to upload (clamped to MAX_LIGHTS)
 * @param ambientK      Ambient lighting factor
 * @param renderTarget  Target framebuffer for the draw
 * @param disableDepth  If true, depth testing is disabled up front.
 *                      NOTE(review): depth test is unconditionally re-enabled
 *                      once the target is bound, so this flag appears to be a
 *                      no-op (see TODO below) — kept for interface compatibility.
 *
 * @return always true
 */
bool OpenGLRenderer::renderModel3D(Model3D &model3D, Camera &camera, LightingShader &shader, DirectLight *sun,
                                   std::vector<PointLight *> &pointLights, std::vector<SpotLight *> &spotLights, float ambientK,
                                   RenderTarget &renderTarget, bool disableDepth)
{
    /* Maps clip-space [-1,1] to texture-space [0,1] for shadow-map lookups */
    glm::mat4 biasMatrix(0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.5, 0.5, 1.0);
    GLuint textureUnit = 0;
    GLuint dummyTextureUnit = 0;

    __(glDepthRangef(camera.getNear(), camera.getFar()));

    /* Calculate MVP matrix */
    glm::mat4 MVP = camera.getPerspectiveMatrix() * camera.getViewMatrix() * model3D.getModelMatrix();

    /* Calculate normal matrix (inverse-transpose handles non-uniform scaling) */
    glm::mat3 normalMatrix = glm::transpose(glm::inverse(glm::mat3(model3D.getModelMatrix())));

    /* Cast the model into an internal type */
    OpenGLAsset3D *glObject = static_cast<OpenGLAsset3D *>(model3D.getAsset3D());

    /* TODO: is this even used????? below we enable it always :P */
    if (disableDepth) {
        glDisable(GL_DEPTH_TEST);
    } else {
        glEnable(GL_DEPTH_TEST);
    }

    if (getWireframeMode() == Renderer::RENDER_WIREFRAME_ONLY) {
        __(glPolygonMode(GL_FRONT_AND_BACK, GL_LINE));
        __(glEnable(GL_LINE_SMOOTH));
        __(glDisable(GL_CULL_FACE));
    } else {
        __(glPolygonMode(GL_FRONT_AND_BACK, GL_FILL));
        __(glDisable(GL_LINE_SMOOTH));
        __(glEnable(GL_CULL_FACE));
    }

    /* Bind the render target */
    renderTarget.bind();
    {
        __(glEnable(GL_MULTISAMPLE));

        __(glEnable(GL_DEPTH_TEST));
        __(glBlendEquation(GL_FUNC_ADD));
        __(glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA));
        __(glEnable(GL_BLEND));

        /* Bind program to upload the uniforms */
        shader.attach();

        /* Send our transformation to the currently bound shader, in the "MVP" uniform */
        shader.setUniformMat4("u_MVPMatrix", &MVP);
        shader.setUniformMat4("u_viewMatrix", &camera.getViewMatrix());
        shader.setUniformMat4("u_modelMatrix", &model3D.getModelMatrix());
        shader.setUniformMat3("u_normalMatrix", &normalMatrix);
        shader.setUniformTexture2D("u_diffuseMap", textureUnit++);
        shader.setUniformFloat("u_ambientK", ambientK);

        /* Activate and bind a unit for the dummy texture (used for unset shadow maps) */
        dummyTextureUnit = textureUnit++;
        __(glActiveTexture(GL_TEXTURE0 + dummyTextureUnit));
        __(glBindTexture(GL_TEXTURE_2D, _dummyTexture));

        /* Set the sun light */
        if (sun != NULL) {
            glm::mat4 shadowMVP = sun->getProjectionMatrix() * sun->getViewMatrix() * model3D.getModelMatrix();
            shadowMVP = biasMatrix * shadowMVP;

            shader.setDirectLight(*sun);
            shader.setUniformUint("u_numDirectLights", 1);

            /* TODO: This has to be set in a matrix array */
            shader.setUniformMat4("u_shadowMVPDirectLight", &shadowMVP);
            shader.setUniformTexture2D("u_shadowMapDirectLight", textureUnit);

            __(glActiveTexture(GL_TEXTURE0 + textureUnit));
            if (model3D.isShadowReceiver()) {
                sun->getShadowMap()->bindDepth();
            } else {
                __(glBindTexture(GL_TEXTURE_2D, _noshadowTexture));
            }

            textureUnit++;
        } else {
            shader.setUniformUint("u_numDirectLights", 0);
            shader.setUniformTexture2D("u_shadowMapDirectLight", dummyTextureUnit);
        }

        /* Point lights.
         *
         * The light count is clamped to the shader's capacity: previously the
         * loop indexed the fixed-size 'texturesArray' with the raw light count,
         * overrunning the stack array when more than MAX_LIGHTS lights were
         * passed in. The MVP scratch buffer is a vector (RAII) instead of a
         * manually new[]/delete[]-managed array. */
        uint32_t numPointLights = static_cast<uint32_t>(pointLights.size());
        if (numPointLights > OpenGLLightingShader::MAX_LIGHTS) {
            numPointLights = OpenGLLightingShader::MAX_LIGHTS;
        }

        std::vector<glm::mat4> shadowMVPArray(OpenGLLightingShader::MAX_LIGHTS);
        GLuint texturesArray[OpenGLLightingShader::MAX_LIGHTS];

        for (uint32_t numLight = 0; numLight < numPointLights; ++numLight) {
            shader.setPointLight(numLight, *pointLights[numLight]);

            /* Calculate adjusted shadow map matrix */
            glm::mat4 shadowMVP =
                pointLights[numLight]->getProjectionMatrix() * pointLights[numLight]->getViewMatrix() * model3D.getModelMatrix();

            shadowMVPArray[numLight] = biasMatrix * shadowMVP;
            texturesArray[numLight] = textureUnit;

            __(glActiveTexture(GL_TEXTURE0 + textureUnit));
            if (model3D.isShadowReceiver()) {
                pointLights[numLight]->getShadowMap()->bindDepth();
            } else {
                __(glBindTexture(GL_TEXTURE_2D, _noshadowTexture));
            }

            textureUnit++;
        }
        /* Point the unused shadow-map samplers at the dummy texture */
        for (uint32_t numLight = numPointLights; numLight < OpenGLLightingShader::MAX_LIGHTS; ++numLight) {
            texturesArray[numLight] = dummyTextureUnit;
        }

        shader.setUniformMat4("u_shadowMVPPointLight[0]", shadowMVPArray.data(), numPointLights);
        shader.setUniformTexture2DArray("u_shadowMapPointLight[0]", texturesArray, OpenGLLightingShader::MAX_LIGHTS);
        shader.setUniformUint("u_numPointLights", numPointLights);

        /* Spotlights: same clamping as for the point lights; the MVP scratch
         * vector is reused since the point-light data was already uploaded */
        uint32_t numSpotLights = static_cast<uint32_t>(spotLights.size());
        if (numSpotLights > OpenGLLightingShader::MAX_LIGHTS) {
            numSpotLights = OpenGLLightingShader::MAX_LIGHTS;
        }

        for (uint32_t numLight = 0; numLight < numSpotLights; ++numLight) {
            shader.setSpotLight(numLight, *spotLights[numLight]);

            /* Calculate adjusted shadow map matrix */
            glm::mat4 shadowMVP =
                spotLights[numLight]->getProjectionMatrix() * spotLights[numLight]->getViewMatrix() * model3D.getModelMatrix();

            shadowMVPArray[numLight] = biasMatrix * shadowMVP;
            texturesArray[numLight] = textureUnit;

            __(glActiveTexture(GL_TEXTURE0 + textureUnit));
            if (model3D.isShadowReceiver()) {
                spotLights[numLight]->getShadowMap()->bindDepth();
            } else {
                __(glBindTexture(GL_TEXTURE_2D, _noshadowTexture));
            }

            textureUnit++;
        }
        for (uint32_t numLight = numSpotLights; numLight < OpenGLLightingShader::MAX_LIGHTS; ++numLight) {
            texturesArray[numLight] = dummyTextureUnit;
        }

        shader.setUniformMat4("u_shadowMVPSpotLight[0]", shadowMVPArray.data(), numSpotLights);
        shader.setUniformTexture2DArray("u_shadowMapSpotLight[0]", texturesArray, OpenGLLightingShader::MAX_LIGHTS);
        shader.setUniformUint("u_numSpotLights", numSpotLights);

        /* Set the shader custom parameters */
        shader.setCustomParams();

        /* Draw the model, one material group at a time */
        __(glBindVertexArray(glObject->getVertexArrayID()));
        {
            __(glActiveTexture(GL_TEXTURE0));

            std::vector<Material> materials = glObject->getMaterials();
            std::vector<uint32_t> texturesIDs = glObject->getTexturesIDs();
            std::vector<uint32_t> offset = glObject->getIndicesOffsets();
            std::vector<uint32_t> count = glObject->getIndicesCount();

            for (size_t i = 0; i < materials.size(); ++i) {
                __(glBindTexture(GL_TEXTURE_2D, texturesIDs[i]));
                shader.setMaterial(materials[i]);

                __(glDrawElements(GL_TRIANGLES, count[i], GL_UNSIGNED_INT, (void *)(offset[i] * sizeof(GLuint))));
            }
        }
        __(glBindVertexArray(0));

        /* Unbind */
        shader.detach();
    }
    renderTarget.unbind();

    return true;
}
Пример #22
0
/**
 * Rebuilds the gripper's finger geometry inside the workcell after the
 * gripper parameters have changed.
 *
 * Removes the existing finger objects, creates fresh RigidObjects with the
 * current finger geometry (the right finger mirrored by a Z-negating
 * rotation), re-registers them with both the kinematic (wc) and dynamic
 * (dwc) workcell, then updates the TCP frame transform, the device joint
 * bounds derived from jaw distance/stroke, and the motor force limits.
 *
 * NOTE(review): the results of findObject()/findFrame()/findBody() are used
 * unchecked — a missing "gripper.*" frame/body would dereference null here.
 * Confirm these entities always exist in the loaded workcell.
 */
void OldGripper::updateGripper(
                               rw::models::WorkCell::Ptr wc,
                               rwsim::dynamics::DynamicWorkCell::Ptr dwc,
                               rw::models::Device::Ptr dev,
                               rwsim::dynamics::RigidDevice::Ptr ddev,
                               rw::kinematics::State& state,
                               MovableFrame::Ptr tcpFrame
                               ) {

    // Both fingers share the same parametric geometry; the right one is
    // mirrored below via its model/geometry transform.
    Geometry::Ptr baseGeometry = getBaseGeometry();
    Geometry::Ptr leftGeometry = getFingerGeometry();
    Geometry::Ptr rightGeometry = getFingerGeometry();

    // remove existing objects
    DEBUG << "- Removing objects..." << endl;
    //wc->removeObject(wc->findObject("gripper.Base").get());
    wc->removeObject(wc->findObject("gripper.LeftFinger").get());
    wc->removeObject(wc->findObject("gripper.RightFinger").get());
    DEBUG << "- Objects removed." << endl;

    // create and add new objects
    DEBUG << "- Adding new objects..." << endl;

    // if base is parametrized, the box has to be moved from origin by half its height
    /*Transform3D<> baseT;
    baseT = Transform3D<>(-0.5 * _basez * Vector3D<>::z());

    RigidObject* baseobj = new RigidObject(wc->findFrame("gripper.Base"));
    Model3D* basemodel = new Model3D("BaseModel");
    basemodel->addTriMesh(Model3D::Material("stlmat", 0.4f, 0.4f, 0.4f), *baseGeometry->getGeometryData()->getTriMesh());
    basemodel->setTransform(baseT);
    baseGeometry->setTransform(baseT);
    baseobj->addModel(basemodel);
    baseobj->addGeometry(baseGeometry);
    wc->add(baseobj);
    dwc->findBody("gripper.Base")->setObject(baseobj);*/

    // Left finger: visual model (Model3D) and collision geometry at identity.
    // NOTE(review): ownership of the raw RigidObject/Model3D allocations is
    // presumably taken over by the workcell — confirm against RobWork's API.
    RigidObject* leftobj = new RigidObject(wc->findFrame("gripper.LeftFinger"));
    Model3D* leftmodel = new Model3D("LeftModel");
    leftmodel->addTriMesh(Model3D::Material("stlmat", 0.4f, 0.4f, 0.4f), *leftGeometry->getGeometryData()->getTriMesh());
    leftmodel->setTransform(Transform3D<>());
    leftGeometry->setTransform(Transform3D<>());
    leftobj->addModel(leftmodel);
    leftobj->addGeometry(leftGeometry);
    wc->add(leftobj);
    dwc->findBody("gripper.LeftFinger")->setObject(leftobj);

    // Right finger: same geometry mirrored (rotation with -1 in the Z row).
    RigidObject* rightobj = new RigidObject(wc->findFrame("gripper.RightFinger"));
    Model3D* rightmodel = new Model3D("RightModel");
    rightmodel->addTriMesh(Model3D::Material("stlmat", 0.4f, 0.4f, 0.4f), *rightGeometry->getGeometryData()->getTriMesh());
    rightmodel->setTransform(Transform3D<>(Vector3D<>(), Rotation3D<>(1, 0, 0, 0, 1, 0, 0, 0, -1)));
    rightGeometry->setTransform(Transform3D<>(Vector3D<>(), Rotation3D<>(1, 0, 0, 0, 1, 0, 0, 0, -1)));
    rightobj->addModel(rightmodel);
    rightobj->addGeometry(rightGeometry);
    wc->add(rightobj);
    dwc->findBody("gripper.RightFinger")->setObject(rightobj);
    DEBUG << "Objects added." << endl;

    // set tcp: place the TCP frame along the finger at (length - offset)
    tcpFrame->setTransform(Transform3D<>(Vector3D<>(0, 0, _length - _tcpoffset)), state);

    // set bounds: opening range derived from jaw distance and stroke
    double minOpening = 0.5 * _jawdist;

    dev->setBounds(make_pair(Q(1, minOpening), Q(1, minOpening + 0.5 * _stroke)));
    dev->setQ(Q(1, minOpening), state);

    // set force: same limit on both finger motors
    ddev->setMotorForceLimits(Q(2, _force, _force));

    DEBUG << "Gripper updated!" << endl;
}
Пример #23
0
/**
 * Two-phase (gas/liquid) rising-bubble simulation driver (PETSc + FEM).
 *
 * Reproduces case 2 of Bhaga & Weber (JFM 1980), as used in Bogdan's 2010
 * thesis.  Command-line modes:
 *   (none)     - start a fresh simulation from the .msh mesh
 *   restart N  - resume from previously saved iteration N
 *   remesh  N  - rebuild the mesh from iteration N, interpolate and continue
 *   restop  N  - rebuild/interpolate from iteration N, save results and exit
 *
 * The main loop alternates ALE time steps with periodic mesh treatment
 * (point insertion/removal, edge contraction/flipping, full remeshing).
 */
int main(int argc, char **argv)
{
 PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);
 //PetscInitializeNoArguments();

 // bogdan's thesis 2010 (Bhaga and Weber, JFM 1980)
 int iter = 1;
 //double Re = 6.53; // case 1
 double Re = 13.8487; // case 2
 //double Re = 32.78; // case 3
 double Sc = 1000;
 double We = 115.66;
 double Fr = 1.0;
 double c1 = 0.0;  // lagrangian
 double c2 = 1.0;  // smooth vel
 double c3 = 10.0;  // smooth coord (fujiwara)
 double d1 = 1.0;  // surface tangent velocity u_n=u-u_t 
 double d2 = 0.1;  // surface smooth cord (fujiwara)
 double alpha = 1.0;

 // fluid properties: "in" = bubble phase, "out" = surrounding liquid
 double mu_in = 0.0000178;
 double mu_out = 1.28;

 double rho_in = 1.225;
 double rho_out = 1350;

 double cfl = 0.8;

 string meshFile = "airWaterSugar.msh";
 //string meshFile = "test.msh";
 
 // linear solvers: GMRES+ILU for pressure, CG+ICC for velocity/concentration
 Solver *solverP = new PetscSolver(KSPGMRES,PCILU);
 //Solver *solverP = new PetscSolver(KSPGMRES,PCJACOBI);
 Solver *solverV = new PetscSolver(KSPCG,PCICC);
 //Solver *solverV = new PetscSolver(KSPCG,PCJACOBI);
 Solver *solverC = new PetscSolver(KSPCG,PCICC);

 const char *binFolder  = "./bin/";
 const char *vtkFolder  = "./vtk/";
 const char *mshFolder  = "./msh/";
 const char *datFolder  = "./dat/";
 string meshDir = (string) getenv("DATA_DIR");
 meshDir += "/gmsh/3d/rising/" + meshFile;
 const char *mesh = meshDir.c_str();

 Model3D m1;
 Simulator3D s1;

 if( *(argv+1) == NULL )     
 {
  cout << endl;
  cout << "--------------> STARTING FROM 0" << endl;
  cout << endl;

  const char *mesh1 = mesh;

  // build the 3D mesh from the 2D surface mesh and set up the FE spaces
  m1.readMSH(mesh1);
  m1.setInterfaceBC();
  m1.setTriEdge();
  m1.mesh2Dto3D();
  m1.setMapping();
#if NUMGLEU == 5
 m1.setMiniElement();
#else
 m1.setQuadElement();
#endif
  m1.setSurfaceConfig();
  m1.setInitSurfaceVolume();
  m1.setInitSurfaceArea();
  m1.setGenericBC();

  s1(m1);

  // dimensionless groups and numerical parameters defined above
  s1.setRe(Re);
  s1.setSc(Sc);
  s1.setWe(We);
  s1.setFr(Fr);
  s1.setC1(c1);
  s1.setC2(c2);
  s1.setC3(c3);
  s1.setD1(d1);
  s1.setD2(d2);
  s1.setAlpha(alpha);
  s1.setMu(mu_in,mu_out);
  s1.setRho(rho_in,rho_out);
  s1.setCfl(cfl);
  s1.init();
  s1.initHeatTransfer();
  s1.setDtALETwoPhase();
  s1.setSolverPressure(solverP);
  s1.setSolverVelocity(solverV);
  s1.setSolverConcentration(solverC);
 }
 else if( strcmp( *(argv+1),"restart") == 0 ) 
 {
  cout << endl;
  cout << "--------------> RE-STARTING..." << endl;
  cout << endl;

  // load surface mesh
  string aux = *(argv+2);
  string file = (string) "./msh/newMesh-" + *(argv+2) + (string) ".msh";
  const char *mesh2 = file.c_str();
  m1.readMSH(mesh2);
  m1.setInterfaceBC();
  m1.setTriEdge();
  m1.mesh2Dto3D();

  s1(m1);

  // load 3D mesh
  file = (string) "./vtk/sim-" + *(argv+2) + (string) ".vtk";
  const char *vtkFile = file.c_str();

  m1.readVTK(vtkFile);
  m1.setMapping();
#if NUMGLEU == 5
  m1.setMiniElement();
#else
  m1.setQuadElement();
#endif
  m1.readVTKHeaviside(vtkFile);
  m1.setSurfaceConfig();
  m1.setInitSurfaceVolume();
  m1.setInitSurfaceArea();
  m1.setGenericBC();

  s1(m1);

  s1.setSolverPressure(solverP);
  s1.setSolverVelocity(solverV);
  s1.setSolverConcentration(solverC);

  // resume iteration counter from the saved solution
  iter = s1.loadSolution("./","sim",atoi(*(argv+2)));
 }
 else if( strcmp( *(argv+1),"remesh") == 0 ) 
 {
  cout << endl;
  cout << "--------------> RE-MESHING & STARTING..." << endl;
  cout << endl;

  // load old mesh
  Model3D mOld;
  string file = (string) "./vtk/sim-" + *(argv+2) + (string) ".vtk";
  const char *vtkFile = file.c_str();
  mOld.readVTK(vtkFile);
  mOld.readVTKHeaviside(vtkFile);
  mOld.setMapping();

  // load surface mesh and create new mesh
  file = (string) "./msh/newMesh-" + *(argv+2) + (string) ".msh";
  const char *mesh2 = file.c_str();
  m1.readMSH(mesh2);
  m1.setInterfaceBC();
  m1.setTriEdge();
  m1.mesh2Dto3DOriginal();
  m1.setMapping();
#if NUMGLEU == 5
  m1.setMiniElement();
#else
  m1.setQuadElement();
#endif
  m1.setSurfaceConfig();
  m1.setInitSurfaceVolume();
  m1.setInitSurfaceArea();
  m1.setGenericBC();

  s1(m1);

  s1.setSolverPressure(solverP);
  s1.setSolverVelocity(solverV);
  s1.setSolverConcentration(solverC);
  iter = s1.loadSolution("./","sim",atoi(*(argv+2)));
  // transfer the saved solution from the old mesh onto the new one
  s1.applyLinearInterpolation(mOld);
 }
 else if( strcmp( *(argv+1),"restop") == 0 )  
 {
  cout << endl;
  cout << "--------------> RE-MESHING (NO ITERATION)..." << endl;
  cout << endl;

  // load old mesh
  Model3D mOld;
  string file = (string) "./vtk/sim-" + *(argv+2) + (string) ".vtk";
  const char *vtkFile = file.c_str();
  mOld.readVTK(vtkFile);
  mOld.readVTKHeaviside(vtkFile);
  mOld.setMapping();

  // load surface mesh and create new one
  file = (string) "./msh/newMesh-" + *(argv+2) + (string) ".msh";
  const char *mesh2 = file.c_str();
  m1.readMSH(mesh2);
  m1.setInterfaceBC();
  m1.setTriEdge();
  m1.mesh2Dto3DOriginal();
  m1.setMapping();
#if NUMGLEU == 5
  m1.setMiniElement();
#else
  m1.setQuadElement();
#endif
  m1.setSurfaceConfig();
  m1.setInitSurfaceVolume();
  m1.setInitSurfaceArea();

  s1(m1);
  //file = (string) "sim-" + *(argv+2);
  //const char *sol = file.c_str();
  iter = s1.loadSolution("./","sim",atoi(*(argv+2)));
  s1.applyLinearInterpolation(mOld);

  // save the remeshed/interpolated state and exit without iterating
  InOut saveEnd(m1,s1); // create the output (saving) object
  saveEnd.saveVTK(vtkFolder,"sim",atoi(*(argv+2)));
  saveEnd.saveMSH(mshFolder,"newMesh",atoi(*(argv+2)));
  saveEnd.saveSol(binFolder,"sim",atoi(*(argv+2)));
  //saveEnd.saveVTKSurface(vtkFolder,"sim",atoi(*(argv+2)));
  return 0;
 }
 // Point's distribution
 // (solve a Helmholtz problem to define the target edge-size field used
 //  by the remeshing steps below)
 Helmholtz3D h1(m1);
 h1.setBC();
 h1.initRisingBubble();
 h1.assemble();
 h1.setk(0.2);
 h1.matMountC();
 h1.setUnCoupledCBC(); 
 h1.setCRHS();
 h1.unCoupledC();
 //h1.saveVTK(vtkFolder,"edge");
 h1.setModel3DEdgeSize();

 InOut save(m1,s1); // create the output (saving) object
 save.saveVTK(vtkFolder,"geometry");
 save.saveVTKSurface(vtkFolder,"geometry");
 save.saveMeshInfo(datFolder);
 save.saveInfo(datFolder,"info",mesh);

 // main time loop: nReMesh ALE steps per mesh-treatment pass
 int nIter = 3000;
 int nReMesh = 1;
 for( int i=1;i<=nIter;i++ )
 {
  for( int j=0;j<nReMesh;j++ )
  {

   cout << color(none,magenta,black);
   cout << "____________________________________ Iteration: " 
	    << iter << endl << endl;
   cout << resetColor();

   //s1.stepLagrangian();
   s1.stepALE();
   s1.setDtALETwoPhase();

   InOut save(m1,s1); // create the output (saving) object
   save.printSimulationReport();

   // assemble and solve the uncoupled velocity/pressure/concentration systems
   s1.movePoints();
   s1.assemble();
   s1.matMount();
   s1.matMountC();
   s1.setUnCoupledBC();
   s1.setUnCoupledCBC();
   s1.setRHS();
   s1.setCRHS();
   s1.setGravity("Z");
   //s1.setInterface();
   s1.setInterfaceGeo();
   s1.unCoupled();
   s1.unCoupledC();

   save.saveMSH(mshFolder,"newMesh",iter);
   save.saveVTK(vtkFolder,"sim",iter);
   save.saveVTKSurface(vtkFolder,"sim",iter);
   save.saveSol(binFolder,"sim",iter);
   save.saveBubbleInfo(datFolder);
   //save.crossSectionalVoidFraction(datFolder,"voidFraction",iter);

   s1.saveOldData();

   s1.timeStep();

   cout << color(none,magenta,black);
   cout << "________________________________________ END of " 
	    << iter << endl << endl;;
   cout << resetColor();

   iter++;
  }
  // recompute the edge-size field on the current mesh before remeshing
  Helmholtz3D h2(m1,h1);
  h2.setBC();
  h2.initRisingBubble();
  h2.assemble();
  h2.matMountC();
  h2.setUnCoupledCBC(); 
  h2.setCRHS();
  h2.unCoupledC();
  h2.saveVTK(vtkFolder,"edge",iter-1);
  h2.saveChordalEdge(datFolder,"edge",iter-1);
  h2.setModel3DEdgeSize();

  Model3D mOld = m1; 

  /* *********** MESH TREATMENT ************* */
  // set normal and kappa values
  m1.setNormalAndKappa();
  m1.initMeshParameters();

  // 3D operations
  //m1.insert3dMeshPointsByDiffusion();
  m1.remove3dMeshPointsByDiffusion();
  //m1.removePointByVolume();
  //m1.removePointsByInterfaceDistance();
  //m1.remove3dMeshPointsByDistance();
  m1.remove3dMeshPointsByHeight();
  m1.delete3DPoints();

  // surface operations
  m1.smoothPointsByCurvature();

  m1.insertPointsByLength("curvature");
  //m1.insertPointsByCurvature("flat");
  //m1.removePointsByCurvature();
  //m1.insertPointsByInterfaceDistance("flat");
  m1.contractEdgesByLength("curvature");
  //m1.removePointsByLength();
  m1.flipTriangleEdges();

  m1.removePointsByNeighbourCheck();
  //m1.checkAngleBetweenPlanes();
  /* **************************************** */

  //m1.mesh2Dto3DOriginal();
  m1.mesh3DPoints();
  m1.setMapping();
#if NUMGLEU == 5
 m1.setMiniElement();
#else
 m1.setQuadElement();
#endif
  m1.setSurfaceConfig();
  m1.setInterfaceBC();
  m1.setGenericBC();

  // interpolate the solution onto the treated mesh and continue with it
  Simulator3D s2(m1,s1);
  s2.applyLinearInterpolation(mOld);
  s1 = s2;
  s1.setSolverPressure(solverP);
  s1.setSolverVelocity(solverV);
  s1.setSolverConcentration(solverC);

  InOut saveEnd(m1,s1); // create the output (saving) object
  saveEnd.printMeshReport();
  saveEnd.saveMeshInfo(datFolder);
 }

 PetscFinalize();
 return 0;
}
Пример #24
0
std::vector<Model3D*> ModelUtils::LoadModel(const std::string& filePath)
{
   Assimp::Importer importer;
   const aiScene* scene = importer.ReadFile(filePath,
      aiProcessPreset_TargetRealtime_MaxQuality | aiProcess_OptimizeGraph | aiProcess_PreTransformVertices);

   if (scene == nullptr){
      fatal("Failed to load model file: %s\n%s", filePath.c_str(), importer.GetErrorString());
   }

   vector<Model3D*> models;

   for (unsigned int meshIndex = 0; meshIndex < scene->mNumMeshes; meshIndex++)
   {
      Model3D* model = new Model3D();
      vector<Vertex> vertices;
      vector<unsigned short> indices;
      aiMesh* mesh = scene->mMeshes[meshIndex];
      if (mesh->HasPositions())
      {
         for (unsigned int i = 0; i < mesh->mNumVertices; i++)
         {
            aiVector3D* u = nullptr;
            if (mesh->HasTextureCoords(i))
            {
               aiVector3D* u = mesh->mTextureCoords[i];
            }

            aiVector3D v = mesh->mVertices[i];
            //aiColor4D c = aiColor4D((float)rand() / RAND_MAX, (float)rand() / RAND_MAX, (float)rand() / RAND_MAX, 1.0f);
            //if (mesh->HasVertexColors(i))
            //{
            //   c = *mesh->mColors[i];
            //}

            if (u != nullptr)
            {
               vertices.push_back(Vertex(Position(v.x, v.y, v.z), TextureCoordinate(u->x, u->y)));
            }
            else
            {
               vertices.push_back(Vertex(Position(v.x, v.y, v.z), TextureCoordinate(0, 0)));
            }
         }
      }
      if (mesh->HasFaces())
      {
         for (unsigned int i = 0; i < mesh->mNumFaces; i++)
         {
            aiFace face = mesh->mFaces[i];
            for (unsigned int j = 0; j < face.mNumIndices; j++)
            {
               indices.push_back(face.mIndices[j]);
            }
         }
      }

      model->Init(vertices, indices);
      models.push_back(model);
   }
   return models;
}
// Loads a 3D model from a QuickDraw-3D/Quesa file into 'Model'.
// On first use this lazily checks for QD3D's presence, initializes it, and
// creates the triangulator "renderer"; the loaded model object is then
// submitted for triangulation and the accumulated vertices are copied into
// 'Model'.  Returns true only if QD3D is available, the file loads, and at
// least one vertex was produced.
bool LoadModel_QD3D(FileSpecifier& Spec, Model3D& Model)
{
	// Clear out the final model object
	Model.Clear();
	
	// Test for QD3D/Quesa's presence and initialize it if not present
	// (one-time check cached in the QD3D_Presence_Checked/QD3D_Present globals)
	if (QD3D_Presence_Checked)
	{
		if (!QD3D_Present) return false;
	}
	else
	{
		QD3D_Presence_Checked = true;
		
		// MacOS QD3D; modify this for Quesa as appropriate
		// (a resolved CFM symbol address means the QD3D library is installed)
		if ((void*)Q3Initialize != (void*)kUnresolvedCFragSymbolAddress)
		{
			TQ3Status Success = Q3Initialize();
			QD3D_Present = (Success == kQ3Success);
		}
		
		// Do additional setup;
		// if the triangulator could not be created, then act as if
		// QD3D/Quesa had not been loaded
		if (QD3D_Present)
		{
			Q3Error_Register(QD3D_Error_Handler,0);
			QD3D_Present = CreateTriangulator();
		}
		if (!QD3D_Present)
			Q3Exit();
	}
	
	if (DBOut)
	{
		// Read buffer
		const int BufferSize = 256;
		char Buffer[BufferSize];
		Spec.GetName(Buffer);
		fprintf(DBOut,"Loading QuickDraw-3D model file %s\n",Buffer);
	}
	
	TQ3Object ModelObject = LoadModel(Spec);
	if (!ModelObject) return false;
	
	StartAccumulatingVertices();
	
	// "Render" the model through the triangulator view; instead of drawing,
	// the triangulator accumulates the emitted triangle vertices.
	if (Q3View_StartRendering(TriangulatorView) == kQ3Failure)
	{
		if (DBOut) fprintf(DBOut,"ERROR: couldn't start triangulation 'rendering'\n");
		Q3Object_Dispose(ModelObject);
		return false;
	}
	do
	{
		Q3SubdivisionStyle_Submit(&TesselationData, TriangulatorView);
		if (Q3Object_Submit(ModelObject, TriangulatorView) == kQ3Failure)
		{
			if (DBOut) fprintf(DBOut,"ERROR: model could not be 'rendered'\n");
		}
	}
	while (Q3View_EndRendering(TriangulatorView) == kQ3ViewStatusRetraverse);

	// Done with the model
	Q3Object_Dispose(ModelObject);
	
	// Copy the accumulated triangle data into the output model
	GetVerticesIntoModel(Model);
	
	return !Model.Positions.empty();
}
Пример #26
0
 /// Draws the model with the current texture applied; does nothing when
 /// no model has been assigned.
 virtual void draw()
 {
    if (model != nullptr)
    {
       model->set_texture(texture);
       model->draw();
    }
 }
Пример #27
0
/**
 * Writes 'msg' to stdout prefixed with the Model3D log category, then
 * forwards the model's underlying asset to the asset logging overload.
 */
void Logging::log(const char *msg, const Model3D &model)
{
    printf("[Model3D] %s\n", msg);

    const auto *asset = model.getAsset3D();
    log(" ", *asset);
}