//Signals the Bluetooth channel, then transmits the actuator's name, time,
//and value as one formatted data line over Serial.
void DataClass::sendBluetooth(Actuator* actuator, int timeDec, int valueDec)
{
    _signalBluetooth();

    //Format the record before handing it to Serial.
    String line = DataLine(actuator->_name,
                           FloatToString(actuator->_time, timeDec),
                           FloatToString(actuator->_value, valueDec));
    Serial.println(line);
}
//Signals the Bluetooth channel, then transmits the measurement's name, time,
//and value as one formatted data line over Serial.
void DataClass::sendBluetooth(Measurement* measurement, int timeDec, int valueDec)
{
    _signalBluetooth();

    //Format the record before handing it to Serial.
    String line = DataLine(measurement->_name,
                           FloatToString(measurement->_time, timeDec),
                           FloatToString(measurement->_value, valueDec));
    Serial.println(line);
}
//Reads whitespace-delimited time-series records from `inputfile` into one
//stack of floats per variable. Each data line is expected to look like
//"<timestamp> <v0> <v1> ... <v{nvar-1}>"; lines starting with '#' are skipped.
//Values "NA", "0.000", or "RM" are stored as the sentinel NAFlag (-1000.0).
//
//NOTE(review): this definition is truncated in this view -- the closing brace
//   and the code that would fill `Examples`/`Timestamp` (and use `diff`,
//   `tgtIdx`, `delay`, `nlags` beyond the attribute/memory bookkeeping below)
//   are not visible, so those parameters are only documented by name.
void ReadData( const string& inputfile, const int diff, const int nvar, const int tgtIdx,
               const vector< int >& delay, const vector< int >& nlags,
               vector< vector< float > >& Examples, vector< string >& Timestamp )
{
    //Sentinel written in place of missing/invalid readings.
    float NAFlag = -1000.0;

    cout << "# Determine number of attributes" << endl;
    // attributes = number of lags of each variable but does not include the target variable
    int nattributes = 0;
    for( int j=0; j<nvar; j++ )
    {
        nattributes += nlags[j];
    }
    cout << "# Number of attributes is: " << nattributes << endl;

    //Maximum "memory" = the deepest history (delay + lags) any variable needs.
    cout << "# Determine maximum memory" << endl;
    int max_memory = 0;
    for (int j=0; j< nvar; j++ )
    {
        if( max_memory < delay[j]+nlags[j] )
        {
            max_memory = delay[j]+nlags[j];
        }
    }
    cout << "# Maximum memory is: " << max_memory << endl;

    //open training data file
    ifstream ifile( inputfile.c_str() );

    // create stack of examples -- one stack per variable, plus parallel timestamps
    vector< stack< float > > Points(nvar);
    stack< string > Timestamp_loc;

    string FullLine;
    getline(ifile, FullLine);
    while( ifile )
    {
        //NOTE(review): FullLine[0] on an empty line is undefined behavior --
        //   this assumes the input contains no blank lines; verify the format.
        if (FullLine[0]!='#')
        {
            //cout << FullLine << endl;
            string tstamp;
            istringstream DataLine( FullLine );
            DataLine >> tstamp;
            for( int i=0; i<nvar; i++)
            {
                string a;
                DataLine >> a;
                // Flag NA records
                if( a == "NA" || a=="0.000" || a == "RM")
                {
                    Points[i].push(NAFlag);
                }
                else
                {
                    Points[i].push( atof( a.c_str() ) );
                }
            }
            Timestamp_loc.push( tstamp );
        }
        getline(ifile, FullLine);
    }
//Public wrapper exposing the module-level DataLine() formatter on DataClass.
String DataClass::dataLine(String name, String time, String value)
{
    //Delegate straight to the free helper; no extra processing here.
    String formatted = DataLine(name, time, value);
    return formatted;
}
//Signals the Bluetooth channel, then transmits the sensor's name, its timer's
//current duration, and a fresh reading as one formatted data line over Serial.
void DataClass::sendBluetooth(Sensor* sensor, int timeDec, int valueDec)
{
    _signalBluetooth();

    //Note: this takes a live reading via readValue() rather than a cached value.
    String line = DataLine(sensor->_name,
                           FloatToString(sensor->_timer.duration(), timeDec),
                           FloatToString(sensor->readValue(), valueDec));
    Serial.println(line);
}
//Prints the measurement's name, time, and value to Serial as one data line
//(no Bluetooth signal -- display only).
void DataClass::display(Measurement* measurement, int timeDec, int valueDec)
{
    String line = DataLine(measurement->_name,
                           FloatToString(measurement->_time, timeDec),
                           FloatToString(measurement->_value, valueDec));
    Serial.println(line);
}
//Prints the actuator's name, time, and value to Serial as one data line
//(no Bluetooth signal -- display only).
void DataClass::display(Actuator* actuator, int timeDec, int valueDec)
{
    String line = DataLine(actuator->_name,
                           FloatToString(actuator->_time, timeDec),
                           FloatToString(actuator->_value, valueDec));
    Serial.println(line);
}
//Prints the sensor's name, its timer's current duration, and a fresh reading
//to Serial as one data line (no Bluetooth signal -- display only).
void DataClass::display(Sensor* sensor, int timeDec, int valueDec)
{
    //Note: this takes a live reading via readValue() rather than a cached value.
    String line = DataLine(sensor->_name,
                           FloatToString(sensor->_timer.duration(), timeDec),
                           FloatToString(sensor->readValue(), valueDec));
    Serial.println(line);
}
//Initializes the text-rendering system: sets up the camera's view/projection
//matrices, builds the text material's shader node graph, and generates the
//material. Returns "" on success, or a human-readable error message on failure.
std::string TR::InitializeSystem(void)
{
    //Guard against double initialization.
    if (textRenderer != 0)
    {
        return "System was already initialized.";
    }

    ClearAllRenderingErrors();

    tempTex.Create();

    //Transform matrices and render info.
    textRendererCam.GetViewTransform(viewMat);
    textRendererCam.GetOrthoProjection(projMat);
    textRendererInfo = RenderInfo(0.0f, &textRendererCam, &viewMat, &projMat);

    //Material.
    textRendererParams.clear();
    SerializedMaterial matData(DrawingQuad::GetVertexInputData());

    //Use a simple vertex shader that just uses world position -- in other words,
    //   the visible range in world space is just the volume from {-1, -1, -1} to {1, 1, 1}.
    //It also outputs UVs for the fragment shader to use.
    //NOTE(review): vIn_Pos is declared but never read below; the position is
    //   instead pulled by name via VertexInputNode::GetInstanceName(). Confirm
    //   whether vIn_Pos can be removed.
    DataLine vIn_Pos(VertexInputNode::GetInstance(), 0),
             vIn_UV(VertexInputNode::GetInstance(), 1);
    DataNode::Ptr objPosToWorld(new SpaceConverterNode(DataLine(VertexInputNode::GetInstanceName()),
                                                       SpaceConverterNode::ST_OBJECT,
                                                       SpaceConverterNode::ST_WORLD,
                                                       SpaceConverterNode::DT_POSITION,
                                                       "objPosToWorld"));
    //Append w=1 to make the vec4 gl_Position output.
    DataNode::Ptr vertexPosOut(new CombineVectorNode(DataLine(objPosToWorld->GetName()),
                                                     DataLine(1.0f)));
    matData.MaterialOuts.VertexPosOutput = vertexPosOut;

    //The fragment shader just samples from the texture containing the text char
    //   and uses its "red" value because it's a grayscale texture.
    matData.MaterialOuts.VertexOutputs.push_back(ShaderOutput("vOut_UV", vIn_UV));
    DataLine fIn_UV(FragmentInputNode::GetInstance(), 0);
    DataNode::Ptr textSampler(new TextureSample2DNode(fIn_UV, textSamplerName, "textSampler"));
    DataLine textSamplerRGBA(textSampler, TextureSample2DNode::GetOutputIndex(CO_AllChannels));
    //Broadcast the red channel into all four output channels.
    DataNode::Ptr textColor(new SwizzleNode(textSamplerRGBA,
                                            SwizzleNode::C_X, SwizzleNode::C_X,
                                            SwizzleNode::C_X, SwizzleNode::C_X,
                                            "swizzleTextSample"));
    matData.MaterialOuts.FragmentOutputs.push_back(ShaderOutput("fOut_Color", textColor));

    //Generate the final material with transparent blending.
    BlendMode blending = BlendMode::GetTransparent();
    ShaderGenerator::GeneratedMaterial genM = ShaderGenerator::GenerateMaterial(matData,
                                                                                textRendererParams,
                                                                                blending);
    if (!genM.ErrorMessage.empty())
    {
        return "Error generating text renderer material: " + genM.ErrorMessage;
    }
    textRenderer = genM.Mat;

    return "";
}
//Loads all bullet content: one sub-mesh per bullet type (imported with Assimp),
//a 1x1 opaque-white fallback texture, and the bullet shader material.
//On failure, writes a description into `err` and returns false.
bool BulletContent::Initialize(std::string& err)
{
    typedef VertexPosUV BulletVertex;
    RenderIOAttributes vertIns = BulletVertex::GetVertexAttributes();

#pragma region Meshes

    //Reserve one sub-mesh slot per bullet type.
    for (unsigned int i = 0; i < B_NUMBER_OF_BULLETS; ++i)
    {
        bulletMesh.SubMeshes.push_back(MeshData(false, PT_TRIANGLE_LIST));
    }

    //Scratch buffers reused across all bullet meshes.
    std::vector<BulletVertex> vertices;
    std::vector<unsigned int> indices;

    Assimp::Importer importer;
    unsigned int flags = aiProcessPreset_TargetRealtime_MaxQuality;

    for (unsigned int i = 0; i < B_NUMBER_OF_BULLETS; ++i)
    {
        //Get the file for this bullet type.
        std::string file = "Content/Game/Meshes/Bullets/";
        switch ((Bullets)i)
        {
            case B_PUNCHER:
                file += "Puncher.obj";
                break;
            case B_TERRIBLE_SHOTGUN:
                file += "Terrible Shotgun.obj";
                break;
            case B_SPRAY_N_PRAY:
                file += "Spray and Pray.obj";
                break;
            case B_CLUSTER:
                file += "Cluster.obj";
                break;
            default:
                assert(false);
        }

        const aiScene* scene = importer.ReadFile(file, flags);

        //Make sure the scene is valid.
        if (scene == 0)
        {
            err = "Error loading '" + file + "': " + importer.GetErrorString();
            return false;
        }
        if (scene->mNumMeshes != 1)
        {
            err = "Mesh '" + file + "' has " + std::to_string(scene->mNumMeshes) + " meshes in it";
            return false;
        }

        aiMesh* mesh = scene->mMeshes[0];

        //Make sure the mesh is valid.
        assert(mesh->HasFaces());
        if (!mesh->HasPositions() || !mesh->HasTextureCoords(0))
        {
            err = "Mesh '" + file + "' is missing positions or UVs!";
            return false;
        }

        //Populate the vertex/index buffer data.
        //NOTE(review): the raw casts assume the engine's Vector3f/Vector2f have the
        //   same float layout as Assimp's aiVector3D -- verify against both types.
        vertices.resize(mesh->mNumVertices);
        for (unsigned int j = 0; j < mesh->mNumVertices; ++j)
        {
            vertices[j].Pos = *(Vector3f*)(&mesh->mVertices[j].x);
            vertices[j].UV = *(Vector2f*)(&mesh->mTextureCoords[0][j].x);
        }
        indices.resize(mesh->mNumFaces * 3);
        for (unsigned int j = 0; j < mesh->mNumFaces; ++j)
        {
            aiFace& fce = mesh->mFaces[j];
            //Only triangulated meshes are supported.
            if (fce.mNumIndices != 3)
            {
                err = "A face in mesh '" + file + "' has a non-tri face with " +
                      std::to_string(fce.mNumIndices) + " indices!";
                return false;
            }

            indices[(j * 3)] = fce.mIndices[0];
            indices[(j * 3) + 1] = fce.mIndices[1];
            indices[(j * 3) + 2] = fce.mIndices[2];
        }

        //Create the vertex/index buffers.
        bulletMesh.SubMeshes[i].SetVertexData(vertices, MeshData::BUF_STATIC, vertIns);
        bulletMesh.SubMeshes[i].SetIndexData(indices, MeshData::BUF_STATIC);
    }

#pragma endregion

#pragma region Textures

    {
        //A single opaque-white texel used when a bullet has no texture of its own.
        Array2D<Vector4b> values(1, 1, Vector4b((unsigned char)255, 255, 255, 255));
        defaultTex.Create();
        defaultTex.SetColorData(values);
    }

#pragma endregion

#pragma region Materials

    {
        SerializedMaterial serMat;
        serMat.VertexInputs = vertIns;

        //Vertex shader: object-space position to screen space; UVs passed through.
        DataLine vIn_Pos(VertexInputNode::GetInstance(), 0),
                 vIn_UV(VertexInputNode::GetInstance(), 1);
        DataNode::Ptr screenPos = SpaceConverterNode::ObjPosToScreenPos(vIn_Pos, "screenPos");
        serMat.MaterialOuts.VertexPosOutput = DataLine(screenPos, 1);
        serMat.MaterialOuts.VertexOutputs.push_back(ShaderOutput("fIn_UV", vIn_UV));

        //Fragment shader: sample the texture and multiply by the color uniform;
        //   alpha is fixed at 1 (opaque blending is used below).
        DataLine fIn_UV(FragmentInputNode::GetInstance(), 0);
        DataNode::Ptr tex(new TextureSample2DNode(fIn_UV, UNIFORM_TEXTURE, "texSample"));
        DataLine texRGB(tex, TextureSample2DNode::GetOutputIndex(CO_AllColorChannels));
        DataNode::Ptr colorParam(new ParamNode(3, UNIFORM_COLOR));
        DataNode::Ptr finalRGB(new MultiplyNode(texRGB, colorParam, "finalRGB"));
        DataNode::Ptr finalColor(new CombineVectorNode(finalRGB, 1.0f, "finalColor"));
        serMat.MaterialOuts.FragmentOutputs.push_back(ShaderOutput("fOut_Color", finalColor));

        auto genMat = ShaderGenerator::GenerateMaterial(serMat, bulletParams,
                                                        BlendMode::GetOpaque());
        if (!genMat.ErrorMessage.empty())
        {
            err = "Error generating bullet material: " + genMat.ErrorMessage;
            return false;
        }
        bulletMat = genMat.Mat;
    }

#pragma endregion

    return true;
}
//Resets this output set to an empty state: no vertex or fragment outputs,
//and a default-constructed (unset) vertex position output.
void MaterialOutputs::ClearData(void)
{
    VertexOutputs.clear();
    FragmentOutputs.clear();
    VertexPosOutput = DataLine();
}
//Builds the chain of post-process materials and render targets.
//Effects are split into "pass groups": a run of consecutive single-pass effects
//   shares one material, while each multi-pass effect forms its own group and
//   gets one material per pass (its first/last passes are merged into the
//   neighboring groups when those exist).
//On any failure, `errorMsg` is set and construction stops early.
PostProcessChain::PostProcessChain(std::vector<std::shared_ptr<PostProcessEffect>> effectChain,
                                   unsigned int screenWidth, unsigned int screenHeight,
                                   bool useMipmaps, const TextureSampleSettings2D & renderTargetSettings,
                                   PixelSizes pixelSize, RenderTargetManager & manager)
    : rtManager(manager),
      rt1(RenderTargetManager::ERROR_ID), rt2(RenderTargetManager::ERROR_ID),
      ct1(renderTargetSettings, pixelSize, useMipmaps),
      ct2(renderTargetSettings, pixelSize, useMipmaps)
{
    //TODO: Change to using a simple vector of the following struct instead of the "pass groups" thing.
    //NOTE(review): MiniEffect is currently unused -- it only exists for the
    //   refactor described in the TODO above.
    struct MiniEffect
    {
    public:
        PostProcessEffect* Effect;
        unsigned int Pass;
        MiniEffect(void) : Effect(0), Pass(0) { }
        MiniEffect(PostProcessEffect * effect, unsigned int pass = 1)
            : Effect(effect), Pass(pass)
        {
            assert(effect != 0 && Pass > 0);
        }
        bool IsBreak(void) const { return Effect == 0 || Pass == 0; }
    };

    //First separate the effects into "pass groups" -- a chain of effects grouped by pass.
    //Multi-pass effects are each in their own group.

    //All passes don't need any kind of transformation for the vertices:
    //   the vec4 position is just {input.xyz, 1}.
    std::vector<DataLine> vectorBuilder;
    vectorBuilder.insert(vectorBuilder.end(), DataLine(VertexInputNode::GetInstanceName(), 0));
    vectorBuilder.insert(vectorBuilder.end(), DataLine(1.0f));
    DataNode::Ptr combineToPos4(new CombineVectorNode(vectorBuilder, "ppeVertPosOut"));

    //Set up DataNode material data structures.
    DataNode::ClearMaterialData();
    DataNode::VertexIns = DrawingQuad::GetAttributeData();
    DataNode::MaterialOuts.VertexPosOutput = DataLine(combineToPos4->GetName());

    //Build each pass group.
    std::vector<std::vector<PostProcessEffect::PpePtr>> passGroups;
    unsigned int passGroup = 0;
    totalPasses = 0;
    for (unsigned int effect = 0; effect < effectChain.size(); ++effect)
    {
        //If this is the start of a new pass, create the collection of effects for it.
        if (passGroup >= passGroups.size())
        {
            totalPasses += 1;
            passGroups.insert(passGroups.end(), std::vector<PostProcessEffect::PpePtr>());
        }

        //If this effect only has one pass, just put it in the current pass group.
        if (effectChain[effect]->NumbPasses == 1)
        {
            passGroups[passGroup].insert(passGroups[passGroup].end(), effectChain[effect]);
        }
        //Otherwise, create a new pass group just for this effect.
        else
        {
            //NOTE(review): the "- 2 / + 1" arithmetic mirrors how a multi-pass
            //   effect's first/last passes are merged into neighboring groups
            //   (see startPass/endPass below) -- confirm totalPasses stays in
            //   sync with the number of materials actually generated.
            totalPasses += effectChain[effect]->NumbPasses - 2;
            if (effect == effectChain.size() - 1)
                totalPasses += 1;

            //Close out the current group if it already holds single-pass effects.
            if (passGroups[passGroup].size() > 0)
            {
                passGroup += 1;
                passGroups.insert(passGroups.end(), std::vector<std::shared_ptr<PostProcessEffect>>());
            }
            passGroups[passGroup].insert(passGroups[passGroup].end(), effectChain[effect]);
            passGroup += 1;
        }
    }

    //Assemble each pass group into a material.
    for (passGroup = 0; passGroup < passGroups.size(); ++passGroup)
    {
        assert(passGroups[passGroup].size() > 0);

        DataNode::MaterialOuts.FragmentOutputs.clear();
        DataNode::MaterialOuts.VertexOutputs.clear();

        //If this is a multi-pass group, create the multiple passes.
        if (passGroups[passGroup][0]->NumbPasses > 1)
        {
            //A multi-pass group holds exactly one effect (see grouping above).
            assert(passGroups[passGroup].size() == 1);

            PostProcessEffect::PpePtr effct = passGroups[passGroup][0];
            effct->ChangePreviousEffect();
            DataNode::MaterialOuts.FragmentOutputs.insert(DataNode::MaterialOuts.FragmentOutputs.end(),
                                                          ShaderOutput("out_FinalColor",
                                                                       DataLine(effct->GetName(),
                                                                                PostProcessEffect::GetColorOutputIndex())));

            //If there is a group before/after this, skip the first/last pass,
            //   since it will be lumped in with that other group.
            unsigned int startPass = 1,
                         endPass = effct->NumbPasses;
            if (passGroup > 0)
                startPass += 1;
            if (passGroup < passGroups.size() - 1)
                endPass -= 1;

            //Generate one material per remaining pass.
            for (unsigned int pass = startPass; pass <= endPass; ++pass)
            {
                effct->CurrentPass = pass;

                DataNode::MaterialOuts.VertexOutputs.clear();
                DataNode::MaterialOuts.VertexOutputs.insert(DataNode::MaterialOuts.VertexOutputs.end(),
                                                            ShaderOutput("fIn_UV",
                                                                         DataLine(VertexInputNode::GetInstanceName(), 1)));
                effct->OverrideVertexOutputs(DataNode::MaterialOuts.VertexOutputs);

                UniformDictionary unfs;
                ShaderGenerator::GeneratedMaterial genM =
                    ShaderGenerator::GenerateMaterial(unfs, RenderingModes::RM_Opaque);
                if (!genM.ErrorMessage.empty())
                {
                    errorMsg = "Error generating shaders for pass #" + std::to_string(pass) +
                               " of multi-pass effect '" + effct->GetName() + "': " + genM.ErrorMessage;
                    return;
                }

                materials.insert(materials.end(), std::shared_ptr<Material>(genM.Mat));
                params.AddUniforms(unfs, true);
                uniforms.insert(uniforms.end(), UniformDictionary());
                if (materials[materials.size() - 1]->HasError())
                {
                    errorMsg = std::string() + "Error creating pass #" + std::to_string(pass) +
                               " of multi-pass effect '" + effct->GetName() + "': " +
                               materials[materials.size() - 1]->GetErrorMsg();
                    return;
                }
            }
        }
        //Otherwise, build up this pass using multiple effects.
        else
        {
            PostProcessEffect::PpePtr prev, current;

            //If there was a multi-pass effect before this group,
            //   put its final pass at the beginning of this group.
            if (passGroup > 0 && passGroups[passGroup - 1][0]->NumbPasses > 1)
            {
                prev = passGroups[passGroup - 1][0];
                prev->CurrentPass = prev->NumbPasses;
                prev->ChangePreviousEffect();
            }

            //Set each effect to happen on top of the previous.
            for (unsigned int effect = 0; effect < passGroups[passGroup].size(); ++effect)
            {
                current = passGroups[passGroup][effect];
                current->CurrentPass = 1;
                current->ChangePreviousEffect(prev);
                prev = current;
            }
            assert(current.get() != 0);

            //If the next pass group is a multi-pass effect,
            //   put its first pass on the end of this group.
            if (passGroup < passGroups.size() - 1 && passGroups[passGroup + 1][0]->NumbPasses > 1)
            {
                current = passGroups[passGroup + 1][0];
                current->CurrentPass = 1;
                current->ChangePreviousEffect(prev);
                prev = current;
            }

            //Now create the material.
            DataNode::MaterialOuts.VertexOutputs.clear();
            DataNode::MaterialOuts.FragmentOutputs.clear();
            DataNode::MaterialOuts.VertexOutputs.insert(DataNode::MaterialOuts.VertexOutputs.end(),
                                                        ShaderOutput("fIn_UV",
                                                                     DataLine(VertexInputNode::GetInstanceName(), 1)));
            DataNode::MaterialOuts.FragmentOutputs.insert(DataNode::MaterialOuts.FragmentOutputs.end(),
                                                          ShaderOutput("out_FinalColor",
                                                                       DataLine(current->GetName(),
                                                                                current->GetColorOutputIndex())));
            current->OverrideVertexOutputs(DataNode::MaterialOuts.VertexOutputs);

            UniformDictionary unfs;
            ShaderGenerator::GeneratedMaterial genM =
                ShaderGenerator::GenerateMaterial(unfs, RenderingModes::RM_Opaque);
            if (!genM.ErrorMessage.empty())
            {
                errorMsg = std::string() + "Error generating shaders for material #" +
                           std::to_string(materials.size()) + ": " + genM.ErrorMessage;
                return;
            }

            materials.insert(materials.end(), std::shared_ptr<Material>(genM.Mat));
            params.AddUniforms(unfs, true);
            uniforms.insert(uniforms.end(), UniformDictionary());
            if (materials[materials.size() - 1]->HasError())
            {
                errorMsg = std::string() + "Error creating material #" + std::to_string(materials.size()) +
                           ": " + materials[materials.size() - 1]->GetErrorMsg();
                return;
            }
        }
    }

    //Create needed render targets for rendering the post-process effect.
    //The second target is only needed when ping-ponging between two materials.
    if (materials.size() > 0)
    {
        ct1.Create(renderTargetSettings, useMipmaps, pixelSize);
        ct1.ClearData(screenWidth, screenHeight);
        rt1 = rtManager.CreateRenderTarget(PixelSizes::PS_16U_DEPTH);
        if (rt1 == RenderTargetManager::ERROR_ID)
        {
            errorMsg = "Error creating first render target: " + rtManager.GetError();
            return;
        }
        rtManager[rt1]->SetColorAttachment(RenderTargetTex(&ct1), true);
    }
    if (materials.size() > 1)
    {
        ct2.Create(renderTargetSettings, useMipmaps, pixelSize);
        ct2.ClearData(screenWidth, screenHeight);
        rt2 = rtManager.CreateRenderTarget(PixelSizes::PS_16U_DEPTH);
        if (rt2 == RenderTargetManager::ERROR_ID)
        {
            errorMsg = "Error creating second render target: " + rtManager.GetError();
            return;
        }
        rtManager[rt2]->SetColorAttachment(RenderTargetTex(&ct2), true);
    }
}