Example 1
void Delaunay3DWindow::CreateTetra(int index)
{
    std::vector<int> const& dindices = mDelaunay.GetIndices();

    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_COLOR, DF_R32G32B32A32_FLOAT, 0);

    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat,
        static_cast<unsigned int>(mVertices.size())));
    vbuffer->SetUsage(Resource::DYNAMIC_UPDATE);
    Vertex* vertex = vbuffer->Get<Vertex>();
    for (int j = 0; j < 4; ++j)
    {
        vertex[j].position = mVertices[dindices[4 * index + j]];
        vertex[j].color = mLightGray;
    }

    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRIMESH, 4,
        sizeof(unsigned int)));
    unsigned int* indices = ibuffer->Get<unsigned int>();
    indices[0] = 0;  indices[1] = 1;  indices[2] = 2;
    indices[3] = 0;  indices[4] = 3;  indices[5] = 1;
    indices[6] = 0;  indices[7] = 2;  indices[8] = 3;
    indices[9] = 3;  indices[10] = 2;  indices[11] = 1;

    mWireTetra[index] = std::make_shared<Visual>(vbuffer, ibuffer,
        mVCEffect);
    mScene->AttachChild(mWireTetra[index]);
}
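CreateTetra builds one wireframe tetrahedron per Delaunay cell. A minimal call-site sketch, which assumes mWireTetra is resizable and relies only on GetIndices() storing four indices per tetrahedron:

    // Sketch only: create a Visual for every tetrahedron of the triangulation.
    // GetIndices() holds 4 indices per cell, so the cell count is size()/4.
    int const numTetra = static_cast<int>(mDelaunay.GetIndices().size() / 4);
    mWireTetra.resize(numTetra);
    for (int i = 0; i < numTetra; ++i)
    {
        CreateTetra(i);
    }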
void MinimumVolumeSphere3DWindow::CreateScene()
{
    // Create the points.
    std::mt19937 mte;
    std::uniform_real_distribution<float> rnd(-1.0f, 1.0f);
    for (auto& v : mVertices)
    {
        v = { rnd(mte), rnd(mte), rnd(mte) };
    }

    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);

    MeshFactory mf;
    mf.SetVertexFormat(vformat);

    std::shared_ptr<ConstantColorEffect> effect;
    for (int i = 0; i < NUM_POINTS; ++i)
    {
        mPoints[i] = mf.CreateSphere(6, 6, 0.01f);
        effect = std::make_shared<ConstantColorEffect>(mProgramFactory,
            Vector4<float>({ 0.5f, 0.5f, 0.5f, 1.0f }));
        mPoints[i]->SetEffect(effect);
        mCameraRig.Subscribe(mPoints[i]->worldTransform,
            effect->GetPVWMatrixConstant());

        std::shared_ptr<VertexBuffer> vbuffer = mPoints[i]->GetVertexBuffer();
        Vector3<float>* vertex = vbuffer->Get<Vector3<float>>();
        Vector3<float> offset = mVertices[i];
        for (unsigned int j = 0; j < vbuffer->GetNumElements(); ++j)
        {
            vertex[j] += offset;
        }
    }

    // Create the segments.
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat, 12));
    vbuffer->SetUsage(Resource::DYNAMIC_UPDATE);
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(
        IP_POLYSEGMENT_DISJOINT, 6));
    effect = std::make_shared<ConstantColorEffect>(mProgramFactory,
        Vector4<float>({ 0.5f, 0.0f, 0.0f, 1.0f }));
    mSegments = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    mCameraRig.Subscribe(mSegments->worldTransform,
        effect->GetPVWMatrixConstant());
    mSegments->Update();

    // Create the sphere.
    mSphere = mf.CreateSphere(16, 16, 1.0f);

    effect = std::make_shared<ConstantColorEffect>(mProgramFactory,
        Vector4<float>({ 0.0f, 0.0f, 0.5f, 1.0f }));

    mSphere->SetEffect(effect);
    mCameraRig.Subscribe(mSphere->worldTransform,
        effect->GetPVWMatrixConstant());
}
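The segment vertex buffer above uses DYNAMIC_UPDATE, so its endpoints are meant to be rewritten as the support set of the minimum-volume sphere changes. A per-frame update sketch, assuming the usual GTE engine member mEngine and its Update(buffer) call; which points are the current support points is application logic and is left hypothetical here:

    // Sketch only: rewrite the segment endpoints and push the buffer to the GPU.
    std::shared_ptr<VertexBuffer> segBuffer = mSegments->GetVertexBuffer();
    Vector3<float>* segVertex = segBuffer->Get<Vector3<float>>();
    segVertex[0] = mVertices[0];  // one endpoint pair per disjoint segment
    segVertex[1] = mVertices[1];
    // ... fill the remaining endpoints (12 vertices total) ...
    mEngine->Update(segBuffer);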
Example 3
void TexturingWindow::CreateScene()
{
    // Create a vertex buffer for a two-triangle square.  The PNG is stored
    // in left-handed coordinates.  The texture coordinates are chosen to
    // reflect the texture in the y-direction.
    struct Vertex
    {
        Vector3<float> position;
        Vector2<float> tcoord;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_TEXCOORD, DF_R32G32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat, 4));
    Vertex* vertex = vbuffer->Get<Vertex>();
    vertex[0].position = { 0.0f, 0.0f, 0.0f };
    vertex[0].tcoord = { 0.0f, 1.0f };
    vertex[1].position = { 1.0f, 0.0f, 0.0f };
    vertex[1].tcoord = { 1.0f, 1.0f };
    vertex[2].position = { 0.0f, 1.0f, 0.0f };
    vertex[2].tcoord = { 0.0f, 0.0f };
    vertex[3].position = { 1.0f, 1.0f, 0.0f };
    vertex[3].tcoord = { 1.0f, 0.0f };

    // Create an indexless buffer for a triangle mesh with two triangles.
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRISTRIP, 2));

    // Create an effect for the vertex and pixel shaders.  The texture is
    // bilinearly filtered and the texture coordinates are clamped to [0,1]^2.
    std::shared_ptr<Texture2> myTexture(WICFileIO::Load(
        mEnvironment.GetPath("StoneWall.png"), false));
    std::shared_ptr<Texture2Effect> effect =
        std::make_shared<Texture2Effect>(mProgramFactory, myTexture,
        SamplerState::MIN_L_MAG_L_MIP_P, SamplerState::CLAMP,
        SamplerState::CLAMP);

    // Create the geometric object for drawing.  Translate it so that its
    // center of mass is at the origin.  This supports virtual trackball
    // motion about the object "center".
    mSquare = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    mSquare->localTransform.SetTranslation(-0.5f, -0.5f, 0.0f);

    // Enable automatic updates of pvw-matrices and w-matrices.
    mCameraRig.Subscribe(mSquare->worldTransform,
        effect->GetPVWMatrixConstant());

#if defined(SAVE_RENDERING_TO_DISK)
    mTarget = std::make_shared<DrawTarget>(1, DF_R8G8B8A8_UNORM, mXSize,
        mYSize);
    mTarget->GetRTTexture(0)->SetCopyType(Resource::COPY_STAGING_TO_CPU);
#endif

    mTrackball.Attach(mSquare);
    mTrackball.Update();
}
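For context, the square built here is normally drawn from the sample's idle loop. A sketch of that loop using the engine calls that appear throughout the GTE samples; treat the exact names as assumptions for your GTE version:

void TexturingWindow::OnIdle()
{
    // Sketch only: clear, draw the textured square, and present.
    mEngine->ClearBuffers();
    mEngine->Draw(mSquare);
    mEngine->DisplayColorBuffer(0);  // argument is the vsync interval
}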
Example 4
//----------------------------------------------------------------------------
void PerformanceAMDWindow::CreateScene()
{
    struct Vertex
    {
        Vector3<float> position;
        Vector2<float> tcoord;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_TEXCOORD, DF_R32G32_FLOAT, 0);
    unsigned int const numTriangles = 1024;
    unsigned int const numVertices = 3 * numTriangles;
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat,
        numVertices));

    // Randomly generate positions and texture coordinates.
    std::mt19937 mte;
    std::uniform_real_distribution<float> unirnd(0.0f, 1.0f);
    std::uniform_real_distribution<float> symrnd(-1.0f, 1.0f);
    Vertex* vertex = vbuffer->Get<Vertex>();
    for (unsigned int i = 0; i < numVertices; ++i)
    {
        for (int j = 0; j < 3; ++j)
        {
            vertex[i].position[j] = symrnd(mte);
        }
        for (int j = 0; j < 2; ++j)
        {
            vertex[i].tcoord[j] = unirnd(mte);
        }
    }

    // The vertices are not indexed.  Each consecutive triple is a triangle.
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRIMESH,
        numTriangles));

    // Use a standard texture effect.
    std::shared_ptr<Texture2Effect> effect(new Texture2Effect(
        mProgramFactory, mBlurredTexture, SamplerState::MIN_L_MAG_L_MIP_P,
        SamplerState::CLAMP, SamplerState::CLAMP));
    mPVWMatrix = effect->GetPVWMatrixConstant();

    mTriangles.reset(new Visual(vbuffer, ibuffer, effect));
    SubscribeCW(mTriangles, mPVWMatrix);

    EnableObjectMotion();
}
bool MultipleRenderTargetsWindow::CreateScene()
{
    // Create a visual effect that populates the draw target.
    std::string filename = mEnvironment.GetPath("MultipleRenderTargets.hlsl");
    std::shared_ptr<VisualProgram> program =
        mProgramFactory.CreateFromFiles(filename, filename, "");
    if (!program)
    {
        return false;
    }

    std::shared_ptr<ConstantBuffer> cbuffer(new ConstantBuffer(
        sizeof(Matrix4x4<float>), true));
    program->GetVShader()->Set("PVWMatrix", cbuffer);

    std::shared_ptr<PixelShader> pshader = program->GetPShader();
    std::shared_ptr<ConstantBuffer> farNearRatio(new ConstantBuffer(
        sizeof(float), false));
    pshader->Set("FarNearRatio", farNearRatio);
    farNearRatio->SetMember("farNearRatio",
        mCamera->GetDMax() / mCamera->GetDMin());

    std::string path = mEnvironment.GetPath("StoneWall.png");
    std::shared_ptr<Texture2> baseTexture(WICFileIO::Load(path, true));
    baseTexture->AutogenerateMipmaps();
    pshader->Set("baseTexture", baseTexture);

    std::shared_ptr<SamplerState> baseSampler(new SamplerState());
    baseSampler->filter = SamplerState::MIN_L_MAG_L_MIP_L;
    baseSampler->mode[0] = SamplerState::CLAMP;
    baseSampler->mode[1] = SamplerState::CLAMP;
    pshader->Set("baseSampler", baseSampler);

    std::shared_ptr<VisualEffect> effect =
        std::make_shared<VisualEffect>(program);

    // Create a vertex buffer for a two-triangle square.  The PNG is stored
    // in left-handed coordinates.  The texture coordinates are chosen to
    // reflect the texture in the y-direction.
    struct Vertex
    {
        Vector3<float> position;
        Vector2<float> tcoord;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_TEXCOORD, DF_R32G32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat, 4));
    Vertex* vertex = vbuffer->Get<Vertex>();
    vertex[0].position = { -1.0f, -1.0f, 0.0f };
    vertex[0].tcoord = { 0.0f, 1.0f };
    vertex[1].position = { 1.0f, -1.0f, 0.0f };
    vertex[1].tcoord = { 1.0f, 1.0f };
    vertex[2].position = { -1.0f, 1.0f, 0.0f };
    vertex[2].tcoord = { 0.0f, 0.0f };
    vertex[3].position = { 1.0f, 1.0f, 0.0f };
    vertex[3].tcoord = { 1.0f, 0.0f };

    // Create an indexless buffer for a triangle mesh with two triangles.
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRISTRIP, 2));

    // Create the geometric object for drawing and enable automatic updates
    // of pvw-matrices and w-matrices.
    mSquare = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    mTrackball.Attach(mSquare);
    mTrackball.Update();
    mCameraRig.Subscribe(mSquare->worldTransform, cbuffer);
    return true;
}
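CreateScene only prepares the effect and the square; the multiple render targets themselves live in a DrawTarget that the square is drawn into. A sketch of that off-screen pass, reusing the four-argument DrawTarget constructor seen in the TexturingWindow example; the format choice and the Enable/Disable engine calls are assumptions:

    // Sketch only: render the square into a two-target DrawTarget instead of
    // the back buffer.
    auto target = std::make_shared<DrawTarget>(2, DF_R32G32B32A32_FLOAT,
        mXSize, mYSize);
    mEngine->Enable(target);
    mEngine->ClearBuffers();
    mEngine->Draw(mSquare);
    mEngine->Disable(target);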
Example 6
void PickingWindow::CreateScene()
{
    std::string path = mEnvironment.GetPath("Checkerboard.png");
    std::shared_ptr<Texture2> texture(WICFileIO::Load(path, false));

    mScene = std::make_shared<Node>();

    VertexFormat vformat0;
    vformat0.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat0.Bind(VA_TEXCOORD, DF_R32G32_FLOAT, 0);

    // The torus and dodecahedron are created by the mesh factory in which
    // the 'visual' model bounds are computed.  The points and segments
    // primitives are created explicitly here, so we need to compute their
    // model bounds to be used by the picking system.
    MeshFactory mf;
    mf.SetVertexFormat(vformat0);

    mTorus = mf.CreateTorus(16, 16, 4.0f, 1.0f);
    std::shared_ptr<Texture2Effect> effect = std::make_shared<Texture2Effect>(
        mProgramFactory, texture, SamplerState::MIN_L_MAG_L_MIP_P,
        SamplerState::CLAMP, SamplerState::CLAMP);
    mTorus->SetEffect(effect);
    mCameraRig.Subscribe(mTorus->worldTransform,
        effect->GetPVWMatrixConstant());
    mScene->AttachChild(mTorus);

    mDodecahedron = mf.CreateDodecahedron();
    effect = std::make_shared<Texture2Effect>(mProgramFactory, texture,
        SamplerState::MIN_L_MAG_L_MIP_P, SamplerState::CLAMP,
        SamplerState::CLAMP);
    mDodecahedron->SetEffect(effect);
    mCameraRig.Subscribe(mDodecahedron->worldTransform,
        effect->GetPVWMatrixConstant());
    mScene->AttachChild(mDodecahedron);

    VertexFormat vformat1;
    vformat1.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat1, 4));
    Vector3<float>* vertices = vbuffer->Get<Vector3<float>>();
    vertices[0] = { 1.0f, 1.0f, 4.0f };
    vertices[1] = { 1.0f, 2.0f, 5.0f };
    vertices[2] = { 2.0f, 2.0f, 6.0f };
    vertices[3] = { 2.0f, 1.0f, 7.0f };
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_POLYPOINT, 4));
    std::shared_ptr<ConstantColorEffect> cceffect =
        std::make_shared<ConstantColorEffect>(mProgramFactory,
        Vector4<float>({ 0.5f, 0.0f, 0.0f, 1.0f }));
    mPoints = std::make_shared<Visual>(vbuffer, ibuffer, cceffect);
    mPoints->UpdateModelBound();
    mCameraRig.Subscribe(mPoints->worldTransform,
        cceffect->GetPVWMatrixConstant());
    mScene->AttachChild(mPoints);

    vbuffer = std::make_shared<VertexBuffer>(vformat1, 4);
    vertices = vbuffer->Get<Vector3<float>>();
    vertices[0] = { -1.0f, -1.0f, 4.0f };
    vertices[1] = { -1.0f, -2.0f, 5.0f };
    vertices[2] = { -2.0f, -1.0f, 6.0f };
    vertices[3] = { -2.0f, -2.0f, 7.0f };
    ibuffer = std::make_shared<IndexBuffer>(IP_POLYSEGMENT_CONTIGUOUS, 3,
        sizeof(int));
    ibuffer->SetSegment(0, 0, 1);
    ibuffer->SetSegment(1, 1, 2);
    ibuffer->SetSegment(2, 2, 3);
    cceffect = std::make_shared<ConstantColorEffect>(mProgramFactory,
        Vector4<float>({ 0.0f, 0.0f, 0.5f, 1.0f }));
    mSegments = std::make_shared<Visual>(vbuffer, ibuffer, cceffect);
    mSegments->UpdateModelBound();
    mCameraRig.Subscribe(mSegments->worldTransform,
        cceffect->GetPVWMatrixConstant());
    mScene->AttachChild(mSegments);

    for (int i = 0; i < SPHERE_BUDGET; ++i)
    {
        mSphere[i] = mf.CreateSphere(8, 8, 0.125f);
        cceffect = std::make_shared<ConstantColorEffect>(mProgramFactory,
            Vector4<float>({ 0.0f, 0.0f, 0.0f, 1.0f }));
        mSphere[i]->SetEffect(cceffect);
        mCameraRig.Subscribe(mSphere[i]->worldTransform,
            cceffect->GetPVWMatrixConstant());
        mScene->AttachChild(mSphere[i]);
    }

    mTrackball.Attach(mScene);
    mTrackball.Update();
}
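The explicit UpdateModelBound calls exist so the picking system has model bounds to test. A heavily hedged sketch of the pick itself; Picker's exact signature and the PickRecord contents vary by GTE version, so verify against GtePicker.h before using this:

void PickingWindow::DoPick(Vector4<float> const& rayOrigin,
    Vector4<float> const& rayDirection)  // hypothetical helper
{
    // Sketch only: intersect a ray with the pickable scene.
    Picker picker;
    picker(mScene, rayOrigin, rayDirection, 0.0f,
        std::numeric_limits<float>::max());
    if (!picker.records.empty())
    {
        // One PickRecord per intersected primitive; the sample would place
        // spheres from mSphere[] at the hit points here.
    }
}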
//----------------------------------------------------------------------------
bool StructuredBuffersWindow::CreateScene()
{
    // Create the shaders and associated resources.
    HLSLDefiner definer;
    definer.SetInt("WINDOW_WIDTH", mXSize);
    std::shared_ptr<VertexShader> vshader(ShaderFactory::CreateVertex(
        mEnvironment.GetPath("StructuredBuffers.hlsl"), definer));
    if (!vshader)
    {
        return false;
    }

    std::shared_ptr<PixelShader> pshader(ShaderFactory::CreatePixel(
        mEnvironment.GetPath("StructuredBuffers.hlsl"), definer));
    if (!pshader)
    {
        return false;
    }

    std::shared_ptr<ConstantBuffer> cbuffer(new ConstantBuffer(
        sizeof(Matrix4x4<float>), true));
    vshader->Set("PVWMatrix", cbuffer);

    // Create the pixel-shader resources.
    std::string path = mEnvironment.GetPath("StoneWall.png");
    std::shared_ptr<Texture2> baseTexture(WICFileIO::Load(path, false));
    pshader->Set("baseTexture", baseTexture);

    std::shared_ptr<SamplerState> baseSampler(new SamplerState());
    baseSampler->filter = SamplerState::MIN_L_MAG_L_MIP_P;
    baseSampler->mode[0] = SamplerState::CLAMP;
    baseSampler->mode[1] = SamplerState::CLAMP;
    pshader->Set("baseSampler", baseSampler);

    mDrawnPixels.reset(new StructuredBuffer(mXSize*mYSize,
        sizeof(Vector4<float>)));
    mDrawnPixels->SetUsage(Resource::SHADER_OUTPUT);
    mDrawnPixels->SetCopyType(Resource::COPY_BIDIRECTIONAL);
    memset(mDrawnPixels->GetData(), 0, mDrawnPixels->GetNumBytes());
    pshader->Set("drawnPixels", mDrawnPixels);

    // Create the visual effect for the square.
    std::shared_ptr<VisualEffect> effect(new VisualEffect(vshader, pshader));

    // Create a vertex buffer for a two-triangle square.  The PNG is stored in
    // left-handed coordinates.  The texture coordinates are chosen to reflect
    // the texture in the y-direction.
    struct Vertex
    {
        Vector3<float> position;
        Vector2<float> tcoord;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_TEXCOORD, DF_R32G32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat, 4));
    Vertex* vertex = vbuffer->Get<Vertex>();
    vertex[0].position = Vector3<float>(0.0f, 0.0f, 0.0f);
    vertex[0].tcoord = Vector2<float>(0.0f, 1.0f);
    vertex[1].position = Vector3<float>(1.0f, 0.0f, 0.0f);
    vertex[1].tcoord = Vector2<float>(1.0f, 1.0f);
    vertex[2].position = Vector3<float>(0.0f, 1.0f, 0.0f);
    vertex[2].tcoord = Vector2<float>(0.0f, 0.0f);
    vertex[3].position = Vector3<float>(1.0f, 1.0f, 0.0f);
    vertex[3].tcoord = Vector2<float>(1.0f, 0.0f);

    // Create an indexless buffer for a triangle mesh with two triangles.
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRISTRIP, 2));

    // Create the geometric object for drawing.  Translate it so that its
    // center of mass is at the origin.  This supports virtual trackball
    // motion about the object "center".
    mSquare.reset(new Visual(vbuffer, ibuffer, effect));
    mSquare->localTransform.SetTranslation(-0.5f, -0.5f, 0.0f);
    mSquare->Update();

    // Enable automatic updates of pvw-matrices and w-matrices.
    SubscribeCW(mSquare, cbuffer);

    // The structured buffer is written in the pixel shader.  This texture
    // will receive a copy of it so that we can write the results to disk
    // as a PNG file.
    mDrawnPixelsTexture = new Texture2(DF_R8G8B8A8_UNORM, mXSize, mYSize);
    return true;
}
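mDrawnPixelsTexture is allocated above precisely so the GPU-written structured buffer can be copied back and saved as a PNG. A readback sketch; CopyGpuToCpu is the GTE engine call assumed here, and the final save call is left as a comment because its signature depends on the WICFileIO version:

    // Sketch only: copy the structured buffer to the CPU and pack its float
    // colors into the R8G8B8A8 texture.
    mEngine->CopyGpuToCpu(mDrawnPixels);
    Vector4<float> const* src =
        reinterpret_cast<Vector4<float> const*>(mDrawnPixels->GetData());
    unsigned int* dst =
        reinterpret_cast<unsigned int*>(mDrawnPixelsTexture->GetData());
    for (int i = 0; i < mXSize * mYSize; ++i)
    {
        unsigned int r = static_cast<unsigned int>(255.0f * src[i][0]);
        unsigned int g = static_cast<unsigned int>(255.0f * src[i][1]);
        unsigned int b = static_cast<unsigned int>(255.0f * src[i][2]);
        dst[i] = r | (g << 8) | (b << 16) | 0xFF000000u;
    }
    // WICFileIO::SaveToPNG("DrawnPixels.png", ...) or an equivalent writer
    // would then serialize mDrawnPixelsTexture to disk.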
Example 8
bool ConvexHull3DWindow::LoadData()
{
    std::string filename = "data";
    if (mCurrentFile < 10)
    {
        filename += "0";
    }
    filename += std::to_string(mCurrentFile) + ".txt";
    std::string path = mEnvironment.GetPath(filename);
    if (path == "")
    {
        return false;
    }

    std::ifstream input(path);
    if (!input)
    {
        return false;
    }

    Vector3<float> center{ 0.0f, 0.0f, 0.0f };
    unsigned int numVertices;
    input >> numVertices;
    std::vector<Vector3<float>> vertices(numVertices);
    for (auto& v : vertices)
    {
        for (int j = 0; j < 3; ++j)
        {
            input >> v[j];
        }
        center += v;
    }
    input.close();
    center /= static_cast<float>(numVertices);

    float radius = 0.0f;
    for (auto const& v : vertices)
    {
        Vector3<float> diff = v - center;
        float length = Length(diff);
        if (length > radius)
        {
            radius = length;
        }
    }

    // The worst-case number of words for UIntegerFP32<N> for 'float' input
    // to ConvexHull3 is N = 27.  For 'double', it is N = 197.
    ConvexHull3<float, BSNumber<UIntegerFP32<27>>> ch;
    if (numVertices < 4 || !ch(numVertices, &vertices[0], 0.0f))
    {
        if (mMesh)
        {
            mTrackball.Detach(mMesh);
            mTrackball.Update();
            mCameraRig.Unsubscribe(mMesh->worldTransform);
            mMesh = nullptr;
        }

        mMessage = "File = " + std::to_string(mCurrentFile) +
            " has intrinsic dimension " + std::to_string(ch.GetDimension());
        return false;
    }
#if defined(GTE_COLLECT_BSNUMBER_STATISTICS)
    std::cout << "max size = " << gte::gBSNumberMaxSize << std::endl;
#endif

    std::vector<TriangleKey<true>> const& triangles = ch.GetHullUnordered();

    std::mt19937 mte;
    std::uniform_real_distribution<float> rnd(0.0f, 1.0f);

    struct Vertex
    {
        Vector3<float> position;
        Vector4<float> color;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_COLOR, DF_R32G32B32A32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat,
        numVertices));
    Vertex* vertex = vbuffer->Get<Vertex>();
    for (unsigned int i = 0; i < numVertices; ++i)
    {
        vertex[i].position = vertices[i];
        vertex[i].color[0] = rnd(mte);
        vertex[i].color[1] = rnd(mte);
        vertex[i].color[2] = rnd(mte);
        vertex[i].color[3] = 1.0f;
    }

    unsigned int numPrimitives = static_cast<unsigned int>(triangles.size());
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRIMESH,
        numPrimitives, sizeof(unsigned int)));
    Memcpy(ibuffer->GetData(), &triangles[0], ibuffer->GetNumBytes());

    // Update all information associated with the mesh transforms.
    if (mMesh)
    {
        mTrackball.Detach(mMesh);
        mTrackball.Update();
        mCameraRig.Unsubscribe(mMesh->worldTransform);
    }
    mMesh = std::make_shared<Visual>(vbuffer, ibuffer, mEffect);
    mMesh->localTransform.SetTranslation(-center);
    mMesh->worldTransform = mMesh->localTransform;
    mCameraRig.Subscribe(mMesh->worldTransform,
        mEffect->GetPVWMatrixConstant());

    // Move the camera for a centered view of the mesh.
    Vector4<float> camPosition = Vector4<float>{0.0f, 0.0f, 0.0f, 1.0f}
        - 2.5f*radius*mCamera->GetDVector();
    mCamera->SetPosition(camPosition);

    // Update the message for display.
    mMessage =
        "File = " + std::to_string(mCurrentFile) + " , " +
        "Vertices = " + std::to_string(numVertices) + " , " +
        "Triangles =" + std::to_string(numPrimitives);

    mTrackball.Attach(mMesh);
    mTrackball.Update();
    mCameraRig.UpdatePVWMatrices();
    return true;
}
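The Memcpy of the hull triangles into the index buffer assumes TriangleKey<true> is exactly three contiguous ints; a compile-time guard makes that layout assumption explicit:

    // Optional guard for the raw copy above (and for the identical treatment
    // of &triangles[0].V[0] in the MinimumVolumeBox3D example below):
    // V[0], V[1], V[2] must be packed with no padding.
    static_assert(sizeof(TriangleKey<true>) == 3 * sizeof(int),
        "TriangleKey<true> layout is incompatible with the raw index copy.");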
Example 9
bool GeometryShadersWindow::CreateScene()
{
    std::string filename;
#if defined(USE_DRAW_DIRECT)
    filename = mEnvironment.GetPath("RandomSquares.hlsl");
#else
    filename = mEnvironment.GetPath("RandomSquaresIndirect.hlsl");
#endif
    std::shared_ptr<VisualProgram> program =
        mProgramFactory.CreateFromFiles(filename, filename, filename);
    if (!program)
    {
        return false;
    }

    // Create particles used by direct and indirect drawing.
    struct Vertex
    {
        Vector3<float> position;
        Vector3<float> color;
        float size;
    };

    // Use a Mersenne twister engine for random numbers.
    std::mt19937 mte;
    std::uniform_real_distribution<float> symr(-1.0f, 1.0f);
    std::uniform_real_distribution<float> unir(0.0f, 1.0f);
    std::uniform_real_distribution<float> posr(0.01f, 0.1f);

    int const numParticles = 128;
    std::vector<Vertex> particles(numParticles);
    for (auto& particle : particles)
    {
        particle.position = { symr(mte), symr(mte), symr(mte) };
        particle.color = { unir(mte), unir(mte), unir(mte) };
        particle.size = posr(mte);
    }

    // Create the constant buffer used by direct and indirect drawing.
    mMatrices = std::make_shared<ConstantBuffer>(
        2 * sizeof(Matrix4x4<float>), true);
    program->GetGShader()->Set("Matrices", mMatrices);

#if defined(USE_DRAW_DIRECT)
    // Create a mesh for direct drawing.
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_COLOR, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_TEXCOORD, DF_R32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat,
        numParticles));
    Memcpy(vbuffer->GetData(), &particles[0], numParticles*sizeof(Vertex));
#else
    // Create a mesh for indirect drawing.
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(numParticles));
    mParticles = std::make_shared<StructuredBuffer>(numParticles,
        sizeof(Vertex));
    Memcpy(mParticles->GetData(), &particles[0], numParticles*sizeof(Vertex));
    gshader->Set("particles", mParticles);
#endif

    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_POLYPOINT,
        numParticles));

    std::shared_ptr<VisualEffect> effect =
        std::make_shared<VisualEffect>(program);

    mMesh = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    return true;
}
Example 10
void ResultsRouterThread::operator ()()
{
	//Connect to the analytic output queue
	bool bConnectResult = mqPtr->connectTo(_analyticOutQueueAddress,ZMQ_PULL);

	if(bConnectResult){

		//cout << "ResultsRouterThread::operator: "<<"Results router thread for analytic instance " << _iAnalyticInstId << " started." << endl;
		//cout << "Results router thread for analytic instance " << _iAnalyticInstId << " started." << endl;

		cout << "Results router thread : Start reading the images from the analytic process  : " << _iAnalyticInstId << endl << endl;

		AnalyticResultGateway _analyticResultGateway;
		Image imageResultObj;

		ostringstream filePath;
		filePath << "/usr/local/opencctv/images/";
		filePath << _iAnalyticInstId;
		filePath << "/";

		//TODO : Add the error handling to mkdir
		mkdir(filePath.str().c_str(), 0775);

		int iFrameCount = 1;

		//Read the output results and store in the OpenCCTV database
		while(1) {

			//Read serialized results image object
			string serializedImageStr = mqPtr->read();

			//Initialize with received serialized string data
			std::istringstream ibuffer(serializedImageStr);

			//De-serialize and create image object
			boost::archive::text_iarchive iarchive(ibuffer);
			iarchive & imageResultObj;

			//TODO Define the image store location in a config file
			//1. Write the images to the path /usr/local/opencctv/images
			Mat matResultObj = JpegImage::toOpenCvMat(imageResultObj);

			ostringstream filename;
			filename << filePath.str();
			filename << imageResultObj.getTimestamp() << "_";
			filename << iFrameCount;
			filename << ".jpg";

			imwrite(filename.str(), matResultObj);

			++iFrameCount;

			//2. Store the image details in the results DB
			_analyticResultGateway.insertResults(_iAnalyticInstId, imageResultObj.getTimestamp(), imageResultObj.getResult(), filename.str());

		}

	}else{
		cerr << "ResultsRouterThread:operator: Error connecting to the analytic output queue......." << endl;
	}
}
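The loop above deserializes each Image with a Boost text_iarchive; the producer side would mirror it with a text_oarchive before pushing onto the ZMQ queue. A sketch of that counterpart (the Image serialization support is inferred from the reader, and the send call is only indicated because the queue API is not shown in this excerpt):

	//Sketch only: serialize an Image before sending it to the analytic output queue
	std::ostringstream obuffer;
	boost::archive::text_oarchive oarchive(obuffer);
	oarchive & imageToSend;  // imageToSend is a hypothetical Image instance
	std::string serializedImageStr = obuffer.str();
	//mqPtr->send(serializedImageStr) or similar would then push it to the consumer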
void MinimumVolumeBox3DWindow::CreateScene()
{
    mScene = std::make_shared<Node>();

    std::mt19937 mte;
    std::uniform_real_distribution<float> rnd(-1.0f, 1.0f);
    Vector3<float> center{ 0.0f, 0.0f, 0.0f };
    Vector3<float> extent{ 1.0f, 0.25f, 0.125f };
    Vector3<float> axis[3] = {
        { 1.0f, 1.0f, 0.0f },
        { -1.0f, 1.0f, 0.0f },
        { 0.0f, 0.0f, 1.0f }
    };
    Normalize(axis[0]);
    Normalize(axis[1]);
    Normalize(axis[2]);
    for (auto& v : mVertices)
    {
        float theta = rnd(mte) * (float)GTE_C_TWO_PI;
        float phi = rnd(mte) * (float)GTE_C_PI;
        float radius = 0.5f * (rnd(mte) + 1.0f);
        float x = extent[0] * cos(theta) * sin(phi);
        float y = extent[1] * sin(theta) * sin(phi);
        float z = extent[2] * cos(phi);
        v = center + radius * (x * axis[0] + y * axis[1] + z * axis[2]);
    }

    struct Vertex
    {
        Vector3<float> position;
        Vector4<float> color;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_COLOR, DF_R32G32B32A32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat,
        NUM_POINTS));
    Vertex* vertex = vbuffer->Get<Vertex>();
    for (int i = 0; i < NUM_POINTS; ++i)
    {
        vertex[i].position[0] = (float)mVertices[i][0];
        vertex[i].position[1] = (float)mVertices[i][1];
        vertex[i].position[2] = (float)mVertices[i][2];
        vertex[i].color[0] = 0.5f * (rnd(mte) + 1.0f);
        vertex[i].color[1] = 0.5f * (rnd(mte) + 1.0f);
        vertex[i].color[2] = 0.5f * (rnd(mte) + 1.0f);
        vertex[i].color[3] = 1.0f;
    }

    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_POLYPOINT,
        NUM_POINTS));

    std::shared_ptr<VertexColorEffect> effect =
        std::make_shared<VertexColorEffect>(mProgramFactory);

    mPoints = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    mCameraRig.Subscribe(mPoints->worldTransform,
        effect->GetPVWMatrixConstant());
    mScene->AttachChild(mPoints);

    // Choose the number of threads to use.  The default constructor for
    // MinimumVolumeBox3 uses a default of 1, in which case all computations
    // are on the main thread.  The timings below are for a 64-bit release
    // build (no debugger attached) on Intel Core i7-3930K CPUs running at
    // 3.20 GHz.
    unsigned int numThreads = 1;

#if 0
    // Compute the convex hull internally using arbitrary precision
    // arithmetic.  This is slower than computing the hull explicitly using
    // the maximum fixed precision; see the other conditional block of code.
    Timer timer;
    typedef BSRational<UIntegerAP32> MVBRational;
    MinimumVolumeBox3<float, MVBRational> mvb3(numThreads);
    OrientedBox3<float> minBox = mvb3(NUM_POINTS, &mVertices[0]);
    std::cout << "mvb3 seconds = " << timer.GetSeconds() << std::endl;
    // numThreads = 1, seconds = 7.09
    // numThreads = 2, seconds = 6.22
#else
    // If mVertices were to use 'double', you would need the template type
    // UIntegerFP32<167> to compute the convex hull.
    Timer timer;
    typedef BSNumber<UIntegerFP32<27>> CHRational;
    ConvexHull3<float, CHRational> ch3(numThreads);
    ch3(NUM_POINTS, &mVertices[0], 0.0f);
    std::vector<TriangleKey<true>> const& triangles = ch3.GetHullUnordered();
    int const numIndices = static_cast<int>(3 * triangles.size());
    int const* indices = static_cast<int const*>(&triangles[0].V[0]);
    typedef BSRational<UIntegerAP32> MVBRational;
    MinimumVolumeBox3<float, MVBRational> mvb3(numThreads);
    OrientedBox3<float> minBox = mvb3(NUM_POINTS, &mVertices[0], numIndices,
        indices);
    std::cout << "mvb3 seconds = " << timer.GetSeconds() << std::endl;
    // numThreads = 1, seconds = 2.69
    // numThreads = 2, seconds = 2.01
#endif

    std::vector<int> const& hull = mvb3.GetHull();
    ibuffer = std::make_shared<IndexBuffer>(IP_TRIMESH,
        static_cast<int>(hull.size() / 3), sizeof(int));
    Memcpy(ibuffer->GetData(), &hull[0], ibuffer->GetNumBytes());
    mPolytope = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    mScene->AttachChild(mPolytope);

    MeshFactory mf;
    mf.SetVertexFormat(vformat);
    mBoxMesh = mf.CreateBox(1.0f, 1.0f, 1.0f);
    vbuffer = mBoxMesh->GetVertexBuffer();
    vertex = vbuffer->Get<Vertex>();
    std::array<Vector3<float>, 8> corner;
    minBox.GetVertices(corner);
    for (int i = 0; i < 8; ++i)
    {
        vertex[i].position[0] = corner[i][0];
        vertex[i].position[1] = corner[i][1];
        vertex[i].position[2] = corner[i][2];
        vertex[i].color[0] = 0.5f * (rnd(mte) + 1.0f);
        vertex[i].color[1] = 0.5f * (rnd(mte) + 1.0f);
        vertex[i].color[2] = 0.5f * (rnd(mte) + 1.0f);
        vertex[i].color[3] = 1.0f;
    }
    mBoxMesh->SetEffect(effect);
    mScene->AttachChild(mBoxMesh);

    mTrackball.Attach(mScene);
    mTrackball.Update();
}
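Inside CreateScene, right after minBox is computed, a quick sanity check is possible because OrientedBox3 stores half-extents; this check is not part of the original sample:

    // Sketch only: the minimum box volume is the product of the full edge
    // lengths, i.e. 8 * extent[0] * extent[1] * extent[2].
    float const boxVolume =
        8.0f * minBox.extent[0] * minBox.extent[1] * minBox.extent[2];
    std::cout << "min box volume = " << boxVolume << std::endl;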
Example 12
bool TextureArraysWindow::CreateScene()
{
    // Create the shaders and associated resources.
    std::string filename = mEnvironment.GetPath("TextureArrays.hlsl");
    std::shared_ptr<VisualProgram> program =
        mProgramFactory.CreateFromFiles(filename, filename, "");
    if (!program)
    {
        return false;
    }

    // Create a vertex buffer for a two-triangle square.  The PNG is stored in
    // left-handed coordinates.  The texture coordinates are chosen to reflect
    // the texture in the y-direction.
    struct Vertex
    {
        Vector3<float> position;
        Vector2<float> tcoord;
    };
    VertexFormat vformat;
    vformat.Bind(VA_POSITION, DF_R32G32B32_FLOAT, 0);
    vformat.Bind(VA_TEXCOORD, DF_R32G32_FLOAT, 0);
    std::shared_ptr<VertexBuffer> vbuffer(new VertexBuffer(vformat, 4));
    Vertex* vertex = vbuffer->Get<Vertex>();
    vertex[0].position = { 0.0f, 0.0f, 0.0f };
    vertex[0].tcoord = { 0.0f, 1.0f };
    vertex[1].position = { 1.0f, 0.0f, 0.0f };
    vertex[1].tcoord = { 1.0f, 1.0f };
    vertex[2].position = { 0.0f, 1.0f, 0.0f };
    vertex[2].tcoord = { 0.0f, 0.0f };
    vertex[3].position = { 1.0f, 1.0f, 0.0f };
    vertex[3].tcoord = { 1.0f, 0.0f };

    // Create an indexless buffer for a triangle mesh with two triangles.
    std::shared_ptr<IndexBuffer> ibuffer(new IndexBuffer(IP_TRISTRIP, 2));

    // Create an effect for the vertex and pixel shaders.  The texture is
    // bilinearly filtered and the texture coordinates are clamped to [0,1]^2.
    std::shared_ptr<ConstantBuffer> cbuffer(new ConstantBuffer(
        sizeof(Matrix4x4<float>), true));
    program->GetVShader()->Set("PVWMatrix", cbuffer);

    std::shared_ptr<PixelShader> pshader = program->GetPShader();
    std::shared_ptr<Texture1Array> t1array(new Texture1Array(2,
        DF_R8G8B8A8_UNORM, 2));
    unsigned int* t1data = t1array->Get<unsigned int>();
    t1data[0] = 0xFF000000;
    t1data[1] = 0xFFFFFFFF;
    pshader->Set("myTexture1", t1array);

    Texture2* stoneTexture = WICFileIO::Load(
        mEnvironment.GetPath("StoneWall.png"), false);
    std::shared_ptr<Texture2Array> t2array(new Texture2Array(2,
        DF_R8G8B8A8_UNORM, 256, 256));
    unsigned char* t2data = t2array->Get<unsigned char>();
    size_t const numBytes = stoneTexture->GetNumBytes();
    Memcpy(t2data, stoneTexture->GetData(), numBytes);
    t2data += numBytes;
    delete stoneTexture;
    for (size_t i = 0; i < numBytes; ++i)
    {
        *t2data++ = static_cast<unsigned char>(rand() % 256);
    }
    pshader->Set("myTexture2", t2array);

    std::shared_ptr<SamplerState> samplerState(new SamplerState());
    samplerState->filter = SamplerState::MIN_L_MAG_L_MIP_P;
    samplerState->mode[0] = SamplerState::CLAMP;
    samplerState->mode[1] = SamplerState::CLAMP;
    pshader->Set("mySampler", samplerState);

    std::shared_ptr<VisualEffect> effect =
        std::make_shared<VisualEffect>(program);

    // Create the geometric object for drawing.  Translate it so that its
    // center of mass is at the origin.  This supports virtual trackball
    // motion about the object "center".
    mSquare = std::make_shared<Visual>(vbuffer, ibuffer, effect);
    mSquare->localTransform.SetTranslation(-0.5f, -0.5f, 0.0f);

    // Enable automatic updates of pvw-matrices and w-matrices.
    mCameraRig.Subscribe(mSquare->worldTransform, cbuffer);

    mTrackball.Attach(mSquare);
    mTrackball.Update();
    return true;
}