Example #1: Film::Film constructor
// Film Method Definitions
Film::Film(const Point2i &resolution, const Bounds2f &cropWindow,
           std::unique_ptr<Filter> filt, Float diagonal,
           const std::string &filename, Float scale)
    : fullResolution(resolution),
      diagonal(diagonal * .001),  // convert sensor diagonal from millimeters to meters
      filter(std::move(filt)),
      filename(filename),
      scale(scale) {
    // Compute film image bounds
    croppedPixelBounds =
        Bounds2i(Point2i(std::ceil(fullResolution.x * cropWindow.pMin.x),
                         std::ceil(fullResolution.y * cropWindow.pMin.y)),
                 Point2i(std::ceil(fullResolution.x * cropWindow.pMax.x),
                         std::ceil(fullResolution.y * cropWindow.pMax.y)));

    // Allocate film image storage
    pixels = std::unique_ptr<Pixel[]>(new Pixel[croppedPixelBounds.Area()]);

    // Precompute filter weight table
    int offset = 0;
    for (int y = 0; y < filterTableWidth; ++y) {
        for (int x = 0; x < filterTableWidth; ++x, ++offset) {
            Point2f p;
            p.x = (x + 0.5f) * filter->radius.x / filterTableWidth;
            p.y = (y + 0.5f) * filter->radius.y / filterTableWidth;
            filterTable[offset] = filter->Evaluate(p);
        }
    }
}
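A minimal construction sketch, assuming the pbrt-style types used throughout these examples (Point2i, Bounds2f, BoxFilter); the resolution, filter radius, and filename below are arbitrary placeholder values:

std::unique_ptr<Filter> boxFilter(new BoxFilter(Vector2f(0.5f, 0.5f)));
Film *film = new Film(Point2i(1280, 720),                      // full resolution
                      Bounds2f(Point2f(0, 0), Point2f(1, 1)),  // crop window: whole image
                      std::move(boxFilter),
                      35.f,           // sensor diagonal in millimeters (stored as meters)
                      "render.exr",   // output filename
                      1.f);           // scale applied to pixel values on write

With this crop window the cropped pixel bounds cover the full 1280x720 image; a smaller window makes croppedPixelBounds the corresponding sub-rectangle only.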
Example #2: Film::GetFilmTile
std::unique_ptr<FilmTile> Film::GetFilmTile(const Bounds2i &sampleBounds) {
    // Bound image pixels that samples in _sampleBounds_ contribute to
    Vector2f halfPixel = Vector2f(0.5f, 0.5f);
    Bounds2f floatBounds = (Bounds2f)sampleBounds;
    Point2i p0 = (Point2i)Ceil(floatBounds.pMin - halfPixel - filter->radius);
    Point2i p1 = (Point2i)Floor(floatBounds.pMax - halfPixel + filter->radius) +
                 Point2i(1, 1);
    Bounds2i tilePixelBounds = Intersect(Bounds2i(p0, p1), croppedPixelBounds);
    return std::unique_ptr<FilmTile>(new FilmTile(
        tilePixelBounds, filter->radius, filterTable, filterTableWidth));
}
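A sketch of how GetFilmTile is typically paired with MergeFilmTile in a tile-based render loop. GetSampleBounds and MergeFilmTile appear in the other examples here; FilmTile::AddSample and the per-sample loop are assumed and only indicated:

Bounds2i sampleBounds = film->GetSampleBounds();
std::unique_ptr<FilmTile> tile = film->GetFilmTile(sampleBounds);
// For each camera sample that falls inside sampleBounds:
//     tile->AddSample(cameraSample.pFilm, L, rayWeight);
film->MergeFilmTile(std::move(tile));  // merge accumulated tile pixels back into the film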
Example #3: Camera::setResolution
void Camera::setResolution(glm::vec2 resolution)
{
    resX = resolution.x;
    resY = resolution.y;

    // Update the aspect ratio before rebuilding the projection matrix,
    // since the projection typically depends on it.
    m_aspectRatio = resX / resY;
    updateProjectionMatrix();

    // Raster bounds are centered on the optical axis.
    m_rasterBounds = Bounds2i(glm::vec2(-resX / 2.0, -resY / 2.0),
                              glm::vec2(resX / 2.0, resY / 2.0));
    m_area = sensorArea();
}
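A small usage sketch; the resize handler below is hypothetical, only Camera::setResolution comes from the code above:

// Hypothetical framebuffer-resize handler: one call refreshes the projection,
// aspect ratio, raster bounds, and sensor area together.
void onFramebufferResize(Camera &camera, int width, int height)
{
    camera.setResolution(glm::vec2(width, height));
}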
Example #4: RealisticCamera::RenderExitPupil
void RealisticCamera::RenderExitPupil(Float sx, Float sy,
                                      const char *filename) const {
    Point3f pFilm(sx, sy, 0);

    const int nSamples = 2048;
    Float *image = new Float[3 * nSamples * nSamples];
    Float *imagep = image;

    for (int y = 0; y < nSamples; ++y) {
        Float fy = (Float)y / (Float)(nSamples - 1);
        Float ly = Lerp(fy, -RearElementRadius(), RearElementRadius());
        for (int x = 0; x < nSamples; ++x) {
            Float fx = (Float)x / (Float)(nSamples - 1);
            Float lx = Lerp(fx, -RearElementRadius(), RearElementRadius());

            Point3f pRear(lx, ly, LensRearZ());

            // White: sample point lies outside the rear lens element.
            if (lx * lx + ly * ly > RearElementRadius() * RearElementRadius()) {
                *imagep++ = 1;
                *imagep++ = 1;
                *imagep++ = 1;
            } else if (TraceLensesFromFilm(Ray(pFilm, pRear - pFilm),
                                           nullptr)) {
                // Gray: the ray from the film point makes it through the lens system.
                *imagep++ = 0.5f;
                *imagep++ = 0.5f;
                *imagep++ = 0.5f;
            } else {
                // Black: the ray is blocked inside the lens system.
                *imagep++ = 0.f;
                *imagep++ = 0.f;
                *imagep++ = 0.f;
            }
        }
    }

    WriteImage(filename, image,
               Bounds2i(Point2i(0, 0), Point2i(nSamples, nSamples)),
               Point2i(nSamples, nSamples));
    delete[] image;
}
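A hypothetical driver for the routine above, assuming the camera and film are already set up and that the method is accessible from the calling code; by radial symmetry it is enough to sample film positions along +x out to half the sensor diagonal:

// Visualize the exit pupil from the film center and from the most distant film point.
camera->RenderExitPupil(0, 0, "pupil_center.exr");
camera->RenderExitPupil(film->diagonal / 2, 0, "pupil_corner.exr");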
Example #5: GetIntegrators test setup
std::vector<TestIntegrator> GetIntegrators() {
  std::vector<TestIntegrator> integrators;

  Point2i resolution(10, 10);
  AnimatedTransform identity(new Transform, 0, new Transform, 1);

  for (auto scene : GetScenes()) {
    // Path tracing integrators
    for (auto sampler : GetSamplers(Bounds2i(Point2i(0,0), resolution))) {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<PerspectiveCamera>(
          identity, Bounds2f(Point2f(-1,-1), Point2f(1,1)), 0., 1.,
          0., 10., 45, film, nullptr);

      Integrator *integrator = new PathIntegrator(8, camera, sampler.first);
      integrators.push_back({integrator, film,
              "Path, depth 8, Perspective, " + sampler.second + ", " +
              scene.description, scene});
    }

    for (auto sampler : GetSamplers(Bounds2i(Point2i(0,0), resolution))) {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<OrthographicCamera>(
          identity, Bounds2f(Point2f(-.1,-.1), Point2f(.1,.1)), 0., 1.,
          0., 10., film, nullptr);

      Integrator *integrator = new PathIntegrator(8, camera, sampler.first);
      integrators.push_back({integrator, film,
              "Path, depth 8, Ortho, " + sampler.second + ", " +
              scene.description, scene});
    }

    // Volume path tracing integrators
    for (auto sampler : GetSamplers(Bounds2i(Point2i(0,0), resolution))) {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<PerspectiveCamera>(
          identity, Bounds2f(Point2f(-1,-1), Point2f(1,1)), 0., 1.,
          0., 10., 45, film, nullptr);

      Integrator *integrator = new VolPathIntegrator(8, camera, sampler.first);
      integrators.push_back({integrator, film,
              "VolPath, depth 8, Perspective, " + sampler.second + ", " +
              scene.description, scene});
    }
    for (auto sampler : GetSamplers(Bounds2i(Point2i(0,0), resolution))) {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<OrthographicCamera>(
          identity, Bounds2f(Point2f(-.1,-.1), Point2f(.1,.1)), 0., 1.,
          0., 10., film, nullptr);

      Integrator *integrator = new VolPathIntegrator(8, camera, sampler.first);
      integrators.push_back({integrator, film,
              "VolPath, depth 8, Ortho, " + sampler.second + ", " +
              scene.description, scene});
    }

    // BDPT
    for (auto sampler : GetSamplers(Bounds2i(Point2i(0,0), resolution))) {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<PerspectiveCamera>(
          identity, Bounds2f(Point2f(-1,-1), Point2f(1,1)), 0., 1.,
          0., 10., 45, film, nullptr);

      Integrator *integrator = new BDPTIntegrator(sampler.first, camera, 6,
                                                  false, false);
      integrators.push_back({integrator, film,
              "BDPT, depth 8, Perspective, " + sampler.second + ", " +
              scene.description, scene});
    }
#if 0
    // Ortho camera not currently supported with BDPT.
    for (auto sampler : GetSamplers(Bounds2i(Point2i(0,0), resolution))) {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<OrthographicCamera>(
          identity, Bounds2f(Point2f(-.1,-.1), Point2f(.1,.1)), 0., 1.,
          0., 10., film, nullptr);

      Integrator *integrator = new BDPTIntegrator(sampler.first, camera, 8,
                                            false, false);
      integrators.push_back({integrator, film,
              "BDPT, depth 8, Ortho, " + sampler.second + ", " +
              scene.description, scene});
    }
#endif

    // MLT
    {
      std::unique_ptr<Filter> filter(new BoxFilter(Vector2f(0.5, 0.5)));
      Film *film = new Film(resolution, Bounds2f(Point2f(0,0), Point2f(1,1)),
                            std::move(filter), 1., "test.exr", 1.);
      std::shared_ptr<Camera> camera = std::make_shared<PerspectiveCamera>(
          identity, Bounds2f(Point2f(-1,-1), Point2f(1,1)), 0., 1.,
          0., 10., 45, film, nullptr);

      Integrator *integrator =
          new MLTIntegrator(camera, 8 /* depth */, 100000 /* n bootstrap */,
                          1000 /* nchains */, 1024 /* mutations per pixel */,
                          0.01 /* sigma */, 0.3 /* large step prob */);
      integrators.push_back({integrator, film,
              "MLT, depth 8, Perspective, " + scene.description,
              scene});
    }
  }

  return integrators;
}
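A sketch of how the returned list might be consumed by a test harness. The member names on TestIntegrator and TestScene (integrator, film, description, scene, expected) are inferred from the aggregate initializers above, and CheckAverageRadiance is a hypothetical helper:

for (const TestIntegrator &tr : GetIntegrators()) {
    tr.integrator->Render(*tr.scene.scene);  // each Film above writes "test.exr"
    // Hypothetical check: compare the image's average radiance against the
    // value the test scene was constructed to produce.
    CheckAverageRadiance("test.exr", tr.scene.expected, tr.description);
}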
Example #6: MLTIntegrator::Render
void MLTIntegrator::Render(const Scene &scene) {
    lightDistr =
        std::unique_ptr<Distribution1D>(ComputeLightSamplingCDF(scene));
    Film &film = *camera->film;
    // Generate bootstrap samples and compute $b$
    int bootstrapSamples = nBootstrap * (maxDepth + 1);
    std::unique_ptr<Float[]> bootstrapWeights(new Float[bootstrapSamples]);
    {
        ProgressReporter progress(nBootstrap, "Generating bootstrap paths");
        ParallelFor([&](int k) {
            // Generate a single bootstrap sample
            MemoryArena arena;
            for (int depth = 0; depth <= maxDepth; ++depth) {
                uint32_t uIndex = k * (maxDepth + 1) + depth;
                MLTSampler sampler(mutationsPerPixel, uIndex, sigma,
                                   largeStepProb);
                Point2f samplePos;
                bootstrapWeights[uIndex] =
                    L(scene, arena, sampler, depth, &samplePos).y();
            }
            progress.Update();
        }, nBootstrap);
        progress.Done();
    }
    Distribution1D bootstrap(bootstrapWeights.get(), bootstrapSamples);
    Float b = bootstrap.funcInt * (maxDepth + 1);

    // Run _nChains_ Markov Chains in parallel
    int64_t nTotalMutations =
        mutationsPerPixel * (int64_t)film.GetSampleBounds().Area();
    {
        StatTimer timer(&renderingTime);
        ProgressReporter progress(nTotalMutations / 100, "Rendering");
        ParallelFor([&](int k) {
            int64_t nChainMutations =
                std::min((k + 1) * nTotalMutations / nChains, nTotalMutations) -
                k * nTotalMutations / nChains;
            MemoryArena arena;
            std::unique_ptr<FilmTile> filmTile = film.GetFilmTile(Bounds2i(
                    film.croppedPixelBounds.pMin, film.croppedPixelBounds.pMax));
            // Select initial state from the set of bootstrap samples
            RNG rng(PCG32_DEFAULT_STATE, k);
            int bootstrapIndex = bootstrap.SampleDiscrete(rng.UniformFloat());
            int depth = bootstrapIndex % (maxDepth + 1);

            // Initialize local variables for selected state
            MLTSampler sampler(mutationsPerPixel, bootstrapIndex, sigma,
                               largeStepProb);
            Point2f currentPos, proposalPos;
            Spectrum currentL, proposalL;
            currentL = L(scene, arena, sampler, depth, &currentPos);

            // Run the Markov Chain for _nChainMutations_ steps
            for (int64_t i = 0; i != nChainMutations; ++i) {
                sampler.Begin();
                proposalL = L(scene, arena, sampler, depth, &proposalPos);
                // Compute the acceptance rate
                Float accept = std::min((Float)1, proposalL.y() / currentL.y());

                // Splat both current and proposed samples to _FilmTile_
                if (accept > 0)
                    filmTile->AddSplat(proposalPos,
                                       proposalL * accept / proposalL.y());
                filmTile->AddSplat(currentPos,
                                   currentL * (1 - accept) / currentL.y());

                // Accept or reject the proposal
                if (rng.UniformFloat() < accept) {
                    currentPos = proposalPos;
                    currentL = proposalL;
                    sampler.Accept();
                    ++acceptedMutations;
                } else {
                    sampler.Reject();
                }
                ++totalMutations;
                if (i % 100 == 0) progress.Update();
            }
            film.MergeFilmTile(std::move(filmTile));
        }, nChains);
        progress.Done();
    }
    film.WriteImage(b / mutationsPerPixel);
}
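A short note on the normalization in the last line, written as comments; it assumes Distribution1D::funcInt holds the average of the tabulated bootstrap weights, which is what the computation of b above relies on:

// bootstrap.funcInt is the average of the tabulated bootstrap weights, so
//     b = funcInt * (maxDepth + 1)
//       = (1 / nBootstrap) * sum over bootstrap paths of (sum over depths of L(...).y()),
// an estimate of the image-wide luminance of the contribution function.
// Each mutation above splats a total luminance of exactly 1 (accept + (1 - accept)),
// and every pixel receives mutationsPerPixel mutations in expectation, so the
// accumulated splats are rescaled by b / mutationsPerPixel when the image is written:
film.WriteImage(b / mutationsPerPixel);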