Ejemplo n.º 1
0
float Camera::generateRay( const CameraSample& cameraSample, Ray *o_ray ) const
{
    if ( o_ray ==  nullptr ) {
        return 0.0f;
    }
    
    const glm::vec2 &pixel = cameraSample.pixelSample;
    
    glm::vec3 position = Transform::transform( glm::vec3( pixel.x, pixel.y, 0.0 ), m_rasterToCamera) ;
    Ray ray( glm::vec3( 0.0 ), glm::normalize( position ) );
    
    if ( m_aperature > 0.0f ) {
        
        // Sample point on the lens
        glm::vec2 lensPoint = m_aperature * concentricSampleDisk( cameraSample.lensSample );
        
        float ft = m_focalLength / -ray.getDirection().z;
        glm::vec3 focusPoint = ray.getPoint( ft );
        
        ray = Ray(
            glm::vec3( lensPoint.x, lensPoint.y, 0 ),
            glm::normalize( focusPoint - ray.getPosition() )
        );
    }
    
    *o_ray = Transform::transform( ray, m_cameraToWorld );
    
    return 1.0f;
}
Ejemplo n.º 2
0
		// Pre-generates the stratified sample sets used per pixel:
		//  - aoSamples:   nSqrt*nSqrt sphere directions (nSqrt = nAA*nAO per axis),
		//  - aaSamples:   nAA^2 jittered subpixel offsets centered on the pixel,
		//  - lensSamples: nAA^2 points on the unit disk for depth of field.
		// AA and lens sets are shuffled afterwards to break stratum coherence.
		void StratifiedSampler::prepareSamples(int nAASamplesSqrt, int nAOSamplesSqrt) {
			int nSqrt = nAASamplesSqrt*nAOSamplesSqrt;
			aoSamples = QVector<Vector3f>(nSqrt*nSqrt);
			aaSamples = QVector<Vector3f>(nAASamplesSqrt*nAASamplesSqrt);
			lensSamples = QVector<Vector3f>(nAASamplesSqrt*nAASamplesSqrt);
			int count = 0;
			for (int i = 0; i < nSqrt; i++) {
				for (int j = 0; j < nSqrt; j++) {
					// Uniform number in the stratum [i/nSqrt; (i+1)/nSqrt]
					double x = rg->getDouble( ((double)i)/(double)nSqrt,((double)(i+1.0))/(double)nSqrt);
					double y = rg->getDouble( ((double)j)/(double)nSqrt,((double)(j+1.0))/(double)nSqrt);
					aoSamples[count++] = sampleSphere(x,y);
				}
			}

			count = 0;
			for (int i = 0; i < nAASamplesSqrt; i++) {
				for (int j = 0; j < nAASamplesSqrt; j++) {
					// Uniform number in the stratum [i/nAA; (i+1)/nAA]
					double x = rg->getDouble( ((double)i)/(double)nAASamplesSqrt,((double)(i+1.0))/(double)nAASamplesSqrt);
					double y = rg->getDouble( ((double)j)/(double)nAASamplesSqrt,((double)(j+1.0))/(double)nAASamplesSqrt);
					aaSamples[count] = Vector3f(x-0.5,y-0.5,1);
					// BUG FIX: the lens strata previously divided by nSqrt while the
					// loops only run to nAASamplesSqrt, so the lens samples covered
					// only the corner [0, nAA/nSqrt)^2 of the unit square instead of
					// the whole [0,1)^2 (cf. ProgressiveStratifiedSampler::getLensSample,
					// which stratifies by aaSamplesSqrt).
					x = rg->getDouble( ((double)i)/(double)nAASamplesSqrt,((double)(i+1.0))/(double)nAASamplesSqrt);
					y = rg->getDouble( ((double)j)/(double)nAASamplesSqrt,((double)(j+1.0))/(double)nAASamplesSqrt);
					lensSamples[count++] = concentricSampleDisk(x,y);
				}
			}
			// Randomize the AA/lens samples to avoid coherence between pixels.
			aaSamples = rg->randomize(aaSamples);
			lensSamples = rg->randomize(lensSamples);
		}
Ejemplo n.º 3
0
double PerspectiveCamera::generateRay(const CameraSample &sample,
                                      Ray *ray) const {
    auto cameraLength = 1.0_um;

    // Generate raster and camera samples
    Point3D Pras(sample.imageX*cameraLength, sample.imageY*cameraLength, 0);
    Point3D Pcamera;
    RasterToCamera(Pras, &Pcamera);
    *ray = Ray(Point3D(), normalize(Length3D(Pcamera)));
    // Modify ray for depth of field
    if (lensRadius > 0.0_um) {
        // Sample point on lens
        double lensU, lensV;
        concentricSampleDisk(sample.lensU, sample.lensV, &lensU, &lensV);

        Length x = lensU * lensRadius;
        Length y = lensV * lensRadius;

        // Compute point on plane of focus
        double ft = focalDistance / ray->m_direction.z;
        Point3D Pfocus = (*ray)(ft);

        // Update ray for effect of lens
        ray->m_origin = Point3D(x, y, 0.0_um);
        ray->m_direction = normalize(Pfocus - ray->m_origin);
    }
    ray->m_time = sample.time;
    CameraToWorld(*ray, ray);
    return 1.f;
}
Ejemplo n.º 4
0
		// Returns a stratified unit-disk lens sample for AA sample index ix,
		// using the shuffled stratum order in aaOrder.
		// NOTE(review): throws a bare int on out-of-range ix — kept as-is for
		// caller compatibility; a typed exception would be clearer.
		Vector3f ProgressiveStratifiedSampler::getLensSample(int ix) {
			const int totalSamples = aaSamplesSqrt * aaSamplesSqrt;
			if (ix >= totalSamples) throw 1;

			// Decompose the shuffled stratum index into a 2D grid cell.
			const int cell = aaOrder[ix];
			const int ci = cell / aaSamplesSqrt;
			const int cj = cell % aaSamplesSqrt;

			// Jittered position inside the cell [ci/n,(ci+1)/n) x [cj/n,(cj+1)/n).
			const double u = rg->getDouble(((double)ci)/(double)aaSamplesSqrt, ((double)(ci+1.0))/(double)aaSamplesSqrt);
			const double v = rg->getDouble(((double)cj)/(double)aaSamplesSqrt, ((double)(cj+1.0))/(double)aaSamplesSqrt);

			return concentricSampleDisk(u, v);
		}
Ejemplo n.º 5
0
// Renders the whole image scanline by scanline into row_image as 8-bit RGB.
// For each pixel, subpixel_num * sample_num tent-filtered QMC samples are
// traced (optionally perturbed by a thin-lens model for depth of field),
// averaged, tone-exposed, and written out. Always returns true.
bool render(std::shared_ptr<std::vector<u_char> > row_image,
            std::shared_ptr<RenderInfo> info) {
  // Depth of field is enabled only for a positive lens radius.
  bool do_dof = info->lens_radius > 0.0;

  // World-space vector from the screen center toward the far plane.
  Vec3f to_far_z = info->camera.posToWorld(Vec3f(info->size.x() / 2, info->size.y() / 2, 1.0),
                                           Affinef::Identity(), info->viewport);
  to_far_z.normalize();
  
  for (int iy = 0; iy < info->size.y(); ++iy) {
    std::vector<Pixel> image(info->size.x());

    for (int ix = 0; ix < info->size.x(); ++ix) {
      Pixel sub_pixel = Pixel::Zero();
      
      for (int i = 0; i < info->subpixel_num; ++i) {

        for (int h = 0; h < info->sample_num; ++h) {
          // Seed the QMC sequence so each pixel's random numbers are
          // self-contained (independent of the order pixels are rendered).
          Qmc render_random(h + i * info->sample_num + (ix + iy * info->size.x()) * (info->sample_num * info->subpixel_num));
          
          Real r1 = 2.0 * render_random.next();
          Real r2 = 2.0 * render_random.next();
        
          // Tent-filtered jitter: samples cluster toward the pixel center.
          Real x = ix + ((r1 < 1.0) ? std::sqrt(r1) - 1.0 : 1.0 - std::sqrt(2.0 - r1));
          Real y = iy + ((r2 < 1.0) ? std::sqrt(r2) - 1.0 : 1.0 - std::sqrt(2.0 - r2));
        
          // Compute the segment from the front of the screen to the far plane.
          Vec3f ray_start = info->camera.posToWorld(Vec3f(x, y, 0.0),
                                                    Affinef::Identity(), info->viewport);
          Vec3f ray_end = info->camera.posToWorld(Vec3f(x, y, 1.0),
                                                  Affinef::Identity(), info->viewport);

          Vec3f ray_vec = (ray_end - ray_start).normalized();
          
          if (do_dof) {
            // Simulate lens refraction (depth of field).
            // SOURCE:https://github.com/githole/simple-pathtracer/tree/simple-pathtracer-DOF

            // Point where the ray is in focus.
            Real ft = std::abs(info->focal_distance / to_far_z.dot(ray_vec));
            Vec3f focus_pos = ray_start + ray_vec * ft;

            // Rebuild the ray from a randomly chosen point on the lens and
            // the in-focus point (refraction effect).
            Vec2f lens = concentricSampleDisk(render_random.next(), render_random.next()) * info->lens_radius;
            ray_start.x() += lens.x();
            ray_start.y() += lens.y();
            ray_vec = (focus_pos - ray_start).normalized();
          }
          
          sub_pixel += rayTrace(ray_start, ray_vec,
                                0,
                                info->recursive_depth,
                                false,
                                info->model,
                                info->bvh_node,
                                info->bg,
                                render_random);
        }
      }
      // Average all samples accumulated for this pixel.
      image[ix] = sub_pixel / (info->sample_num * info->subpixel_num);
    }

    {
      // Emit the image one scanline at a time:
      // normalize pixel values from 0.0-1.0 to 0-255 after exposure.
      int index = iy * info->size.x() * 3;
      Real exposure = info->exposure;
      std::for_each(image.begin(), image.end(),
                    [&row_image, &index, &exposure](const Pixel& pixel) {
                      (*row_image)[index + 0] = expose(pixel.x(), exposure) * 255;
                      (*row_image)[index + 1] = expose(pixel.y(), exposure) * 255;
                      (*row_image)[index + 2] = expose(pixel.z(), exposure) * 255;
                      index += 3;
                    });
    }
  }

  return true;
}