DirectLightingIntegrator::DirectLightingIntegrator(
    const ShadingContext&   shading_context,
    const LightSampler&     light_sampler,
    const ShadingPoint&     shading_point,
    const Vector3d&         outgoing,
    const BSDF&             bsdf,
    const void*             bsdf_data,
    const int               bsdf_sampling_modes,
    const int               light_sampling_modes,
    const size_t            bsdf_sample_count,
    const size_t            light_sample_count,
    const bool              indirect)
  : m_shading_context(shading_context)
  , m_light_sampler(light_sampler)
  , m_shading_point(shading_point)
  , m_point(shading_point.get_point())
  , m_geometric_normal(shading_point.get_geometric_normal())
  , m_shading_basis(shading_point.get_shading_basis())
  , m_time(shading_point.get_time())
  , m_outgoing(outgoing)
  , m_bsdf(bsdf)
  , m_bsdf_data(bsdf_data)
  , m_bsdf_sampling_modes(bsdf_sampling_modes)
  , m_light_sampling_modes(light_sampling_modes)
  , m_bsdf_sample_count(bsdf_sample_count)
  , m_light_sample_count(light_sample_count)
  , m_indirect(indirect)
{
    assert(is_normalized(outgoing));
}
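// Illustrative sketch only (not part of the original source): one plausible way a lighting
// engine could instantiate this integrator. The variables passed in (shading_context,
// light_sampler, shading_point, outgoing, bsdf, bsdf_data) are assumed to be available at
// the call site, and the sampling mode arguments are assumed to be ScatteringMode flags,
// as suggested by their use elsewhere in this section.
//
//     const DirectLightingIntegrator integrator(
//         shading_context,
//         light_sampler,
//         shading_point,
//         outgoing,               // must be normalized, per the assert above
//         bsdf,
//         bsdf_data,
//         ScatteringMode::All,    // bsdf_sampling_modes (assumed flag value)
//         ScatteringMode::All,    // light_sampling_modes (assumed flag value)
//         1,                      // bsdf_sample_count
//         1,                      // light_sample_count
//         false);                 // indirect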
void add_back_lighting(
    const InputValues&      values,
    SamplingContext&        sampling_context,
    const PixelContext&     pixel_context,
    const ShadingContext&   shading_context,
    const ShadingPoint&     shading_point,
    Spectrum&               radiance,
    SpectrumStack&          aovs) const
{
    const Vector3d& p = shading_point.get_point();
    const Vector3d& n = shading_point.get_original_shading_normal();
    const Vector3d& d = shading_point.get_ray().m_dir;

    // Construct a ray perpendicular to the other side of the surface.
    ShadingRay back_ray(shading_point.get_ray());
    back_ray.m_tmax *= norm(d);
    back_ray.m_dir = dot(d, n) > 0.0 ? -n : n;
    back_ray.m_org = p - back_ray.m_tmax * back_ray.m_dir;

    ShadingPoint back_shading_point(shading_point);
    back_shading_point.set_ray(back_ray);

    Spectrum back_radiance(0.0f);
    SpectrumStack back_aovs(aovs.size(), 0.0f);

    // Compute back lighting.
    for (size_t i = 0; i < m_back_lighting_samples; ++i)
    {
        shading_context.get_lighting_engine()->compute_lighting(
            sampling_context,
            pixel_context,
            shading_context,
            back_shading_point,
            back_radiance,
            back_aovs);
    }

    // Apply translucency factor.
    back_radiance *= values.m_translucency;
    back_aovs *= values.m_translucency;

    // Divide by the number of samples.
    const float rcp_sample_count = 1.0f / static_cast<float>(m_back_lighting_samples);
    back_radiance *= rcp_sample_count;
    back_aovs *= rcp_sample_count;

    // Add back lighting contribution.
    radiance += back_radiance;
    aovs += back_aovs;
}
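// Note (added, not in the original source): assuming compute_lighting() accumulates into
// its radiance output, which the division by the sample count suggests, the contribution
// added to `radiance` works out to
//
//     values.m_translucency * (1 / N) * sum_{i=1..N} L_i,
//
// where N = m_back_lighting_samples and L_i is the lighting computed at the flipped
// "back" shading point for the i-th sample, i.e. an averaged back-lighting estimate
// scaled by the translucency factor.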
size_t SubsurfaceSampler::sample(
    SamplingContext&        sampling_context,
    const ShadingPoint&     outgoing_point,
    const BSSRDF&           bssrdf,
    const void*             bssrdf_data,
    SubsurfaceSample        samples[],
    const size_t            max_sample_count)
{
    assert(max_sample_count > 0);

    // Sample the diffusion profile.
    BSSRDFSample bssrdf_sample(sampling_context);
    if (!bssrdf.sample(bssrdf_data, bssrdf_sample))
        return 0;

    // Reject points too far away.
    // This introduces negligible bias in comparison to the other approximations.
    const Vector2d& point(bssrdf_sample.get_point());
    const double radius2 = square_norm(point);
    const double rmax2 = bssrdf_sample.get_rmax2();
    if (radius2 > rmax2)
        return 0;

    // Evaluate the PDF of the diffusion profile.
    const double radius = sqrt(radius2);
    const double bssrdf_sample_pdf =
        bssrdf.evaluate_pdf(bssrdf_data, bssrdf_sample.get_channel(), radius);

    // Pick a sampling basis.
    sampling_context.split_in_place(1, 1);
    Axis sampling_axis;
    Basis3d sampling_basis;
    double sampling_basis_pdf;
    pick_sampling_basis(
        outgoing_point.get_shading_basis(),
        sampling_context.next_double2(),
        sampling_axis,
        sampling_basis,
        sampling_basis_pdf);

    // Compute height of sample point on (positive) hemisphere of radius Rmax.
    assert(rmax2 >= radius2);
    const double h = sqrt(rmax2 - radius2);

    // Compute sphere entry and exit points.
    Vector3d entry_point, exit_point;
    entry_point = exit_point = outgoing_point.get_point();
    entry_point += sampling_basis.transform_to_parent(Vector3d(point[0], +h, point[1]));
    exit_point += sampling_basis.transform_to_parent(Vector3d(point[0], -h, point[1]));
    assert(feq(norm(exit_point - entry_point), 2.0 * h, 1.0e-9));

    // Build a probe ray inscribed inside the sphere of radius Rmax.
    ShadingRay probe_ray(
        entry_point,
        -sampling_basis.get_normal(),
        0.0,
        2.0 * h,
        outgoing_point.get_time(),
        VisibilityFlags::ProbeRay,
        outgoing_point.get_ray().m_depth + 1);

    const Material* material = outgoing_point.get_material();
    ShadingPoint shading_points[2];
    size_t shading_point_index = 0;
    ShadingPoint* parent_shading_point = 0;
    size_t sample_count = 0;

    // Trace the ray and return all intersections (or up to max_sample_count of them) found inside the sphere.
    while (true)
    {
        // Continue tracing the ray.
        shading_points[shading_point_index].clear();
        if (!m_shading_context.get_intersector().trace(
                probe_ray,
                shading_points[shading_point_index],
                parent_shading_point))
            break;

        // Only consider points lying on surfaces with the same material as the outgoing point.
        if (shading_points[shading_point_index].get_material() == material)
        {
            // Execute the OSL shader if we have one. Needed for bump mapping.
#ifdef APPLESEED_WITH_OSL
            if (material->has_osl_surface())
            {
                sampling_context.split_in_place(1, 1);
                m_shading_context.execute_osl_bump(
                    *material->get_osl_surface(),
                    shading_points[shading_point_index],
                    sampling_context.next_double2());
            }
#endif

            SubsurfaceSample& sample = samples[sample_count++];
            sample.m_point = shading_points[shading_point_index];

            // Compute sample probability.
            sample.m_probability =
                  bssrdf_sample_pdf
                * sampling_basis_pdf
                * abs(dot(sampling_basis.get_normal(), sample.m_point.get_geometric_normal()));     // todo: or shading normal?

            // Weight sample probability with multiple importance sampling.
            sample.m_probability /=
                compute_mis_weight(
                    bssrdf,
                    bssrdf_data,
                    bssrdf_sample.get_channel(),
                    sampling_basis,
                    sampling_axis,
                    sample.m_probability,
                    outgoing_point.get_point(),
                    sample.m_point.get_point(),
                    sample.m_point.get_geometric_normal());     // todo: or shading normal?

            // Return the relative index of refraction.
            sample.m_eta = bssrdf_sample.get_eta();

            if (sample_count == max_sample_count)
                break;
        }

        // Move the ray's origin past the hit surface.
        probe_ray.m_org = shading_points[shading_point_index].get_point();
        probe_ray.m_tmax = norm(exit_point - probe_ray.m_org);

        // Swap the current and parent shading points.
        parent_shading_point = &shading_points[shading_point_index];
        shading_point_index = 1 - shading_point_index;
    }

    return sample_count;
}
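// Illustrative sketch only (not part of the original source): a hedged example of how a
// caller might consume the samples produced by SubsurfaceSampler::sample(). The variable
// names at the call site (subsurface_sampler, sampling_context, outgoing_point, bssrdf,
// bssrdf_data) and the MaxSampleCount cap are placeholders; only the members visible above
// (m_point, m_probability, m_eta) are assumed to exist on SubsurfaceSample.
//
//     const size_t MaxSampleCount = 16;   // hypothetical cap
//     SubsurfaceSample samples[MaxSampleCount];
//     const size_t sample_count =
//         subsurface_sampler.sample(
//             sampling_context,
//             outgoing_point,
//             bssrdf,
//             bssrdf_data,
//             samples,
//             MaxSampleCount);
//     for (size_t i = 0; i < sample_count; ++i)
//     {
//         const SubsurfaceSample& s = samples[i];
//         // Use s.m_point as the incoming point, divide the BSSRDF value by
//         // s.m_probability, and use s.m_eta for the Fresnel terms.
//     }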
void DiagnosticSurfaceShader::evaluate(
    SamplingContext&        sampling_context,
    const PixelContext&     pixel_context,
    const ShadingContext&   shading_context,
    const ShadingPoint&     shading_point,
    ShadingResult&          shading_result) const
{
    switch (m_shading_mode)
    {
      case Color:
        {
            shading_result.set_main_to_opaque_pink_linear_rgba();

            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

#ifdef APPLESEED_WITH_OSL
                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    shading_context.execute_osl_shading(
                        *material_data.m_shader_group,
                        shading_point);
                }
#endif

                if (material_data.m_bsdf)
                {
                    InputEvaluator input_evaluator(shading_context.get_texture_cache());
                    material_data.m_bsdf->evaluate_inputs(
                        shading_context,
                        input_evaluator,
                        shading_point);

                    const Vector3d direction = -normalize(shading_point.get_ray().m_dir);
                    material_data.m_bsdf->evaluate(
                        input_evaluator.data(),
                        false,
                        false,
                        shading_point.get_geometric_normal(),
                        shading_point.get_shading_basis(),
                        direction,
                        direction,
                        ScatteringMode::All,
                        shading_result.m_main.m_color);
                    shading_result.m_color_space = ColorSpaceSpectral;
                }
            }
        }
        break;

      case Coverage:
        shading_result.set_main_to_linear_rgb(Color3f(1.0f));
        break;

      case Barycentric:
        shading_result.set_main_to_linear_rgb(
            vector2_to_color(shading_point.get_bary()));
        break;

      case UV:
        shading_result.set_main_to_linear_rgb(
            uvs_to_color(shading_point.get_uv(0)));
        break;

      case Tangent:
      case Bitangent:
      case ShadingNormal:
        {
#ifdef APPLESEED_WITH_OSL
            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    sampling_context.split_in_place(2, 1);
                    shading_context.execute_osl_bump(
                        *material_data.m_shader_group,
                        shading_point,
                        sampling_context.next_vector2<2>());
                }
            }
#endif

            const Vector3d v =
                m_shading_mode == ShadingNormal ? shading_point.get_shading_basis().get_normal() :
                m_shading_mode == Tangent ? shading_point.get_shading_basis().get_tangent_u() :
                shading_point.get_shading_basis().get_tangent_v();

            shading_result.set_main_to_linear_rgb(vector3_to_color(v));
        }
        break;

      case GeometricNormal:
        shading_result.set_main_to_linear_rgb(
            vector3_to_color(shading_point.get_geometric_normal()));
        break;

      case OriginalShadingNormal:
        shading_result.set_main_to_linear_rgb(
            vector3_to_color(shading_point.get_original_shading_normal()));
        break;

      case WorldSpacePosition:
        {
            const Vector3d& p = shading_point.get_point();
            shading_result.set_main_to_linear_rgb(
                Color3f(Color3d(p.x, p.y, p.z)));
        }
        break;

      case Sides:
        shading_result.set_main_to_linear_rgb(
            shading_point.get_side() == ObjectInstance::FrontSide
                ? Color3f(0.0f, 0.0f, 1.0f)
                : Color3f(1.0f, 0.0f, 0.0f));
        break;

      case Depth:
        shading_result.set_main_to_linear_rgb(
            Color3f(static_cast<float>(shading_point.get_distance())));
        break;

      case ScreenSpaceWireframe:
        {
            // Initialize the shading result to the background color.
            shading_result.set_main_to_linear_rgba(Color4f(0.0f, 0.0f, 0.8f, 0.5f));

            if (shading_point.is_triangle_primitive())
            {
                // Film space thickness of the wires.
                const double SquareWireThickness = square(0.00025);

                // Retrieve the time, the scene and the camera.
                const double time = shading_point.get_time().m_absolute;
                const Scene& scene = shading_point.get_scene();
                const Camera& camera = *scene.get_camera();

                // Compute the film space coordinates of the intersection point.
                Vector2d point_ndc;
                camera.project_point(time, shading_point.get_point(), point_ndc);

                // Loop over the triangle edges.
                for (size_t i = 0; i < 3; ++i)
                {
                    // Retrieve the end points of this edge.
                    const size_t j = (i + 1) % 3;
                    const Vector3d vi = shading_point.get_vertex(i);
                    const Vector3d vj = shading_point.get_vertex(j);

                    // Compute the film space coordinates of the edge's end points.
                    Vector2d vi_ndc, vj_ndc;
                    if (!camera.project_segment(time, vi, vj, vi_ndc, vj_ndc))
                        continue;

                    // Compute the film space distance from the intersection point to the edge.
                    const double d = square_distance_point_segment(point_ndc, vi_ndc, vj_ndc);

                    // Shade with the wire's color if the hit point is close enough to the edge.
                    if (d < SquareWireThickness)
                    {
                        shading_result.set_main_to_linear_rgba(Color4f(1.0f));
                        break;
                    }
                }
            }
            else
            {
                assert(shading_point.is_curve_primitive());

                // todo: implement.
            }
        }
        break;

      case WorldSpaceWireframe:
        {
            // Initialize the shading result to the background color.
            shading_result.set_main_to_linear_rgba(Color4f(0.0f, 0.0f, 0.8f, 0.5f));

            if (shading_point.is_triangle_primitive())
            {
                // World space thickness of the wires.
                const double SquareWireThickness = square(0.0015);

                // Retrieve the world space intersection point.
                const Vector3d& point = shading_point.get_point();

                // Loop over the triangle edges.
                for (size_t i = 0; i < 3; ++i)
                {
                    // Retrieve the end points of this edge.
                    const size_t j = (i + 1) % 3;
                    const Vector3d& vi = shading_point.get_vertex(i);
                    const Vector3d& vj = shading_point.get_vertex(j);

                    // Compute the world space distance from the intersection point to the edge.
                    const double d = square_distance_point_segment(point, vi, vj);

                    // Shade with the wire's color if the hit point is close enough to the edge.
                    if (d < SquareWireThickness)
                    {
                        shading_result.set_main_to_linear_rgba(Color4f(1.0f));
                        break;
                    }
                }
            }
            else
            {
                assert(shading_point.is_curve_primitive());

                // todo: implement.
            }
        }
        break;

      case AmbientOcclusion:
        {
            // Compute the occlusion.
            const double occlusion =
                compute_ambient_occlusion(
                    sampling_context,
                    sample_hemisphere_uniform<double>,
                    shading_context.get_intersector(),
                    shading_point,
                    m_ao_max_distance,
                    m_ao_samples);

            // Return a gray scale value proportional to the accessibility.
            const float accessibility = static_cast<float>(1.0 - occlusion);
            shading_result.set_main_to_linear_rgb(Color3f(accessibility));
        }
        break;

      case AssemblyInstances:
        shading_result.set_main_to_linear_rgb(
            integer_to_color(shading_point.get_assembly_instance().get_uid()));
        break;

      case ObjectInstances:
        shading_result.set_main_to_linear_rgb(
            integer_to_color(shading_point.get_object_instance().get_uid()));
        break;

      case Regions:
        {
            const uint32 h =
                mix_uint32(
                    static_cast<uint32>(shading_point.get_object_instance().get_uid()),
                    static_cast<uint32>(shading_point.get_region_index()));
            shading_result.set_main_to_linear_rgb(integer_to_color(h));
        }
        break;

      case Primitives:
        {
            const uint32 h =
                mix_uint32(
                    static_cast<uint32>(shading_point.get_object_instance().get_uid()),
                    static_cast<uint32>(shading_point.get_region_index()),
                    static_cast<uint32>(shading_point.get_primitive_index()));
            shading_result.set_main_to_linear_rgb(integer_to_color(h));
        }
        break;

      case Materials:
        {
            const Material* material = shading_point.get_material();
            if (material)
                shading_result.set_main_to_linear_rgb(integer_to_color(material->get_uid()));
            else
                shading_result.set_main_to_opaque_pink_linear_rgba();
        }
        break;

      case RaySpread:
        {
            const ShadingRay& ray = shading_point.get_ray();
            if (!ray.m_has_differentials)
                break;

            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

#ifdef APPLESEED_WITH_OSL
                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    shading_context.execute_osl_shading(
                        *material_data.m_shader_group,
                        shading_point);
                }
#endif

                if (material_data.m_bsdf)
                {
                    const Dual3d outgoing(
                        -ray.m_dir,
                        ray.m_dir - ray.m_rx.m_dir,
                        ray.m_dir - ray.m_ry.m_dir);

                    InputEvaluator input_evaluator(shading_context.get_texture_cache());
                    material_data.m_bsdf->evaluate_inputs(
                        shading_context,
                        input_evaluator,
                        shading_point);
                    const void* bsdf_data = input_evaluator.data();

                    BSDFSample sample(shading_point, outgoing);
                    material_data.m_bsdf->sample(
                        sampling_context,
                        bsdf_data,
                        false,
                        false,
                        sample);

                    if (!sample.m_incoming.has_derivatives())
                        break;

                    // The 3.0 factor is chosen so that ray spread from Lambertian BRDFs is approximately 1.
                    const double spread =
                        max(
                            norm(sample.m_incoming.get_dx()),
                            norm(sample.m_incoming.get_dy())) * 3.0;

                    shading_result.set_main_to_linear_rgb(
                        Color3f(static_cast<float>(spread)));
                }
            }
        }
        break;

      case FacingRatio:
        {
            const Vector3d& normal = shading_point.get_shading_normal();
            const Vector3d& view = shading_point.get_ray().m_dir;
            const double facing = abs(dot(normal, view));
            shading_result.set_main_to_linear_rgb(
                Color3f(static_cast<float>(facing)));
        }
        break;

      default:
        assert(false);
        shading_result.set_main_to_transparent_black_linear_rgba();
        break;
    }
}
bool Intersector::do_trace_same_material(
    const ShadingRay&       ray,
    const ShadingPoint&     parent_shading_point,
    const bool              offset_origin,
    ShadingPoint&           shading_point) const
{
    ShadingRay up_ray(ray);
    ShadingRay down_ray(ray);
    down_ray.m_dir = -down_ray.m_dir;

    if (offset_origin)
    {
        parent_shading_point.refine_and_offset();

        const Vector3d offset =
            parent_shading_point.get_offset_point(down_ray.m_dir) -
            parent_shading_point.get_point();

        up_ray.m_org += offset;
    }

    const Material* parent_material = parent_shading_point.get_material();

    // Trace the ray.
    ShadingPoint up_shading_point;
    trace_back_sides(
        up_ray,
        up_shading_point);

    // Discard objects with different materials.
    if (up_shading_point.hit())
    {
        if (up_shading_point.get_opposite_material() != parent_material)
            up_shading_point.clear();
    }

    // Trace the opposite ray.
    ShadingPoint down_shading_point;
    trace_back_sides(
        down_ray,
        down_shading_point);

    // Discard objects with different materials.
    if (down_shading_point.hit())
    {
        if (down_shading_point.get_opposite_material() != parent_material)
            down_shading_point.clear();
    }

    // Keep the nearest hit, if any.
    if (up_shading_point.hit() && down_shading_point.hit())
    {
        shading_point =
            up_shading_point.get_distance() < down_shading_point.get_distance()
                ? up_shading_point
                : down_shading_point;
        return true;
    }
    else if (up_shading_point.hit())
    {
        shading_point = up_shading_point;
        return true;
    }
    else if (down_shading_point.hit())
    {
        shading_point = down_shading_point;
        return true;
    }

    return false;
}
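// Note (added, not in the original source): this helper probes in both directions along the
// given ray (up_ray follows ray.m_dir, down_ray its opposite), keeps only hits whose
// opposite-side material matches the parent shading point's material, and returns the
// nearer of the surviving hits through `shading_point`, returning false when neither
// direction produces a matching hit. The typical caller, e.g. a public trace_same_material()
// wrapper, is an assumption and not shown in this section.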