void apply_aerial_perspective(
    const InputValues&          values,
    const ShadingContext&       shading_context,
    const PixelContext&         pixel_context,
    const ShadingPoint&         shading_point,
    ShadingResult&              shading_result) const
{
    Spectrum sky_color;

    if (m_aerial_persp_mode == AerialPerspSkyColor)
        sky_color = values.m_aerial_persp_sky_color;
    else
    {
        // Retrieve the environment shader of the scene.
        const Scene& scene = shading_point.get_scene();
        const EnvironmentShader* environment_shader =
            scene.get_environment()->get_environment_shader();

        if (environment_shader)
        {
            // Execute the environment shader to obtain the sky color in the direction of the ray.
            InputEvaluator input_evaluator(shading_context.get_texture_cache());
            const ShadingRay& ray = shading_point.get_ray();
            const Vector3d direction = normalize(ray.m_dir);
            ShadingResult sky;
            environment_shader->evaluate(
                shading_context,
                pixel_context,
                input_evaluator,
                direction,
                sky);
            sky_color = sky.m_main.m_color;
        }
        else sky_color.set(0.0f);
    }

    // Compute the blend factor.
    const double d = shading_point.get_distance() * m_aerial_persp_rcp_distance;
    const double k = m_aerial_persp_intensity * exp(d);
    const double blend = min(k, 1.0);

    // Blend the shading result and the sky color.
    sky_color *= static_cast<float>(blend);
    shading_result.m_main.m_color *= static_cast<float>(1.0 - blend);
    shading_result.m_main.m_color += sky_color;
}
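// A minimal standalone sketch of the blend-factor math used above, for reference.
// This is a hypothetical helper, not part of the original code, and the parameter
// names are illustrative: the blend toward the sky color grows with the ratio of
// the hit distance to the reference distance and is clamped to 1.

#include <algorithm>
#include <cmath>

double aerial_perspective_blend_sketch(
    const double distance,                  // camera-to-hit distance
    const double rcp_reference_distance,    // reciprocal reference distance (m_aerial_persp_rcp_distance above)
    const double intensity)                 // user intensity (m_aerial_persp_intensity above)
{
    const double d = distance * rcp_reference_distance;
    return std::min(intensity * std::exp(d), 1.0);
}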
void EmbreeScene::intersect(ShadingPoint& shading_point) const
{
    RTCIntersectContext context;
    rtcInitIntersectContext(&context);

    RTCRayHit rayhit;
    shading_ray_to_embree_ray(shading_point.get_ray(), rayhit.ray);
    rayhit.hit.geomID = RTC_INVALID_GEOMETRY_ID;

    rtcIntersect1(m_scene, &context, &rayhit);

    if (rayhit.hit.geomID != RTC_INVALID_GEOMETRY_ID)
    {
        assert(rayhit.hit.geomID < m_geometry_container.size());
        const auto& geometry_data = m_geometry_container[rayhit.hit.geomID];
        assert(geometry_data);

        shading_point.m_bary[0] = rayhit.hit.u;
        shading_point.m_bary[1] = rayhit.hit.v;
        shading_point.m_object_instance_index = geometry_data->m_object_instance_idx;
        // TODO: remove regions
        shading_point.m_primitive_index = rayhit.hit.primID;
        shading_point.m_primitive_type = ShadingPoint::PrimitiveTriangle;
        shading_point.m_ray.m_tmax = rayhit.ray.tfar;

        const uint32 v0_idx = geometry_data->m_primitives[rayhit.hit.primID * 3];
        const uint32 v1_idx = geometry_data->m_primitives[rayhit.hit.primID * 3 + 1];
        const uint32 v2_idx = geometry_data->m_primitives[rayhit.hit.primID * 3 + 2];

        if (geometry_data->m_motion_steps_count > 1)
        {
            const uint32 last_motion_step_idx = geometry_data->m_motion_steps_count - 1;
            const uint32 motion_step_begin_idx = static_cast<uint32>(rayhit.ray.time * last_motion_step_idx);
            const uint32 motion_step_end_idx = motion_step_begin_idx + 1;

            const uint32 motion_step_begin_offset = motion_step_begin_idx * geometry_data->m_vertices_count;
            const uint32 motion_step_end_offset = motion_step_end_idx * geometry_data->m_vertices_count;

            const float motion_step_begin_time = static_cast<float>(motion_step_begin_idx) / last_motion_step_idx;

            // Linear interpolation coefficients.
            const float p = (rayhit.ray.time - motion_step_begin_time) * last_motion_step_idx;
            const float q = 1.0f - p;
            assert(p > 0.0f && p <= 1.0f);

            const TriangleType triangle(
                Vector3d(
                    geometry_data->m_vertices[motion_step_begin_offset + v0_idx] * q +
                    geometry_data->m_vertices[motion_step_end_offset + v0_idx] * p),
                Vector3d(
                    geometry_data->m_vertices[motion_step_begin_offset + v1_idx] * q +
                    geometry_data->m_vertices[motion_step_end_offset + v1_idx] * p),
                Vector3d(
                    geometry_data->m_vertices[motion_step_begin_offset + v2_idx] * q +
                    geometry_data->m_vertices[motion_step_end_offset + v2_idx] * p));

            shading_point.m_triangle_support_plane.initialize(triangle);
        }
        else
        {
            const TriangleType triangle(
                Vector3d(geometry_data->m_vertices[v0_idx]),
                Vector3d(geometry_data->m_vertices[v1_idx]),
                Vector3d(geometry_data->m_vertices[v2_idx]));

            shading_point.m_triangle_support_plane.initialize(triangle);
        }
    }
}
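// Sketch of the motion-step interpolation performed above, shown in isolation.
// Hypothetical helper, not part of the original code: for a normalized shutter
// time and N > 1 motion steps, it returns the bracketing step indices and the
// linear weights p (end step) and q (begin step). It assumes the time lies
// strictly inside a motion interval, matching the assert in the code above.

#include <cassert>
#include <cstdint>

struct MotionStepWeightsSketch
{
    std::uint32_t   begin_step;
    std::uint32_t   end_step;
    float           p;  // weight of the end step
    float           q;  // weight of the begin step
};

MotionStepWeightsSketch compute_motion_step_weights_sketch(
    const float         time,               // normalized ray time in [0, 1)
    const std::uint32_t motion_steps_count) // number of stored motion steps (> 1)
{
    assert(motion_steps_count > 1);

    const std::uint32_t last_step = motion_steps_count - 1;
    const std::uint32_t begin_step = static_cast<std::uint32_t>(time * last_step);
    const float begin_time = static_cast<float>(begin_step) / last_step;

    // Fractional position of `time` within its motion interval.
    const float p = (time - begin_time) * last_step;

    return { begin_step, begin_step + 1, p, 1.0f - p };
}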
void DiagnosticSurfaceShader::evaluate(
    SamplingContext&            sampling_context,
    const PixelContext&         pixel_context,
    const ShadingContext&       shading_context,
    const ShadingPoint&         shading_point,
    ShadingResult&              shading_result) const
{
    switch (m_shading_mode)
    {
      case Color:
        {
            shading_result.set_main_to_opaque_pink_linear_rgba();

            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

#ifdef APPLESEED_WITH_OSL
                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    shading_context.execute_osl_shading(
                        *material_data.m_shader_group,
                        shading_point);
                }
#endif

                if (material_data.m_bsdf)
                {
                    InputEvaluator input_evaluator(shading_context.get_texture_cache());
                    material_data.m_bsdf->evaluate_inputs(
                        shading_context,
                        input_evaluator,
                        shading_point);

                    const Vector3d direction = -normalize(shading_point.get_ray().m_dir);
                    material_data.m_bsdf->evaluate(
                        input_evaluator.data(),
                        false,
                        false,
                        shading_point.get_geometric_normal(),
                        shading_point.get_shading_basis(),
                        direction,
                        direction,
                        ScatteringMode::All,
                        shading_result.m_main.m_color);

                    shading_result.m_color_space = ColorSpaceSpectral;
                }
            }
        }
        break;

      case Coverage:
        shading_result.set_main_to_linear_rgb(Color3f(1.0f));
        break;

      case Barycentric:
        shading_result.set_main_to_linear_rgb(
            vector2_to_color(shading_point.get_bary()));
        break;

      case UV:
        shading_result.set_main_to_linear_rgb(
            uvs_to_color(shading_point.get_uv(0)));
        break;

      case Tangent:
      case Bitangent:
      case ShadingNormal:
        {
#ifdef APPLESEED_WITH_OSL
            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    sampling_context.split_in_place(2, 1);
                    shading_context.execute_osl_bump(
                        *material_data.m_shader_group,
                        shading_point,
                        sampling_context.next_vector2<2>());
                }
            }
#endif

            const Vector3d v =
                m_shading_mode == ShadingNormal ? shading_point.get_shading_basis().get_normal() :
                m_shading_mode == Tangent ? shading_point.get_shading_basis().get_tangent_u() :
                shading_point.get_shading_basis().get_tangent_v();

            shading_result.set_main_to_linear_rgb(vector3_to_color(v));
        }
        break;

      case GeometricNormal:
        shading_result.set_main_to_linear_rgb(
            vector3_to_color(shading_point.get_geometric_normal()));
        break;

      case OriginalShadingNormal:
        shading_result.set_main_to_linear_rgb(
            vector3_to_color(shading_point.get_original_shading_normal()));
        break;

      case WorldSpacePosition:
        {
            const Vector3d& p = shading_point.get_point();
            shading_result.set_main_to_linear_rgb(
                Color3f(Color3d(p.x, p.y, p.z)));
        }
        break;

      case Sides:
        shading_result.set_main_to_linear_rgb(
            shading_point.get_side() == ObjectInstance::FrontSide
                ? Color3f(0.0f, 0.0f, 1.0f)
                : Color3f(1.0f, 0.0f, 0.0f));
        break;

      case Depth:
        shading_result.set_main_to_linear_rgb(
            Color3f(static_cast<float>(shading_point.get_distance())));
        break;

      case ScreenSpaceWireframe:
        {
            // Initialize the shading result to the background color.
            shading_result.set_main_to_linear_rgba(Color4f(0.0f, 0.0f, 0.8f, 0.5f));

            if (shading_point.is_triangle_primitive())
            {
                // Film space thickness of the wires.
                const double SquareWireThickness = square(0.00025);

                // Retrieve the time, the scene and the camera.
                const double time = shading_point.get_time().m_absolute;
                const Scene& scene = shading_point.get_scene();
                const Camera& camera = *scene.get_camera();

                // Compute the film space coordinates of the intersection point.
                Vector2d point_ndc;
                camera.project_point(time, shading_point.get_point(), point_ndc);

                // Loop over the triangle edges.
                for (size_t i = 0; i < 3; ++i)
                {
                    // Retrieve the end points of this edge.
                    const size_t j = (i + 1) % 3;
                    const Vector3d vi = shading_point.get_vertex(i);
                    const Vector3d vj = shading_point.get_vertex(j);

                    // Compute the film space coordinates of the edge's end points.
                    Vector2d vi_ndc, vj_ndc;
                    if (!camera.project_segment(time, vi, vj, vi_ndc, vj_ndc))
                        continue;

                    // Compute the film space distance from the intersection point to the edge.
                    const double d = square_distance_point_segment(point_ndc, vi_ndc, vj_ndc);

                    // Shade with the wire's color if the hit point is close enough to the edge.
                    if (d < SquareWireThickness)
                    {
                        shading_result.set_main_to_linear_rgba(Color4f(1.0f));
                        break;
                    }
                }
            }
            else
            {
                assert(shading_point.is_curve_primitive());

                // todo: implement.
            }
        }
        break;

      case WorldSpaceWireframe:
        {
            // Initialize the shading result to the background color.
            shading_result.set_main_to_linear_rgba(Color4f(0.0f, 0.0f, 0.8f, 0.5f));

            if (shading_point.is_triangle_primitive())
            {
                // World space thickness of the wires.
                const double SquareWireThickness = square(0.0015);

                // Retrieve the world space intersection point.
                const Vector3d& point = shading_point.get_point();

                // Loop over the triangle edges.
                for (size_t i = 0; i < 3; ++i)
                {
                    // Retrieve the end points of this edge.
                    const size_t j = (i + 1) % 3;
                    const Vector3d& vi = shading_point.get_vertex(i);
                    const Vector3d& vj = shading_point.get_vertex(j);

                    // Compute the world space distance from the intersection point to the edge.
                    const double d = square_distance_point_segment(point, vi, vj);

                    // Shade with the wire's color if the hit point is close enough to the edge.
                    if (d < SquareWireThickness)
                    {
                        shading_result.set_main_to_linear_rgba(Color4f(1.0f));
                        break;
                    }
                }
            }
            else
            {
                assert(shading_point.is_curve_primitive());

                // todo: implement.
            }
        }
        break;

      case AmbientOcclusion:
        {
            // Compute the occlusion.
            const double occlusion =
                compute_ambient_occlusion(
                    sampling_context,
                    sample_hemisphere_uniform<double>,
                    shading_context.get_intersector(),
                    shading_point,
                    m_ao_max_distance,
                    m_ao_samples);

            // Return a gray scale value proportional to the accessibility.
            const float accessibility = static_cast<float>(1.0 - occlusion);
            shading_result.set_main_to_linear_rgb(Color3f(accessibility));
        }
        break;

      case AssemblyInstances:
        shading_result.set_main_to_linear_rgb(
            integer_to_color(shading_point.get_assembly_instance().get_uid()));
        break;

      case ObjectInstances:
        shading_result.set_main_to_linear_rgb(
            integer_to_color(shading_point.get_object_instance().get_uid()));
        break;

      case Regions:
        {
            const uint32 h =
                mix_uint32(
                    static_cast<uint32>(shading_point.get_object_instance().get_uid()),
                    static_cast<uint32>(shading_point.get_region_index()));
            shading_result.set_main_to_linear_rgb(integer_to_color(h));
        }
        break;

      case Primitives:
        {
            const uint32 h =
                mix_uint32(
                    static_cast<uint32>(shading_point.get_object_instance().get_uid()),
                    static_cast<uint32>(shading_point.get_region_index()),
                    static_cast<uint32>(shading_point.get_primitive_index()));
            shading_result.set_main_to_linear_rgb(integer_to_color(h));
        }
        break;

      case Materials:
        {
            const Material* material = shading_point.get_material();
            if (material)
                shading_result.set_main_to_linear_rgb(integer_to_color(material->get_uid()));
            else shading_result.set_main_to_opaque_pink_linear_rgba();
        }
        break;

      case RaySpread:
        {
            const ShadingRay& ray = shading_point.get_ray();
            if (!ray.m_has_differentials)
                break;

            const Material* material = shading_point.get_material();
            if (material)
            {
                const Material::RenderData& material_data = material->get_render_data();

#ifdef APPLESEED_WITH_OSL
                // Execute the OSL shader if there is one.
                if (material_data.m_shader_group)
                {
                    shading_context.execute_osl_shading(
                        *material_data.m_shader_group,
                        shading_point);
                }
#endif

                if (material_data.m_bsdf)
                {
                    const Dual3d outgoing(
                        -ray.m_dir,
                        ray.m_dir - ray.m_rx.m_dir,
                        ray.m_dir - ray.m_ry.m_dir);

                    InputEvaluator input_evaluator(shading_context.get_texture_cache());
                    material_data.m_bsdf->evaluate_inputs(
                        shading_context,
                        input_evaluator,
                        shading_point);
                    const void* bsdf_data = input_evaluator.data();

                    BSDFSample sample(shading_point, outgoing);
                    material_data.m_bsdf->sample(
                        sampling_context,
                        bsdf_data,
                        false,
                        false,
                        sample);

                    if (!sample.m_incoming.has_derivatives())
                        break;

                    // The 3.0 factor is chosen so that ray spread from Lambertian BRDFs is approximately 1.
                    const double spread =
                        max(
                            norm(sample.m_incoming.get_dx()),
                            norm(sample.m_incoming.get_dy())) * 3.0;

                    shading_result.set_main_to_linear_rgb(
                        Color3f(static_cast<float>(spread)));
                }
            }
        }
        break;

      case FacingRatio:
        {
            const Vector3d& normal = shading_point.get_shading_normal();
            const Vector3d& view = shading_point.get_ray().m_dir;
            const double facing = abs(dot(normal, view));
            shading_result.set_main_to_linear_rgb(
                Color3f(static_cast<float>(facing)));
        }
        break;

      default:
        assert(false);
        shading_result.set_main_to_transparent_black_linear_rgba();
        break;
    }
}
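// The wireframe modes above rely on square_distance_point_segment() from
// appleseed's foundation math headers. For reference, an illustrative (not the
// library's) implementation of the same point-to-segment squared distance test,
// written for Vector2d and assuming foundation's dot(), square_norm() and clamp():

double square_distance_point_segment_sketch(
    const Vector2d& p,      // query point (e.g. the hit point in film space)
    const Vector2d& a,      // first end point of the segment
    const Vector2d& b)      // second end point of the segment
{
    const Vector2d ab = b - a;
    const double len2 = square_norm(ab);

    // Degenerate segment: fall back to point-to-point distance.
    if (len2 == 0.0)
        return square_norm(p - a);

    // Project p onto the segment, clamp to the end points, and measure the gap.
    const double t = clamp(dot(p - a, ab) / len2, 0.0, 1.0);
    return square_norm(p - (a + t * ab));
}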
void add_back_lighting(
    const InputValues&          values,
    SamplingContext&            sampling_context,
    const PixelContext&         pixel_context,
    const ShadingContext&       shading_context,
    const ShadingPoint&         shading_point,
    Spectrum&                   radiance,
    SpectrumStack&              aovs) const
{
    const Vector3d& p = shading_point.get_point();
    const Vector3d& n = shading_point.get_original_shading_normal();
    const Vector3d& d = shading_point.get_ray().m_dir;

    // Construct a ray perpendicular to the other side of the surface.
    ShadingRay back_ray(shading_point.get_ray());
    back_ray.m_tmax *= norm(d);
    back_ray.m_dir = dot(d, n) > 0.0 ? -n : n;
    back_ray.m_org = p - back_ray.m_tmax * back_ray.m_dir;

    ShadingPoint back_shading_point(shading_point);
    back_shading_point.set_ray(back_ray);

    Spectrum back_radiance(0.0f);
    SpectrumStack back_aovs(aovs.size(), 0.0f);

/*
#ifdef WITH_OSL
    // Execute the OSL shader, if we have one.
    const Material* material = back_shading_point.get_material();
    if (material && material->get_osl_surface_shader())
    {
        shading_context.execute_osl_shadergroup(
            *material->get_osl_surface_shader(),
            back_shading_point);
    }
#endif
*/

    // Compute back lighting.
    for (size_t i = 0; i < m_back_lighting_samples; ++i)
    {
        shading_context.get_lighting_engine()->compute_lighting(
            sampling_context,
            pixel_context,
            shading_context,
            back_shading_point,
            back_radiance,
            back_aovs);
    }

    // Apply translucency factor.
    back_radiance *= values.m_translucency;
    back_aovs *= values.m_translucency;

    // Divide by the number of samples.
    const float rcp_sample_count = 1.0f / static_cast<float>(m_back_lighting_samples);
    back_radiance *= rcp_sample_count;
    back_aovs *= rcp_sample_count;

    // Add back lighting contribution.
    radiance += back_radiance;
    aovs += back_aovs;
}
size_t SubsurfaceSampler::sample(
    SamplingContext&            sampling_context,
    const ShadingPoint&         outgoing_point,
    const BSSRDF&               bssrdf,
    const void*                 bssrdf_data,
    SubsurfaceSample            samples[],
    const size_t                max_sample_count)
{
    assert(max_sample_count > 0);

    // Sample the diffusion profile.
    BSSRDFSample bssrdf_sample(sampling_context);
    if (!bssrdf.sample(bssrdf_data, bssrdf_sample))
        return 0;

    // Reject points too far away.
    // This introduces negligible bias in comparison to the other approximations.
    const Vector2d& point(bssrdf_sample.get_point());
    const double radius2 = square_norm(point);
    const double rmax2 = bssrdf_sample.get_rmax2();
    if (radius2 > rmax2)
        return 0;

    // Evaluate the PDF of the diffusion profile.
    const double radius = sqrt(radius2);
    const double bssrdf_sample_pdf =
        bssrdf.evaluate_pdf(bssrdf_data, bssrdf_sample.get_channel(), radius);

    // Pick a sampling basis.
    sampling_context.split_in_place(1, 1);
    Axis sampling_axis;
    Basis3d sampling_basis;
    double sampling_basis_pdf;
    pick_sampling_basis(
        outgoing_point.get_shading_basis(),
        sampling_context.next_double2(),
        sampling_axis,
        sampling_basis,
        sampling_basis_pdf);

    // Compute height of sample point on (positive) hemisphere of radius Rmax.
    assert(rmax2 >= radius2);
    const double h = sqrt(rmax2 - radius2);

    // Compute sphere entry and exit points.
    Vector3d entry_point, exit_point;
    entry_point = exit_point = outgoing_point.get_point();
    entry_point += sampling_basis.transform_to_parent(Vector3d(point[0], +h, point[1]));
    exit_point += sampling_basis.transform_to_parent(Vector3d(point[0], -h, point[1]));
    assert(feq(norm(exit_point - entry_point), 2.0 * h, 1.0e-9));

    // Build a probe ray inscribed inside the sphere of radius Rmax.
    ShadingRay probe_ray(
        entry_point,
        -sampling_basis.get_normal(),
        0.0,
        2.0 * h,
        outgoing_point.get_time(),
        VisibilityFlags::ProbeRay,
        outgoing_point.get_ray().m_depth + 1);

    const Material* material = outgoing_point.get_material();

    ShadingPoint shading_points[2];
    size_t shading_point_index = 0;
    ShadingPoint* parent_shading_point = 0;
    size_t sample_count = 0;

    // Trace the ray and return all intersections (or up to max_sample_count of them) found inside the sphere.
    while (true)
    {
        // Continue tracing the ray.
        shading_points[shading_point_index].clear();
        if (!m_shading_context.get_intersector().trace(
                probe_ray,
                shading_points[shading_point_index],
                parent_shading_point))
            break;

        // Only consider points lying on surfaces with the same material as the outgoing point.
        if (shading_points[shading_point_index].get_material() == material)
        {
            // Execute the OSL shader if we have one. Needed for bump mapping.
#ifdef APPLESEED_WITH_OSL
            if (material->has_osl_surface())
            {
                sampling_context.split_in_place(1, 1);
                m_shading_context.execute_osl_bump(
                    *material->get_osl_surface(),
                    shading_points[shading_point_index],
                    sampling_context.next_double2());
            }
#endif

            SubsurfaceSample& sample = samples[sample_count++];
            sample.m_point = shading_points[shading_point_index];

            // Compute sample probability.
            sample.m_probability =
                  bssrdf_sample_pdf
                * sampling_basis_pdf
                * abs(dot(
                      sampling_basis.get_normal(),
                      sample.m_point.get_geometric_normal()));     // todo: or shading normal?

            // Weight sample probability with multiple importance sampling.
            sample.m_probability /=
                compute_mis_weight(
                    bssrdf,
                    bssrdf_data,
                    bssrdf_sample.get_channel(),
                    sampling_basis,
                    sampling_axis,
                    sample.m_probability,
                    outgoing_point.get_point(),
                    sample.m_point.get_point(),
                    sample.m_point.get_geometric_normal());        // todo: or shading normal?

            // Return the relative index of refraction.
            sample.m_eta = bssrdf_sample.get_eta();

            if (sample_count == max_sample_count)
                break;
        }

        // Move the ray's origin past the hit surface.
        probe_ray.m_org = shading_points[shading_point_index].get_point();
        probe_ray.m_tmax = norm(exit_point - probe_ray.m_org);

        // Swap the current and parent shading points.
        parent_shading_point = &shading_points[shading_point_index];
        shading_point_index = 1 - shading_point_index;
    }

    return sample_count;
}
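// Sketch of the probe chord construction used above, shown in isolation.
// Hypothetical helper, not part of the original code, assuming foundation's
// Vector2d/Vector3d and Basis3d types: the 2D profile sample `point` lies on a
// disk of radius Rmax around the outgoing point; lifting it by +/- h along the
// sampling axis gives the entry and exit points of the chord traced by the
// probe ray, whose length is 2h.

#include <cassert>
#include <cmath>

void compute_probe_chord_sketch(
    const Vector3d&     center,         // outgoing point on the surface
    const Basis3d&      sampling_basis, // basis whose normal is the probe direction
    const Vector2d&     point,          // 2D sample drawn from the diffusion profile
    const double        rmax2,          // squared maximum sampling radius Rmax^2
    Vector3d&           entry_point,
    Vector3d&           exit_point)
{
    assert(square_norm(point) <= rmax2);

    // Height of the sample point on the (positive) hemisphere of radius Rmax.
    const double h = std::sqrt(rmax2 - square_norm(point));

    entry_point = center + sampling_basis.transform_to_parent(Vector3d(point[0], +h, point[1]));
    exit_point  = center + sampling_basis.transform_to_parent(Vector3d(point[0], -h, point[1]));
}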