// Builds a cobblestone texture: a blurred, contrast-boosted cellular edge mask
// is carved into a perlin-noise stone surface, an optional normal map is
// generated from it, and a colored cellular pass is tinted by the result.
// Caller owns the returned buffer and must release it with delete[].
ColorRGB *ProceduralTexture::cobblestone(int stoneSize, int stoneNoise, ColorHSV color, int colorRange, float edgeIntensity, int edgeSize, float edgeOpacity, float edgeSmooth, int stoneLayers, float smoothness, float stoneBrightness, bool createNormalMap)
{
    // Grayscale cellular pass: soft stone-edge mask with boosted contrast.
    unsigned char *edgeMask = generateCelluarTexture(stoneSize);
    gaussianBlur(edgeMask, edgeSmooth);
    brightnessContrast(edgeMask, 0, edgeIntensity);

    // Perlin pass: the rough stone surface itself.
    unsigned char *surface = generatePerlinNoise(stoneNoise);
    postEffect(surface, stoneLayers, smoothness);
    brightnessContrast(surface, stoneBrightness, 1);

    // Blend the edge mask into the surface, then release the mask.
    mix(surface, edgeMask, edgeSize, edgeOpacity);
    delete[] edgeMask;

    if (createNormalMap)
        generateNormalMap(surface);

    // Colored cellular pass, tinted by the grayscale surface.
    ColorRGB *tinted = generateCelluarTexture(stoneSize, color, colorRange);
    mix(tinted, surface);
    delete[] surface;

    return tinted;
}
// pbrt划分渲染队列独立任务的渲染管线 // |Task.run()| |Task.run()| ... |Task.run()| // ------------------------------------------ // Task.run() // sample = RandomSampler.getSubSampler() // ray = Camera.generateRayDiffererntial(sample) // intersection = Scene.Intersect(ray) // spectrum = Intergrator.Li(intersection) // File.AddSample(spectrum, sample) // Reporter.Update() void a3SamplerRenderer::render(const a3Scene* scene) { if(!begin()) return; int imageWidth = camera->image->width; int imageHeight = camera->image->height; #pragma omp parallel for schedule(dynamic) for(int x = 0; x < imageWidth; x++) { a3Log::info("Spp:%d Rendering: %8.2f \r", spp, (double) x / imageWidth * 100); for(int y = 0; y < imageHeight; y++) { a3Spectrum color; // 当前采样位置 a3CameraSample sampleTentFilter, sample; for(int z = 0; z < spp; z++) { // 获取下一个采样位置 sampler->getMoreSamples(x, y, &sample, &sampleTentFilter); // memory allocating a3Ray ray; // 生成光线 camera->castRay(&sampleTentFilter, &ray); color += integrator->li(ray, *scene) / spp; } // 临时空间中setColor colorList[x + y * imageWidth] = color; } } a3Log::print("\n"); t3Timer timer; timer.start(); // 后期特效处理 postEffect(); timer.end(); a3Log::info("Post Effec Cost: %f\n", timer.difference()); // 保存真实渲染图像文件 camera->image->write(); end(); }