MTS_NAMESPACE_BEGIN

ref<Bitmap> BidirectionalUtils::renderDirectComponent(Scene *scene, int sceneResID,
        int sensorResID, RenderQueue *queue, const RenderJob *job,
        size_t directSamples) {
    ref<PluginManager> pluginMgr = PluginManager::getInstance();
    ref<Scheduler> scheduler = Scheduler::getInstance();
    const Film *film = scene->getFilm();
    Integrator *integrator = scene->getIntegrator();

    /* Render the direct illumination component separately */
    ref<Bitmap> directImage = new Bitmap(Bitmap::ERGBA, Bitmap::EFloat32, film->getCropSize());
    bool hasMedia = scene->getMedia().size() > 0;
    size_t pixelSamples = directSamples;
    Properties integratorProps(hasMedia ? "volpath" : "direct");

    if (hasMedia) {
        /* Render with a volumetric path tracer */
        integratorProps.setInteger("maxDepth", 2);
    } else {
        /* No participating media -> we can use the 'direct' plugin, which has
           somewhat better control over where to place shading/pixel samples */
        int shadingSamples = 1;
        while (pixelSamples > 8) {
            pixelSamples /= 2;
            shadingSamples *= 2;
        }
        integratorProps.setSize("shadingSamples", shadingSamples);
    }

    ref<Integrator> directIntegrator = static_cast<Integrator *> (pluginMgr->
        createObject(Integrator::m_theClass, integratorProps));

    /* Create a low discrepancy sampler instance for every core */
    Properties samplerProps("ldsampler");
    samplerProps.setSize("sampleCount", pixelSamples);
    ref<Sampler> ldSampler = static_cast<Sampler *> (pluginMgr->
        createObject(Sampler::m_theClass, samplerProps));
    ldSampler->configure();

    directIntegrator->configure();
    directIntegrator->configureSampler(scene, ldSampler);

    std::vector<SerializableObject *> samplers(scheduler->getCoreCount());
    for (size_t i=0; i<scheduler->getCoreCount(); ++i) {
        ref<Sampler> clonedSampler = ldSampler->clone();
        clonedSampler->incRef();
        samplers[i] = clonedSampler.get();
    }
    int ldSamplerResID = scheduler->registerMultiResource(samplers);
    for (size_t i=0; i<scheduler->getCoreCount(); ++i)
        samplers[i]->decRef();

    integrator->incRef();
    scene->setIntegrator(directIntegrator);
    bool success = directIntegrator->render(scene, queue, job,
        sceneResID, sensorResID, ldSamplerResID);
    scene->setIntegrator(integrator);
    integrator->decRef();
    scheduler->unregisterResource(ldSamplerResID);

    if (success) {
        ref<Bitmap> bitmap = new Bitmap(
            Bitmap::ESpectrum, Bitmap::EFloat, film->getCropSize());
        film->develop(Point2i(0, 0), film->getCropSize(), Point2i(0, 0), bitmap);
        return bitmap;
    } else {
        return NULL;
    }
}
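/* Usage sketch (not part of the original source): a bidirectional integrator that
   handles direct illumination separately might invoke the helper above roughly as
   follows; the field name 'm_directSamples' is an assumed example parameter.

       ref<Bitmap> directImage = BidirectionalUtils::renderDirectComponent(
           scene, sceneResID, sensorResID, queue, job, m_directSamples);
       if (directImage == NULL)
           Log(EWarn, "The direct illumination pass did not complete successfully");

   A NULL return value indicates that the nested rendering job failed or was
   canceled, so callers should be prepared to fall back gracefully. */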
ref<Bitmap> BidirectionalUtils::mltLuminancePass(Scene *scene, int sceneResID,
        RenderQueue *queue, int sizeFactor, ref<RenderJob> &nestedJob) {
    ref<PluginManager> pluginMgr = PluginManager::getInstance();
    ref<Scheduler> scheduler = Scheduler::getInstance();
    Properties integratorProps = scene->getIntegrator()->getProperties();
    Vector2i origCropSize = scene->getFilm()->getCropSize();
    Vector2i origSize = scene->getFilm()->getSize();
    Vector2i reducedSize = Vector2i(
        std::max(1, origSize.x / sizeFactor),
        std::max(1, origSize.y / sizeFactor));
    Vector2i reducedCropSize = Vector2i(
        std::max(1, origCropSize.x / sizeFactor),
        std::max(1, origCropSize.y / sizeFactor));
    Point2i reducedCropOffset = scene->getFilm()->getCropOffset() / sizeFactor;
    size_t sampleCount = scene->getSampler()->getSampleCount();
    const Sensor *sensor = scene->getSensor();

    Properties filmProps("hdrfilm");
    filmProps.setInteger("width", reducedSize.x, false);
    filmProps.setInteger("height", reducedSize.y, false);
    filmProps.setInteger("cropWidth", reducedCropSize.x, false);
    filmProps.setInteger("cropHeight", reducedCropSize.y, false);
    filmProps.setInteger("cropOffsetX", reducedCropOffset.x, false);
    filmProps.setInteger("cropOffsetY", reducedCropOffset.y, false);

    ref<Film> nestedFilm = static_cast<Film *>(
        pluginMgr->createObject(Film::m_theClass, filmProps));
    nestedFilm->configure();

    /* Use a higher number of mutations/pixel compared to the second stage */
    Properties samplerProps("independent");
    samplerProps.setSize("sampleCount", sampleCount * sizeFactor);
    ref<Sampler> nestedSampler = static_cast<Sampler *>(
        pluginMgr->createObject(Sampler::m_theClass, samplerProps));
    nestedSampler->configure();

    std::vector<SerializableObject *> samplers(scheduler->getCoreCount());
    for (size_t i=0; i<scheduler->getCoreCount(); ++i) {
        ref<Sampler> clonedSampler = nestedSampler->clone();
        clonedSampler->incRef();
        samplers[i] = clonedSampler.get();
    }
    int nestedSamplerResID = scheduler->registerMultiResource(samplers);
    for (size_t i=0; i<scheduler->getCoreCount(); ++i)
        samplers[i]->decRef();

    /* Configure the sensor */
    Properties sensorProps = sensor->getProperties();
    ref<Sensor> nestedSensor = static_cast<Sensor *>
        (pluginMgr->createObject(Sensor::m_theClass, sensorProps));
    nestedSensor->addChild(nestedSampler);
    nestedSensor->addChild(nestedFilm);
    nestedSensor->configure();
    int nestedSensorResID = scheduler->registerResource(nestedSensor);

    integratorProps.setBoolean("firstStage", true, false);
    ref<Integrator> nestedIntegrator = static_cast<Integrator *> (pluginMgr->
        createObject(Integrator::m_theClass, integratorProps));

    ref<Scene> nestedScene = new Scene(scene);
    nestedScene->setSensor(nestedSensor);
    nestedScene->setIntegrator(nestedIntegrator);
    nestedScene->configure();
    nestedScene->initialize();

    nestedJob = new RenderJob("mlti", nestedScene, queue,
        sceneResID, nestedSensorResID, nestedSamplerResID);
    nestedJob->start();

    if (!nestedJob->wait()) {
        nestedJob = NULL;
        scheduler->unregisterResource(nestedSensorResID);
        scheduler->unregisterResource(nestedSamplerResID);
        return NULL;
    }

    nestedJob = NULL;
    scheduler->unregisterResource(nestedSensorResID);
    scheduler->unregisterResource(nestedSamplerResID);

    /* Instantiate a Gaussian reconstruction filter */
    ref<ReconstructionFilter> rfilter = static_cast<ReconstructionFilter *> (
        PluginManager::getInstance()->createObject(
        MTS_CLASS(ReconstructionFilter), Properties("gaussian")));
    rfilter->configure();

    /* Develop the rendered image into a luminance bitmap */
    ref<Bitmap> luminanceMap = new Bitmap(Bitmap::ELuminance,
        Bitmap::EFloat, reducedCropSize);
    nestedFilm->develop(Point2i(0, 0), reducedCropSize,
        Point2i(0, 0), luminanceMap);

    /* Up-sample the low resolution luminance map */
    luminanceMap = luminanceMap->resample(rfilter,
        ReconstructionFilter::EClamp, ReconstructionFilter::EClamp, origCropSize,
        0.0f, std::numeric_limits<Float>::infinity());

    return luminanceMap;
}
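/* Usage sketch (not part of the original source): an MLT-style integrator would
   typically run this first-stage pass before its main rendering stage to obtain a
   low-resolution luminance image for the second stage (the 'firstStage' flag above
   drives that behavior). The variable names below are assumptions.

       ref<RenderJob> nestedJob;
       ref<Bitmap> luminanceMap = BidirectionalUtils::mltLuminancePass(
           scene, sceneResID, queue, sizeFactor, nestedJob);
       if (luminanceMap == NULL)
           return false;   // the nested first-stage job was canceled

   The returned bitmap has already been up-sampled back to the original crop size,
   so it can be indexed with full-resolution pixel coordinates. */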
void RenderSettingsDialog::apply(SceneContext *ctx) {
    Scene *scene = new Scene(ctx->scene);
    ref<Sensor> oldSensor = scene->getSensor();
    Film *oldFilm = oldSensor->getFilm();
    Properties filmProps = oldSensor->getFilm()->getProperties();
    ref<PluginManager> pluginMgr = PluginManager::getInstance();

    /* Temporarily set up a new file resolver */
    ref<Thread> thread = Thread::getThread();
    ref<FileResolver> oldResolver = thread->getFileResolver();
    ref<FileResolver> newResolver = oldResolver->clone();
    newResolver->prependPath(fs::absolute(scene->getSourceFile()).parent_path());
    thread->setFileResolver(newResolver);

    /* Configure the reconstruction filter */
    Properties rFilterProps(getPluginName(ui->rFilterBox));
    if (m_rFilterNode != NULL)
        m_rFilterNode->putProperties(rFilterProps);
    ref<ReconstructionFilter> rFilter = static_cast<ReconstructionFilter *>
        (pluginMgr->createObject(MTS_CLASS(ReconstructionFilter), rFilterProps));
    rFilter->configure();

    /* Configure the sampler */
    Properties samplerProps(getPluginName(ui->samplerBox));
    if (m_samplerNode != NULL)
        m_samplerNode->putProperties(samplerProps);
    ref<Sampler> sampler = static_cast<Sampler *>
        (pluginMgr->createObject(MTS_CLASS(Sampler), samplerProps));
    sampler->configure();

    /* Configure the integrator */
    Properties integratorProps(getPluginName(ui->integratorBox));
    if (m_integratorNode != NULL)
        m_integratorNode->putProperties(integratorProps);
    ref<Integrator> integrator = static_cast<Integrator *>
        (pluginMgr->createObject(MTS_CLASS(Integrator), integratorProps));
    integrator->configure();

    if (ui->icBox->isChecked()) {
        Properties icProps("irrcache");
        if (m_icNode != NULL)
            m_icNode->putProperties(icProps);
        ref<Integrator> ic = static_cast<Integrator *>
            (pluginMgr->createObject(MTS_CLASS(Integrator), icProps));
        ic->addChild(integrator);
        ic->configure();
        integrator = ic;
    }

    if (ui->aiBox->isChecked()) {
        Properties aiProps("adaptive");
        if (m_aiNode != NULL)
            m_aiNode->putProperties(aiProps);
        ref<Integrator> ai = static_cast<Integrator *>
            (pluginMgr->createObject(MTS_CLASS(Integrator), aiProps));
        ai->addChild(integrator);
        ai->configure();
        integrator = ai;
    }

    QStringList resolution = ui->resolutionBox->currentText().split('x');
    SAssert(resolution.size() == 2);
    Vector2i cropSize(
        std::max(1, resolution[0].toInt()),
        std::max(1, resolution[1].toInt()));

    /* Configure the film */
    Vector2i oldSize = oldFilm->getSize();
    Vector2i oldCropSize = oldFilm->getCropSize();
    Point2i oldCropOffset = oldFilm->getCropOffset();

    Vector2i size(math::roundToInt((oldSize.x * cropSize.x / (Float) oldCropSize.x)),
                  math::roundToInt((oldSize.y * cropSize.y / (Float) oldCropSize.y)));
    Point2i cropOffset(math::roundToInt((oldCropOffset.x * cropSize.x / (Float) oldCropSize.x)),
                       math::roundToInt((oldCropOffset.y * cropSize.y / (Float) oldCropSize.y)));

    filmProps.setInteger("width", size.x, false);
    filmProps.setInteger("height", size.y, false);

    /* The 'gpt' and 'gbdpt' integrators only work with 'multifilm' */
    if (getPluginName(ui->integratorBox) == "gbdpt" || getPluginName(ui->integratorBox) == "gpt")
        filmProps.setPluginName("multifilm");
    else
        filmProps.setPluginName("hdrfilm");

    if (size.x != cropSize.x || size.y != cropSize.y ||
        cropOffset.x != 0 || cropOffset.y != 0) {
        filmProps.setInteger("cropWidth", cropSize.x, false);
        filmProps.setInteger("cropHeight", cropSize.y, false);
        filmProps.setInteger("cropOffsetX", cropOffset.x, false);
        filmProps.setInteger("cropOffsetY", cropOffset.y, false);
    } else {
        filmProps.removeProperty("cropWidth");
        filmProps.removeProperty("cropHeight");
        filmProps.removeProperty("cropOffsetX");
        filmProps.removeProperty("cropOffsetY");
    }

    ctx->originalSize = cropSize;

    ref<Film> film = static_cast<Film *> (pluginMgr->createObject(
        MTS_CLASS(Film), filmProps));
    film->addChild(rFilter);
    film->configure();

    if (cropSize.x != ctx->framebuffer->getWidth() ||
        cropSize.y != ctx->framebuffer->getHeight()) {
        ctx->framebuffer = new Bitmap(Bitmap::ERGBA, Bitmap::EFloat32, cropSize);
        ctx->framebuffer->clear();
        ctx->mode = EPreview;
    }

    /* Configure the sensor */
    Properties sensorProps = oldSensor->getProperties();
    if (oldSensor->getClass()->derivesFrom(MTS_CLASS(PerspectiveCamera))) {
        sensorProps.removeProperty("focalLength");
        sensorProps.setString("fovAxis", "y", false);
        sensorProps.setFloat("fov",
            static_cast<const PerspectiveCamera *>(oldSensor.get())->getYFov(), false);
    }

    ref<Sensor> newSensor = static_cast<Sensor *>
        (pluginMgr->createObject(MTS_CLASS(Sensor), sensorProps));
    newSensor->addChild(sampler);
    newSensor->addChild(film);
    newSensor->setMedium(oldSensor->getMedium());
    newSensor->configure();

    /* Update the scene with the newly constructed elements */
    scene->removeSensor(oldSensor);
    scene->addSensor(newSensor);
    scene->setSensor(newSensor);
    scene->setSampler(sampler);
    scene->setIntegrator(integrator);
    scene->configure();

    ctx->scene = scene;
    thread->setFileResolver(oldResolver);
}
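/* Sketch (assumption, not part of the original source): the pattern used throughout
   apply() -- copy an object's Properties, override selected values, and re-instantiate
   the plugin through the PluginManager -- in its minimal form:

       Properties props = oldFilm->getProperties();    // start from the existing settings
       props.setInteger("width", 1280, false);         // example overrides
       props.setInteger("height", 720, false);
       ref<Film> newFilm = static_cast<Film *> (
           PluginManager::getInstance()->createObject(MTS_CLASS(Film), props));
       newFilm->addChild(rFilter);                     // a configured ReconstructionFilter
       newFilm->configure();                           // must be called before first use
*/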