Eigen::Matrix4f DX11CameraTrackingMultiRes::align(
    ID3D11DeviceContext* context,
    ID3D11ShaderResourceView* inputSRV, ID3D11ShaderResourceView* inputNormalsSRV, ID3D11ShaderResourceView* inputColorsSRV,
    ID3D11ShaderResourceView* modelSRV, ID3D11ShaderResourceView* modelNormalsSRV, ID3D11ShaderResourceView* modelColorsSRV,
    Eigen::Matrix4f& deltaTransform, unsigned int level,
    unsigned int maxInnerIter, unsigned int maxOuterIter,
    float distThres, float normalThres,
    float condThres, float angleThres,
    float earlyOut, ICPErrorLog* errorLog)
{
    float lastICPError = -1.0f;

    for (unsigned int i = 0; i < maxOuterIter; i++)
    {
        D3DXVECTOR3 mean;
        float meanStDev;
        float nValidCorres;

        LinearSystemConfidence currConfWiReject;
        LinearSystemConfidence currConfNoReject;

        if (errorLog)
        {
            // Run ICP without correspondence rejection. This must happen before the
            // standard pass because it needs the old delta transform.
            float dThresh = 1000.0f;
            float nThresh = 0.0f;
            computeCorrespondences(context, inputSRV, inputNormalsSRV, inputColorsSRV, modelSRV, modelNormalsSRV, modelColorsSRV,
                mean, meanStDev, nValidCorres, deltaTransform, level, dThresh, nThresh);
            computeBestRigidAlignment(context, inputSRV, inputNormalsSRV, mean, meanStDev, nValidCorres, deltaTransform, level,
                maxInnerIter, condThres, angleThres, currConfNoReject);
            errorLog->addCurrentICPIteration(currConfNoReject, level);
        }

        // Standard correspondence search and alignment (with rejection thresholds).
        computeCorrespondences(context, inputSRV, inputNormalsSRV, inputColorsSRV, modelSRV, modelNormalsSRV, modelColorsSRV,
            mean, meanStDev, nValidCorres, deltaTransform, level, distThres, normalThres);
        deltaTransform = computeBestRigidAlignment(context, inputSRV, inputNormalsSRV, mean, meanStDev, nValidCorres, deltaTransform, level,
            maxInnerIter, condThres, angleThres, currConfWiReject);

        // Early out once the registration error stops improving.
        if (std::abs(lastICPError - currConfWiReject.sumRegError) < earlyOut)
        {
            //std::cout << "ICP aborted because no further convergence... " << i << std::endl;
            break;
        }

        lastICPError = currConfWiReject.sumRegError;

        //std::cout << currConfWiReject.numCorr << std::endl;
        //std::cout << "i " << i << std::endl;
        //currConf.print();
        //if (level == 0) std::cout << deltaTransform << std::endl;
        //deltaTransform.setIdentity();

        /*if (deltaTransform(0, 0) == -std::numeric_limits<float>::infinity()) {
            return m_matrixTrackingLost;
        }*/
    }

    return deltaTransform;
}
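// --- Illustrative sketch (not part of the original source) -------------------
// The outer loop of align() above follows a standard ICP pattern: re-search
// correspondences under the current delta transform, re-solve the rigid
// alignment, and stop early once the summed registration error changes by less
// than earlyOut. The minimal, self-contained program below sketches only that
// early-out control flow; solveIteration() and IterationResult are hypothetical
// stand-ins for computeCorrespondences()/computeBestRigidAlignment() and
// LinearSystemConfidence, not the project's actual API.
#include <cmath>
#include <cstdio>

struct IterationResult {
    float sumRegError;  // plays the role of LinearSystemConfidence::sumRegError
};

// Toy solver: the error decays with each iteration, as a converging ICP would.
static IterationResult solveIteration(unsigned int iteration) {
    IterationResult r;
    r.sumRegError = 10.0f / float(iteration + 1);
    return r;
}

int main() {
    const unsigned int maxOuterIter = 20;
    const float earlyOut = 0.05f;  // same role as the earlyOut parameter of align()

    float lastICPError = -1.0f;    // sentinel, as in align()
    for (unsigned int i = 0; i < maxOuterIter; i++) {
        const IterationResult r = solveIteration(i);

        // Stop once the error no longer improves by more than the threshold.
        if (std::abs(lastICPError - r.sumRegError) < earlyOut) {
            std::printf("converged after %u outer iterations\n", i);
            break;
        }
        lastICPError = r.sumRegError;
    }
    return 0;
}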
void AutoBlend::doBlend()
{
    auto selected = gallery->getSelected();
    if (selected.size() < 2) return;

    // Clear previous results
    for (auto t : results->items) t->deleteLater();
    results->items.clear();

    ((GraphicsScene*)scene())->showPopup("Please wait..");

    // Blend every pair of selected shapes
    for (int shapeI = 0; shapeI < selected.size(); shapeI++)
    {
        for (int shapeJ = shapeI + 1; shapeJ < selected.size(); shapeJ++)
        {
            //auto sourceName = selected.front()->data["targetName"].toString();
            //auto targetName = selected.back()->data["targetName"].toString();

            auto sourceName = selected[shapeI]->data.value("targetName").toString();
            auto targetName = selected[shapeJ]->data.value("targetName").toString();

            auto cacheSource = document->cacheModel(sourceName);
            auto cacheTarget = document->cacheModel(targetName);
            if (cacheSource == nullptr || cacheTarget == nullptr) continue;

            auto source = QSharedPointer<Structure::Graph>(cacheSource->cloneAsShapeGraph());
            auto target = QSharedPointer<Structure::Graph>(cacheTarget->cloneAsShapeGraph());

            auto gcorr = QSharedPointer<GraphCorresponder>(new GraphCorresponder(source.data(), target.data()));

            // Apply computed correspondence
            //if (false) // enable/disable auto correspondence
            {
                QVector<QPair<QString, QString> > all_pairs;

                for (auto n : source->nodes)
                {
                    if (!document->datasetCorr[sourceName][n->id][targetName].empty())
                    {
                        for (auto nj : document->datasetCorr[sourceName][n->id][targetName])
                        {
                            all_pairs << qMakePair(n->id, nj);
                        }
                    }
                }

                ResolveCorrespondence(source.data(), target.data(), all_pairs, gcorr.data());
            }

            gcorr->computeCorrespondences();

            // Schedule blending sequence
            auto scheduler = QSharedPointer<Scheduler>(new Scheduler);
            auto blender = QSharedPointer<TopoBlender>(new TopoBlender(gcorr.data(), scheduler.data()));

            // Sample geometries
            int numSamples = 100;
            int reconLevel = 4;

            int LOD = widget->levelDetails->currentIndex();
            switch (LOD)
            {
                case 0: numSamples = 100;   reconLevel = 4; break;
                case 1: numSamples = 1000;  reconLevel = 5; break;
                case 2: numSamples = 10000; reconLevel = 7; break;
            }

            /// Visualize schedule:
            if (false)
            {
                blender->parentWidget = new QMainWindow();
                blender->parentWidget->show();
                blender->setupUI();

                QStringList corr;
                for (auto n : scheduler->activeGraph->nodes)
                {
                    corr << QString("%1-%2").arg(n->id, n->property["correspond"].toString());
                }
                QMessageBox::information(blender->parentWidget, "Correspondence", corr.join("\n"));
            }

            auto synthManager = QSharedPointer<SynthesisManager>(new SynthesisManager(gcorr.data(), scheduler.data(), blender.data(), numSamples));
            synthManager->genSynData();

            // Compute blending
            scheduler->timeStep = 1.0 / 100.0;
            scheduler->defaultSchedule();
            scheduler->executeAll();

            // Reconstruct and display the requested number of in-between shapes
            int numResults = widget->count->value();
            for (int i = 0; i < numResults; i++)
            {
                double a = ((double(i) / (numResults - 1)) * 0.9) + 0.05;
                auto blendedModel = scheduler->allGraphs[a * (scheduler->allGraphs.size() - 1)];

                synthManager->renderGraph(*blendedModel, "", false, reconLevel);

                auto t = results->addTextItem("");
                t->setCamera(cameraPos, cameraMatrix);

                QVariantMap data;
                data["name"] = QString("%1_%2").arg(sourceName).arg(targetName);
                t->setData(data);

                // Add parts of the blended shape
                for (auto n : blendedModel->nodes)
                {
                    t->addAuxMesh(toBasicMesh(blendedModel->getMesh(n->id), n->vis_property["color"].value<QColor>()));
                }
            }
        }
    }

    ((GraphicsScene*)scene())->hidePopup();
}
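// --- Illustrative sketch (not part of the original source) -------------------
// In the result loop above, index i is mapped to a blend time
// a = (i / (numResults - 1)) * 0.9 + 0.05, i.e. numResults samples spread evenly
// over [0.05, 0.95], deliberately skipping the pure source (a = 0) and pure
// target (a = 1). The standalone helper below reproduces just that mapping; it
// assumes numResults >= 2 (a single requested result would divide by zero, in
// this sketch and in the original expression alike) and uses none of the
// Scheduler/SynthesisManager types.
#include <cstdio>
#include <vector>

static std::vector<double> blendTimes(int numResults) {
    std::vector<double> times;
    for (int i = 0; i < numResults; i++) {
        const double a = ((double(i) / (numResults - 1)) * 0.9) + 0.05;
        times.push_back(a);
    }
    return times;
}

int main() {
    // Five evenly spaced blend times between 0.05 and 0.95.
    for (const double a : blendTimes(5))
        std::printf("%.3f ", a);
    std::printf("\n");
    return 0;
}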