void Helper::loadKeyPointsFromFile(const char* nativePath, std::vector<cv::KeyPoint> &keypoints) { cv::FileStorage fs(nativePath, cv::FileStorage::READ); const cv::FileNode keypointsNode = fs["keypoints"]; cv::read(keypointsNode ,keypoints); fs.release(); }
// ----------------------------------------------------------------------------- // // Purpose and Method: // Inputs: // Outputs: // Dependencies: // Restrictions and Caveats: // // ----------------------------------------------------------------------------- cv::Mat Affine2DFactorizedProblem::computeResidual ( cv::Mat& image, cv::Mat& params ) { cv::Mat template_coords; std::vector<int> ctrl_coords_indices; std::vector<LineIndices> ctrl_coords_lines; cv::Mat warped_image, warped_image_gray; cv::Mat residual; assert(params.rows == 6); assert(params.cols == 1); m_object_model->getReferenceCoords(template_coords); warped_image = m_motion_model->warpImage(image, params, template_coords, ctrl_coords_indices); warped_image_gray = cv::Mat::zeros(warped_image.rows, warped_image_gray.cols, cv::DataType<uchar>::type); if (warped_image.channels() == 3) { cvtColor(warped_image, warped_image_gray, CV_RGB2GRAY); } else { warped_image_gray = warped_image.clone(); } cv::Mat features_vector = m_object_model->extractFeaturesFromWarpedImage(warped_image_gray); cv::Mat template_features_vector = m_object_model->computeTemplateFeatures(params); residual = features_vector - template_features_vector; #ifdef DEBUG_ // Show the warped image cv::namedWindow("warped image"); cv::imshow("warped image", warped_image_gray); cv::namedWindow("template"); cv::Mat template_uchar; template_features_vector.reshape(1, warped_image.rows).convertTo(template_uchar, cv::DataType<uchar>::type); cv::imshow("template", template_uchar); cv::namedWindow("residual"); cv::Mat residual_uchar; residual.convertTo(residual_uchar, cv::DataType<uchar>::type); cv::imshow("residual", residual_uchar.reshape(1, warped_image.rows)); // write Mat objects to the file cv::FileStorage fs("Affine2DFactorizedProblem_computeResidual.xml", cv::FileStorage::WRITE); fs << "template_features_vector" << template_features_vector; fs << "features_vector" << features_vector; fs << "params" << params; fs << "residual" << residual; 
fs.release(); // cv::imwrite("Affine2DFactorizedProblem_computeResidual_warped_image.bmp", warped_image_gray); #endif return residual; //residual_float; };
// Serializes `mat` under the node name "Mat" into a storage file in the data path.
void saveMat(Mat mat, string filename) {
    FileStorage storage(ofToDataPath(filename), FileStorage::WRITE);
    storage << "Mat" << mat;
}
// Submits a job described by `jobdesc` to a UNICORE BES service.
// Builds a bes-factory:CreateActivity SOAP request carrying the JSDL job
// description, optionally performs credential delegation, sends the request
// through either `client` or `client_entry`, and on success starts the job
// on the returned activity address. `id` receives the activity identifier.
// Returns true on successful submission and job start, false otherwise.
bool UNICOREClient::submit(const JobDescription& jobdesc, XMLNode& id, bool delegate) {
  std::string faultstring;
  logger.msg(INFO, "Creating and sending request");
  // Create job request
  /*
     bes-factory:CreateActivity
       bes-factory:ActivityDocument
         jsdl:JobDefinition
   */
  PayloadSOAP req(unicore_ns);
  XMLNode op = req.NewChild("bes-factory:CreateActivity");
  XMLNode act_doc = op.NewChild("bes-factory:ActivityDocument");
  set_bes_factory_action(req, "CreateActivity");
  WSAHeader(req).To(rurl.str());
  //XMLNode proxyHeader = req.Header().NewChild("u6:Proxy");
  // Embed the whole PEM proxy file in the SOAP header (UNICORE-specific element).
  if (true) {
    std::string pem_str;
    std::ifstream proxy_file(proxyPath.c_str()/*, ifstream::in*/);
    // delimiter '\0': reads the entire file into pem_str in one call
    std::getline<char>(proxy_file, pem_str, 0);
    req.Header().NewChild("u6:Proxy") = pem_str;
    //std::cout << "\n----\n" << "pem_str = " << pem_str << "\n----\n"; //debug code, remove!
  }
  //std::string jsdl_str;
  //std::getline<char>(jsdl_file, jsdl_str, 0);
  // Render the job description as JSDL and graft it into the activity document.
  std::string jsdl_str;
  if (!jobdesc.UnParse(jsdl_str, "nordugrid:jsdl")) {
    logger.msg(INFO, "Unable to submit job. Job description is not valid in the %s format", "nordugrid:jsdl");
    return false;
  }
  XMLNode jsdl_doc = act_doc.NewChild(XMLNode(jsdl_str));
  //std::cout << "\n----\n" << jsdl_str << "\n----\n"; //Debug line to verify the activity document
  jsdl_doc.Namespaces(unicore_ns); // Unify namespaces
  PayloadSOAP *resp = NULL;
  // Strip local-file source URIs out of the DataStaging elements: files that
  // are staged from the client side must not appear as remote sources.
  XMLNode ds = act_doc["jsdl:JobDefinition"]["jsdl:JobDescription"]["jsdl:DataStaging"];
  for (; (bool)ds; ds = ds[1]) {
    // FilesystemName - ignore
    // CreationFlag - ignore
    // DeleteOnTermination - ignore
    XMLNode source = ds["jsdl:Source"];
    XMLNode target = ds["jsdl:Target"];
    if ((bool)source) {
      std::string s_name = ds["jsdl:FileName"];
      if (!s_name.empty()) {
        XMLNode x_url = source["jsdl:URI"];
        std::string s_url = x_url;
        if (s_url.empty())
          s_url = "./" + s_name;
        else {
          URL u_url(s_url);
          if (!u_url) {
            if (s_url[0] != '/') s_url = "./" + s_url;
          } else {
            if (u_url.Protocol() == "file") {
              s_url = u_url.Path();
              if (s_url[0] != '/') s_url = "./" + s_url;
            } else
              // remote source: leave the URI element in place
              s_url.resize(0);
          }
        }
        if (!s_url.empty()) x_url.Destroy();
      }
    }
  }
  act_doc.GetXML(jsdl_str);
  logger.msg(DEBUG, "Job description to be sent: %s", jsdl_str);
  // Try to figure out which credentials are used
  // TODO: Method used is unstable beacuse it assumes some predefined
  // structure of configuration file. Maybe there should be some
  // special methods of ClientTCP class introduced.
  std::string deleg_cert;
  std::string deleg_key;
  if (delegate) {
    client->Load(); // Make sure chain is ready
    XMLNode tls_cfg = find_xml_node((client->GetConfig())["Chain"], "Component", "name", "tls.client");
    if (tls_cfg) {
      // Prefer the proxy; fall back to certificate + key pair.
      deleg_cert = (std::string)(tls_cfg["ProxyPath"]);
      if (deleg_cert.empty()) {
        deleg_cert = (std::string)(tls_cfg["CertificatePath"]);
        deleg_key = (std::string)(tls_cfg["KeyPath"]);
      } else
        deleg_key = deleg_cert;
    }
    if (deleg_cert.empty() || deleg_key.empty()) {
      logger.msg(ERROR, "Failed to find delegation credentials in " "client configuration");
      return false;
    }
  }
  // Send job request + delegation
  if (client) {
    if (delegate) {
      DelegationProviderSOAP deleg(deleg_cert, deleg_key);
      logger.msg(INFO, "Initiating delegation procedure");
      if (!deleg.DelegateCredentialsInit(*(client->GetEntry()), &(client->GetContext()))) {
        logger.msg(ERROR, "Failed to initiate delegation");
        return false;
      }
      deleg.DelegatedToken(op);
    }
    MCC_Status status = client->process("http://schemas.ggf.org/bes/2006/08/bes-factory/" "BESFactoryPortType/CreateActivity", &req, &resp);
    if (!status) {
      logger.msg(ERROR, "Submission request failed");
      return false;
    }
    if (resp == NULL) {
      logger.msg(VERBOSE, "There was no SOAP response");
      return false;
    }
  }
  // Alternative path: raw message-chain entry instead of a high-level client.
  else if (client_entry) {
    Message reqmsg;
    Message repmsg;
    MessageAttributes attributes_req;
    attributes_req.set("SOAP:ACTION", "http://schemas.ggf.org/bes/2006/08/" "bes-factory/BESFactoryPortType/CreateActivity");
    MessageAttributes attributes_rep;
    MessageContext context;
    if (delegate) {
      DelegationProviderSOAP deleg(deleg_cert, deleg_key);
      logger.msg(INFO, "Initiating delegation procedure");
      if (!deleg.DelegateCredentialsInit(*client_entry, &context)) {
        logger.msg(ERROR, "Failed to initiate delegation");
        return false;
      }
      deleg.DelegatedToken(op);
    }
    reqmsg.Payload(&req);
    reqmsg.Attributes(&attributes_req);
    reqmsg.Context(&context);
    repmsg.Attributes(&attributes_rep);
    repmsg.Context(&context);
    MCC_Status status = client_entry->process(reqmsg, repmsg);
    if (!status) {
      logger.msg(ERROR, "Submission request failed");
      return false;
    }
    logger.msg(INFO, "Submission request succeed");
    if (repmsg.Payload() == NULL) {
      logger.msg(VERBOSE, "There was no response to a submission request");
      return false;
    }
    try {
      resp = dynamic_cast<PayloadSOAP*>(repmsg.Payload());
    } catch (std::exception&) {}
    if (resp == NULL) {
      logger.msg(ERROR, "A response to a submission request was not " "a SOAP message");
      delete repmsg.Payload();
      return false;
    }
  }
  else {
    logger.msg(ERROR, "There is no connection chain configured");
    return false;
  }
  //XMLNode id;
  SOAPFault fs(*resp);
  if (!fs) {
    // Extract the activity identifier and start the job on the address it names.
    (*resp)["CreateActivityResponse"]["ActivityIdentifier"].New(id);
    //id.GetDoc(jobid);
    //std::cout << "\n---\nActivityIdentifier:\n" << (std::string)((*resp)["CreateActivityResponse"]["ActivityIdentifier"]) << "\n---\n";//debug code
    delete resp;
    UNICOREClient luc((std::string)id["Address"], client_config); //local unicore client
    //std::cout << "\n---\nid element containing (?) Job Address:\n" << (std::string)id << "\n---\n";//debug code
    return luc.uasStartJob();
    //return true;
  } else {
    faultstring = fs.Reason();
    std::string s;
    resp->GetXML(s);
    delete resp;
    logger.msg(DEBUG, "Submission returned failure: %s", s);
    logger.msg(ERROR, "Submission failed, service returned: %s", faultstring);
    return false;
  }
}
// Loads the camera-to-projector extrinsics (rotation and translation vectors)
// from a calibration file resolved through the data path.
void CameraProjectorCalibration::loadExtrinsics(string filename, bool absolute) {
    cv::FileStorage storage(ofToDataPath(filename, absolute), cv::FileStorage::READ);
    storage["Rotation_Vector"] >> rotCamToProj;
    storage["Translation_Vector"] >> transCamToProj;
}
// Deserializes the wrapped object from the root node of the named storage file.
CV_WRAP void read( const String& fileName )
{
    FileStorage storage(fileName, FileStorage::READ);
    wrapped->read(storage.root());
}
// Returns true when `filename` can be opened for reading (i.e. it exists and
// is accessible); the stream is closed automatically when it goes out of scope.
inline static bool is_readable(const std::string& filename) {
    std::ifstream probe(filename.c_str());
    return !probe.fail();
}
// Constructs the chat input widget: wires up the nick selector, the multi-line
// input edit, the text-formatting buttons with their color menus, and binds
// all user-configurable settings (fonts, spell check, history, etc.) so the
// widget tracks changes made in the settings dialog.
InputWidget::InputWidget(QWidget *parent)
    : AbstractItemView(parent),
    _networkId(0)
{
    ui.setupUi(this);
    connect(ui.ownNick, SIGNAL(activated(QString)), this, SLOT(changeNick(QString)));

    layout()->setAlignment(ui.ownNick, Qt::AlignBottom);
    layout()->setAlignment(ui.inputEdit, Qt::AlignBottom);
    layout()->setAlignment(ui.showStyleButton, Qt::AlignBottom);
    layout()->setAlignment(ui.styleFrame, Qt::AlignBottom);

    ui.styleFrame->setVisible(false);

    // Keyboard focus always lands in the actual text edit.
    setFocusProxy(ui.inputEdit);
    ui.ownNick->setFocusProxy(ui.inputEdit);

    ui.ownNick->setSizeAdjustPolicy(QComboBox::AdjustToContents);
    ui.ownNick->installEventFilter(new MouseWheelFilter(this));
    ui.inputEdit->installEventFilter(this);

    ui.inputEdit->setMinHeight(1);
    ui.inputEdit->setMaxHeight(5);
    ui.inputEdit->setMode(MultiLineEdit::MultiLine);
    ui.inputEdit->setPasteProtectionEnabled(true);

    ui.boldButton->setIcon(QIcon::fromTheme("format-text-bold"));
    ui.italicButton->setIcon(QIcon::fromTheme("format-text-italic"));
    ui.underlineButton->setIcon(QIcon::fromTheme("format-text-underline"));
    ui.textcolorButton->setIcon(QIcon::fromTheme("format-text-color"));
    ui.highlightcolorButton->setIcon(QIcon::fromTheme("format-fill-color"));
    ui.encryptionIconLabel->hide();

    // Build the mIRC color palettes for foreground and highlight menus.
    _colorMenu = new QMenu();
    _colorFillMenu = new QMenu();

    QStringList names;
    names << tr("White") << tr("Black") << tr("Dark blue") << tr("Dark green") << tr("Red") << tr("Dark red") << tr("Dark magenta") << tr("Orange")
          << tr("Yellow") << tr("Green") << tr("Dark cyan") << tr("Cyan") << tr("Blue") << tr("Magenta") << tr("Dark gray") << tr("Light gray");

    QPixmap pix(16, 16);
    for (int i = 0; i < inputLine()->mircColorMap().count(); i++) {
        pix.fill(inputLine()->mircColorMap().values()[i]);
        _colorMenu->addAction(pix, names[i])->setData(inputLine()->mircColorMap().keys()[i]);
        _colorFillMenu->addAction(pix, names[i])->setData(inputLine()->mircColorMap().keys()[i]);
    }

    // Empty data string means "reset to default color" in the chosen-slot handlers.
    pix.fill(Qt::transparent);
    _colorMenu->addAction(pix, tr("Clear Color"))->setData("");
    _colorFillMenu->addAction(pix, tr("Clear Color"))->setData("");

    ui.textcolorButton->setMenu(_colorMenu);
    connect(_colorMenu, SIGNAL(triggered(QAction *)), this, SLOT(colorChosen(QAction *)));
    ui.highlightcolorButton->setMenu(_colorFillMenu);
    connect(_colorFillMenu, SIGNAL(triggered(QAction *)), this, SLOT(colorHighlightChosen(QAction *)));

    new TabCompleter(ui.inputEdit);

    // Font settings: follow live changes and apply the stored values once now.
    UiStyleSettings fs("Fonts");
    fs.notify("UseCustomInputWidgetFont", this, SLOT(setUseCustomFont(QVariant)));
    fs.notify("InputWidget", this, SLOT(setCustomFont(QVariant)));
    if (fs.value("UseCustomInputWidgetFont", false).toBool())
        setCustomFont(fs.value("InputWidget", QFont()));

    // Behavior settings: register change notification, then apply current value.
    UiSettings s("InputWidget");

#ifdef HAVE_KDE4
    s.notify("EnableSpellCheck", this, SLOT(setEnableSpellCheck(QVariant)));
    setEnableSpellCheck(s.value("EnableSpellCheck", false));
#endif

    s.notify("EnableEmacsMode", this, SLOT(setEnableEmacsMode(QVariant)));
    setEnableEmacsMode(s.value("EnableEmacsMode", false));

    s.notify("ShowNickSelector", this, SLOT(setShowNickSelector(QVariant)));
    setShowNickSelector(s.value("ShowNickSelector", true));

    s.notify("ShowStyleButtons", this, SLOT(setShowStyleButtons(QVariant)));
    setShowStyleButtons(s.value("ShowStyleButtons", true));

    s.notify("EnablePerChatHistory", this, SLOT(setEnablePerChatHistory(QVariant)));
    setEnablePerChatHistory(s.value("EnablePerChatHistory", true));

    s.notify("MaxNumLines", this, SLOT(setMaxLines(QVariant)));
    setMaxLines(s.value("MaxNumLines", 5));

    s.notify("EnableScrollBars", this, SLOT(setScrollBarsEnabled(QVariant)));
    setScrollBarsEnabled(s.value("EnableScrollBars", true));

    s.notify("EnableLineWrap", this, SLOT(setLineWrapEnabled(QVariant)));
    setLineWrapEnabled(s.value("EnableLineWrap", true));

    s.notify("EnableMultiLine", this, SLOT(setMultiLineEnabled(QVariant)));
    setMultiLineEnabled(s.value("EnableMultiLine", true));

    // Global shortcut to focus the input line.
    ActionCollection *coll = QtUi::actionCollection();

    Action *activateInputline = coll->add<Action>("FocusInputLine");
    connect(activateInputline, SIGNAL(triggered()), SLOT(setFocus()));
    activateInputline->setText(tr("Focus Input Line"));
    activateInputline->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_L));

    connect(inputLine(), SIGNAL(textEntered(QString)), SLOT(onTextEntered(QString)), Qt::QueuedConnection); // make sure the line is already reset, bug #984
    connect(inputLine(), SIGNAL(currentCharFormatChanged(QTextCharFormat)), this, SLOT(currentCharFormatChanged(QTextCharFormat)));
}
//Training inline void BoW::create_histograms(int N_cent, const string path_run_folders) { //Hacer para todas las personas, luego en la parte de entrenamiento no se usan todos //Step 2 - Obtain the BoF descriptor for given image/video frame. //prepare BOW descriptor extractor from the dictionary cv::Mat dictionary; std::stringstream name_vocabulary; name_vocabulary << "./run"<< run <<"/visual_vocabulary/means_Ng" << N_cent << "_dim" <<dim << "_all_sc" << ".yml"; cout << name_vocabulary.str() << endl; cv::FileStorage fs(name_vocabulary.str(), cv::FileStorage::READ); fs["vocabulary"] >> dictionary; fs.release(); //cout << "Loaded" << endl; int rows_dic = dictionary.rows; int cols_dic = dictionary.cols; //cout << "OpenCV Dict rows & cols " << rows_dic << " & " << cols_dic << endl; vec hist; for (uword pe=0; pe<peo_train.n_rows; ++pe) { for (uword act = 0 ; act < actions.n_rows; ++act) { for (uword sc = 1 ; sc <= 4; ++sc) { mat mat_features_video_i; std::stringstream ssName_feat_video; //ssName_feat_video << "./run"<< run <<"/features/train/feat_vec" << peo_train(pe) << "_" << actions(act) << "_d" << sc; ssName_feat_video << path_run_folders <<"/features_all_nor/feat_vec_" << peo_train(pe) << "_" << actions(act) << "_d" << sc; //cout << ssName_feat_video.str() << endl; mat_features_video_i.load( ssName_feat_video.str() ); fmat f_mat_features_video_i = conv_to< fmat >::from(mat_features_video_i); mat_features_video_i.reset(); cv::Mat features_video_i_OpenCV(f_mat_features_video_i.n_cols, dim, CV_32FC1, f_mat_features_video_i.memptr() ); int rows = features_video_i_OpenCV.rows; int cols = features_video_i_OpenCV.cols; //cout << "Features rows & cols " << rows << " & " << cols << endl; // init the matcher with you pre-trained codebook cv::Ptr<cv::DescriptorMatcher > matcher = new cv::BFMatcher(cv::NORM_L2); matcher->add(std::vector<cv::Mat>(1, dictionary)); // matches std::vector<cv::DMatch> matches; matcher->match(features_video_i_OpenCV,matches); //cout << 
matches.size() << endl; //Mira aqui: http://ttic.uchicago.edu/~mostajabi/Tutorial.html hist.zeros(N_cent) ; for (int i=0; i< matches.size(); ++i) { //cout << matches[i].trainIdx << " " ; int bin = matches[i].trainIdx ; hist(bin)++; } //getchar(); //cout << hist.t() << endl; hist = hist/hist.max(); //cout << hist.n_elem << endl; std::stringstream ssName_hist; ssName_hist << "./run"<<run << "/Histograms_BoW_OpenCV/hist_" << peo_train(pe) << "_" << actions(act) << "_d" << sc << "_Ng"<< N_cent << ".h5"; hist.save(ssName_hist.str(), hdf5_binary); } } } }
bool TextContent::writeToEditCtrl() { if (endoffile_) { return false; } if (textctrl_ == NULL) { return false; } if (position_ > kMaxLength) { endoffile_ = true; return false; } //textctrl_->Clear(); wxFile fs (filepath_); if (!fs.IsOpened()) { if (position_ == 0) { textctrl_->WriteText (msg_no_content); } return false; } fs.Seek (position_); if (fs.Eof()) { endoffile_ = true; return false; } char bbuf[2] = {0,0}; char buf[1000]; ssize_t rcount = 0; wxString line; bool wasLF = false; bool wasCR = false; int numread = 0; int linecount = 0; int cl = 0; wxString tx; if (binary_) do { rcount = fs.Read (bbuf, 1); if (fs.Eof()) { endoffile_ = true; } if (rcount < 1) { endoffile_ = true; } else { if (cl == 0) { textctrl_->WriteText (getFixString (position_ + numread)); textctrl_->WriteText (L" "); textctrl_->WriteText (getHex (bbuf[0])); textctrl_->WriteText (L" "); } else if (cl == 7) { textctrl_->WriteText (getHex (bbuf[0])); textctrl_->WriteText (L" "); } else { textctrl_->WriteText (getHex (bbuf[0])); textctrl_->WriteText (L" "); } if (bbuf[0] < 32 || bbuf[0] > 126) { tx += L"."; } else { tx += (wchar_t)bbuf[0]; } ++cl; if (cl == 16 || endoffile_) { for (int i = 0; i < 16 - cl; ++i) { textctrl_->WriteText (L" "); if (i == 7) { textctrl_->WriteText (L" "); } } cl = 0; textctrl_->WriteText (L" "); textctrl_->WriteText (tx); textctrl_->WriteText (L"\n"); ++linecount; tx.erase(); } ++numread; } if (linecount > kMaxLineCountPerReadBinary || numread >= kMaxLengthPerRead || endoffile_) { position_ += numread; break; } } while (true); else do { rcount = fs.Read (buf, 1000); for (int ix = 0; ix < rcount; ++ix) { ++numread; if (linecount <= kMaxLineCountPerRead && numread <= kMaxLengthPerRead) { if (buf[ix] == 10) { if (!wasCR) { line += L'\n'; textctrl_->WriteText (line); line.erase(); ++linecount; } wasLF = true; wasCR = false; } else if (buf[ix] == 13) { line += L'\n'; textctrl_->WriteText (line); line.erase(); ++linecount; wasLF = false; wasCR = true; } else if (buf[ix] == 
9) { line += L" "; wasLF = false; wasCR = false; } else if (buf[ix] < 32 || buf[ix] > 126) { line += L'.'; wasLF = false; wasCR = false; } else { wchar_t wc = buf[ix]; line += wc; wasLF = false; wasCR = false; } if (line.size() > 2000) { line += L'\n'; textctrl_->WriteText (line); line.erase(); ++linecount; } } else { textctrl_->WriteText (line); break; } } if (fs.Eof()) { endoffile_ = true; } if (linecount > kMaxLineCountPerRead || numread <= kMaxLengthPerRead || endoffile_) { position_ += numread; break; } } while (true); // restore text cursor //textctrl_->SetInsertionPoint (oldInsPt); return true; }
//--------------------------------------------------------------------------------------------------
// Camera calibration tool. Reads a configuration file naming the calibration
// images and board geometry, detects a chessboard or symmetric dot grid in
// each image, and runs cv::calibrateCamera starting from a hard-coded
// intrinsic guess. Writes "<cameraName>_cameraMatrix.yml" with the resulting
// camera matrix and distortion coefficients. Returns 0 on success, -1 when no
// configuration file was given.
int main(int argc, char** argv)
{
    // Read in a configuration file
    if ( argc < 2 )
    {
        fprintf( stderr, "No configuration file provided\n" );
        showUsage( argv[ 0 ] );
        return -1;
    }

    std::cout << "Starting Camera Calibration" << std::endl;

    cv::Size boardSize;       // pattern size in corners/dots
    cv::Size boardSizeMm;     // physical board size in millimetres
    cv::Size imageSize;
    std::string cameraName;
    bool bUseDotPattern;
    std::string configFilename( argv[ 1 ] );
    std::vector<std::string> NameLocation = LoadConfig( configFilename,&boardSize,&boardSizeMm,&cameraName,&bUseDotPattern);

    // Physical spacing between adjacent pattern points, converted mm -> metres.
    float squareWidth = ((float)boardSizeMm.width/1000.0f)/(boardSize.width-1);
    float squareHeight = ((float)boardSizeMm.height/1000.0f)/(boardSize.height-1);
    std::cout << squareWidth << " " << squareHeight << std::endl;

    std::vector<cv::Point3f> objectCorners;              //The points in the world coordinates
    std::vector<std::vector<cv::Point3f>> objectPoints;  // per-image 3D board points
    std::vector<std::vector<cv::Point2f>> imagePoints;   // per-image detected 2D points

    //Camera output matrices
    cv::Mat cameraMatrix;
    cv::Mat distCoeffs;

    int successes = 0;

    //3D Scene Points
    //Initialize the chessboard corners
    //in the chessboard reference frame
    //The corners are at 3D location (x,y,z) = (i,j,0)
    for(int i=boardSize.height-1; i>=0; i--)
        for(int j=0; j<boardSize.width; j++)
            objectCorners.push_back(cv::Point3f(j*squareWidth,i*squareHeight,0.0f));

    for(int i = 0; i < (int)NameLocation.size(); i++)
    {
        std::string ImageAddress = NameLocation.at(i);
        std::cout << "Image Name " <<ImageAddress << std::endl;
        cv::Mat image,colorimage1,colorimage2;
        image =cv::imread(ImageAddress, CV_LOAD_IMAGE_GRAYSCALE);
        //colorimage1 =cv::imread(ImageAddress, CV_LOAD_IMAGE_COLOR);
        //colorimage2 =cv::imread(ImageAddress, CV_LOAD_IMAGE_COLOR);
        imageSize.height = image.rows;
        imageSize.width = image.cols;
        /*cv::imshow("Image",image); cv::waitKey(); cv::destroyWindow("Image");*/

        // output vector of image points
        std::vector<cv::Point2f> imageCorners;
        // number of corners on the chessboard
        bool found = false;
        if ( !bUseDotPattern )
        {
            found = cv::findChessboardCorners(image,boardSize,imageCorners);
            if ( found )
            {
                //cv::drawChessboardCorners(colorimage1,boardSize,imageCorners,found);
                //Get subpixel accuracy on the corners
                //cv::cornerSubPix(image,imageCorners,cv::Size(5,5),cv::Size(-1,-1),cv::TermCriteria(cv::TermCriteria::MAX_ITER + cv::TermCriteria::EPS,100,0.225));
                //cv::drawChessboardCorners(colorimage2,boardSize,imageCorners,found);
            }
        }
        else
        {
            printf( "Looking for circles...\n" );
            //found = cv::findCirclesGrid(image,boardSize,imageCorners);
            BlobDetector::Params params;
            //params.minArea = 5.0;
            //params.minArea = 5.0;
            params.maxArea = 20000.0;
            //params.minCircularity = 0.5;
            //params.minDistBetweenBlobs = 1.0;
            //params.filterByCircularity = false;
            //params.filterByInertia = false;
            //params.filterByConvexity = false;
            //printf( "minDistBetweenBlobs is %f\n", params.min );
            cv::Ptr<cv::FeatureDetector> pDetector = new BlobDetector( params );
            found = findCirclesGridAB( image, boardSize, imageCorners, cv::CALIB_CB_SYMMETRIC_GRID, pDetector );
            if ( !found )
            {
                // Retry at a different scale (up-scale small images, down-scale
                // large ones), then map detected points back to original pixels.
                cv::Mat scaled;
                float scale;
                if ( image.rows < 480 )
                {
                    scale = 2.0;
                }
                else
                {
                    scale = 0.25;
                }
                cv::resize( image, scaled, cv::Size( 0, 0 ), scale, scale );
                found = findCirclesGridAB( scaled, boardSize, imageCorners, cv::CALIB_CB_SYMMETRIC_GRID, pDetector );
                std::cout << "Found " << imageCorners.size() << " corners" << std::endl;
                for ( uint32_t i = 0; i < imageCorners.size(); i++ )
                {
                    imageCorners[ i ].x /= scale;
                    imageCorners[ i ].y /= scale;
                }
            }
            /*cv::namedWindow("Corners", CV_WINDOW_NORMAL); cv::drawChessboardCorners(image,boardSize,imageCorners,found); cv::imshow( "Corners", image ); cv::waitKey();*/
            //cv::destroyWindow("Corners");
        }
        if ( !found )
        {
            printf( "Warning: Unable to find corners in %s\n", ImageAddress.c_str() );
            continue;
        }
        //cv::drawChessboardCorners(colorimage1,boardSize,imageCorners,found);

        //If we have a good board, add it to our data
        if(imageCorners.size() == (uint32_t)boardSize.area())
        {
            //Add Image and scene points from one view
            imagePoints.push_back(imageCorners);
            objectPoints.push_back(objectCorners);
            std::cout << "Successfully found " << imageCorners.size() << " corners" << std::endl;
            successes++;
        }
        else
            std::cout << "Failed" << std::endl;

        //cv::resize(colorimage1,colorimage1,cv::Size(1024,768));
        //cv::resize(colorimage2,colorimage2,cv::Size(1024,768));
        //cv::imshow("Pre Sub Pixel",colorimage1);
        //cv::imshow("Post Sub Pixel",colorimage2);
        //cv::waitKey();
        //cv::destroyWindow("Pre Sub Pixel");
        //cv::destroyWindow("Post Sub Pixel");
    }

    std::vector<cv::Mat> rvecs,tvecs;
    // Initial intrinsic guess (focal length and principal point); distortion
    // terms K1..K3 are held fixed at zero below.
    // NOTE(review): the 5458 / 2272x1704 values look tied to one specific
    // camera -- confirm they match the images being calibrated.
    cameraMatrix = cv::Mat::eye( 3, 3, CV_64F );
    cameraMatrix.at<double>( 0, 0 ) = 5458.0;
    cameraMatrix.at<double>( 1, 1 ) = 5458.0;
    cameraMatrix.at<double>( 0, 2 ) = 2272.0/2.0;
    cameraMatrix.at<double>( 1, 2 ) = 1704.0/2.0;
    distCoeffs = cv::Mat::zeros( 8, 1, CV_64F );
    // Prints the RMS reprojection error returned by calibrateCamera.
    std::cout << cv::calibrateCamera( objectPoints,imagePoints,imageSize,cameraMatrix,distCoeffs,rvecs,tvecs,
        CV_CALIB_USE_INTRINSIC_GUESS
        //| CV_CALIB_FIX_PRINCIPAL_POINT
        //| CV_CALIB_FIX_ASPECT_RATIO
        // | CV_CALIB_ZERO_TANGENT_DIST
        | CV_CALIB_FIX_K1
        | CV_CALIB_FIX_K2
        | CV_CALIB_FIX_K3
        //| CV_CALIB_FIX_K4 | CV_CALIB_FIX_K5 | CV_CALIB_FIX_K6
        )<< std::endl;

    cv::FileStorage fs((cameraName + "_cameraMatrix.yml").c_str(), cv::FileStorage::WRITE);
    fs << "cameraMatrix" << cameraMatrix;
    fs << "distCoeffs" << distCoeffs;

    std::cout << cameraMatrix << std::endl;
    std::cout << "Finished Camera Calibration" << std::endl;
    return 0;
}
// Writes the plot's collected data to a tab-separated text file.
// Layout: a "#"-prefixed header row of object display names, then one row per
// data point for the BEFORE, DURING and AFTER activity phases in that order
// (missing columns are written as MissingValue), followed by the 1D histogram
// curves. Returns true when there was nothing to write or writing succeeded,
// false on any stream failure.
bool CopasiPlot::saveData(const std::string & filename)
{
  // No objects.
  if (!mObjects.size()) return true;

  // Find out whether we have any data.
  C_INT32 ItemActivity;

  for (ItemActivity = 0; ItemActivity < ActivitySize; ItemActivity++)
    if (mDataSize[ItemActivity] != 0) break;

  // No data
  if (ItemActivity == ActivitySize) return true;

  std::ofstream fs(CLocaleString::fromUtf8(filename).c_str());

  if (!fs.good()) return false;

  // Write the table header
  fs << "# ";

  std::vector< std::vector < const CCopasiObject * > >::const_iterator itX;
  std::vector< std::vector < const CCopasiObject * > >::const_iterator endX = mSaveCurveObjects.end();

  std::vector < const CCopasiObject * >::const_iterator it;
  std::vector < const CCopasiObject * >::const_iterator end;

  for (itX = mSaveCurveObjects.begin(); itX != endX; ++itX)
    for (it = itX->begin(), end = itX->end(); it != end; ++it)
      if (*it != NULL)
        fs << (*it)->getObjectDisplayName() << "\t";
      else
        fs << "Not found\t";

  fs << "\n";

  size_t i, imax = mObjects.size();

  // Data[i]   : the data vector a curve object resolves to (NULL if not found)
  // Offset[i] : index offset into that vector for the current phase
  std::vector< CVector< double > * > Data;
  Data.resize(imax);
  std::vector< CVector< double > * >::const_iterator itData;
  std::vector< CVector< double > * >::const_iterator endData = Data.end();

  std::vector< size_t > Offset;
  std::vector< size_t >::const_iterator itOffset;
  Offset.resize(imax);

  std::map< Activity, std::map< const CCopasiObject *, size_t > >::iterator itActivity;
  std::map< const CCopasiObject *, size_t >::iterator itObject;

  // --- BEFORE phase: resolve each object against every activity mask that
  // --- includes BEFORE, then emit mDataBefore rows.
  if (mDataBefore)
    {
      for (itX = mSaveCurveObjects.begin(), i = 0; itX != endX; ++itX)
        for (it = itX->begin(), end = itX->end(); it != end; ++it, ++i)
          {
            if ((itActivity = mObjectIndex.find(COutputInterface::BEFORE)) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE][itObject->second];
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::DURING))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::DURING][itObject->second];
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::AFTER][itObject->second];
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::DURING | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::DURING | COutputInterface::AFTER][itObject->second];
                continue;
              }

            Data[i] = NULL;
          }

      for (i = 0; i < mDataBefore; i++)
        {
          for (itData = Data.begin(); itData != endData; ++itData)
            {
              if (*itData)
                fs << (**itData)[i];
              else
                fs << MissingValue;

              fs << "\t";
            }

          fs << std::endl;
        }
    }

  // --- DURING phase: same resolution, but combined vectors need an offset to
  // --- skip the rows already consumed by BEFORE.
  if (mDataDuring)
    {
      for (itX = mSaveCurveObjects.begin(), i = 0; itX != endX; ++itX)
        for (it = itX->begin(), end = itX->end(); it != end; ++it, ++i)
          {
            if ((itActivity = mObjectIndex.find(COutputInterface::DURING)) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::DURING][itObject->second];
                Offset[i] = 0;
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::DURING))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::DURING][itObject->second];
                Offset[i] = mDataBefore;
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::DURING | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::DURING | COutputInterface::AFTER][itObject->second];
                Offset[i] = 0;
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::DURING | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::DURING | COutputInterface::AFTER][itObject->second];
                Offset[i] = mDataBefore;
                continue;
              }

            Data[i] = NULL;
          }

      // NOTE(review): itOffset is initialized but never advanced in this loop,
      // so every column uses Offset[0] -- looks like a missing `++itOffset`;
      // confirm against upstream before changing.
      for (i = 0; i < mDataDuring; i++)
        {
          for (itData = Data.begin(), itOffset = Offset.begin(); itData != endData; ++itData)
            {
              if (*itData)
                fs << (**itData)[i + *itOffset];
              else
                fs << MissingValue;

              fs << "\t";
            }

          fs << std::endl;
        }
    }

  // --- AFTER phase: offsets skip BEFORE and/or DURING rows in combined vectors.
  if (mDataAfter)
    {
      for (itX = mSaveCurveObjects.begin(), i = 0; itX != endX; ++itX)
        for (it = itX->begin(), end = itX->end(); it != end; ++it, ++i)
          {
            if ((itActivity = mObjectIndex.find(COutputInterface::AFTER)) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::AFTER][itObject->second];
                Offset[i] = 0;
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::AFTER][itObject->second];
                Offset[i] = mDataBefore;
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::DURING | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::DURING | COutputInterface::AFTER][itObject->second];
                Offset[i] = mDataDuring;
                continue;
              }

            if ((itActivity = mObjectIndex.find((COutputInterface::Activity)(COutputInterface::BEFORE | COutputInterface::DURING | COutputInterface::AFTER))) != mObjectIndex.end() &&
                (itObject = itActivity->second.find(*it)) != itActivity->second.end())
              {
                Data[i] = mData[COutputInterface::BEFORE | COutputInterface::DURING | COutputInterface::AFTER][itObject->second];
                Offset[i] = mDataBefore + mDataDuring;
                continue;
              }

            Data[i] = NULL;
          }

      // NOTE(review): same un-advanced itOffset pattern as in the DURING loop.
      for (i = 0; i < mDataAfter; i++)
        {
          for (itData = Data.begin(), itOffset = Offset.begin(); itData != endData; ++itData)
            {
              if (*itData)
                fs << (**itData)[i + *itOffset];
              else
                fs << MissingValue;

              fs << "\t";
            }

          fs << std::endl;
        }
    }

  // --- 1D histogram curves are appended after the tabular data.
  bool FirstHistogram = true;
  size_t HistogramIndex = 0;

  C2DPlotCurve ** itCurves = mCurves.array();
  C2DPlotCurve ** endCurves = itCurves + mCurves.size();

  for (; itCurves != endCurves; ++itCurves)
    {
      if ((*itCurves)->getType() == CPlotItem::histoItem1d)
        {
          if (FirstHistogram)
            {
              fs << "\n# The histograms: \n";
              FirstHistogram = false;
            }

          // NOTE(review): HistogramIndex is never incremented, so every
          // histogram uses mSaveHistogramObjects[0] -- verify intent.
          if (mSaveHistogramObjects[HistogramIndex] != NULL)
            fs << mSaveHistogramObjects[HistogramIndex]->getObjectDisplayName();
          else
            fs << "Not found";

          fs << std::endl;

          CHistoCurveData * pData = static_cast< CHistoCurveData * >(&(*itCurves)->data());
          // shadows the outer i/imax intentionally kept byte-identical
          size_t i, imax = pData->size();

          for (i = 0; i < imax; ++i)
            {
              fs << pData->x(i) << "\t" << pData->y(i) << "\n";
            }
        }
    }

  fs.close();

  if (!fs.good()) return false;

  return true;
}
/// Loads the resource with the given UUID, optionally reading it from @p filePath.
/// Handles three entry states: the resource is already fully loaded, it is
/// currently being loaded on a worker thread, or no load has been started yet.
/// @param UUID                  Unique identifier of the resource.
/// @param filePath              Disk location to read from; may be empty when the
///                              caller only wants a handle (e.g. in-memory resource).
/// @param synchronous           When true the call blocks until loading completes.
/// @param loadDependencies      When true, dependencies recorded in the saved
///                              resource data are loaded/queued as well.
/// @param keepInternalReference When true an internal reference is added so the
///                              resource stays alive without external handles.
/// @return Handle to the (possibly still-loading) resource.
/// NOTE(review): mInProgressResourcesMutex and mLoadedResourceMutex are taken in
/// several separate scopes; state may change between them, which is why the
/// in-progress/loaded maps are re-checked near the end before calling loadComplete.
HResource Resources::loadInternal(const String& UUID, const Path& filePath, bool synchronous, bool loadDependencies, bool keepInternalReference)
{
	HResource outputResource;
	bool alreadyLoading = false;
	bool loadInProgress = false;
	{
		// Check if resource is already being loaded on a worker thread
		Lock inProgressLock(mInProgressResourcesMutex);
		auto iterFind2 = mInProgressResources.find(UUID);
		if (iterFind2 != mInProgressResources.end())
		{
			LoadedResourceData& resData = iterFind2->second->resData;
			outputResource = resData.resource.lock();
			if (keepInternalReference)
			{
				resData.numInternalRefs++;
				outputResource.addInternalRef();
			}
			alreadyLoading = true;
			loadInProgress = true;
		}
		// Previously being loaded as async but now we want it synced, so we wait
		if (loadInProgress && synchronous)
			outputResource.blockUntilLoaded();
		if (!alreadyLoading)
		{
			Lock loadedLock(mLoadedResourceMutex);
			auto iterFind = mLoadedResources.find(UUID);
			if (iterFind != mLoadedResources.end()) // Resource is already loaded
			{
				LoadedResourceData& resData = iterFind->second;
				outputResource = resData.resource.lock();
				if (keepInternalReference)
				{
					resData.numInternalRefs++;
					outputResource.addInternalRef();
				}
				alreadyLoading = true;
			}
		}
	}
	// Not loaded and not in progress, start loading of new resource
	// (or if already loaded or in progress, load any dependencies)
	if (!alreadyLoading)
	{
		// Check if the handle already exists
		Lock lock(mLoadedResourceMutex);
		auto iterFind = mHandles.find(UUID);
		if (iterFind != mHandles.end())
			outputResource = iterFind->second.lock();
		else
		{
			outputResource = HResource(UUID);
			mHandles[UUID] = outputResource.getWeak();
		}
	}
	// We have nowhere to load from, warn and complete load if a file path was provided,
	// otherwise pass through as we might just want to load from memory.
	if (filePath.isEmpty())
	{
		if (!alreadyLoading)
		{
			LOGWRN_VERBOSE("Cannot load resource. Resource with UUID '" + UUID + "' doesn't exist.");
			// Complete the load as that the depedency counter is properly reduced, in case this
			// is a dependency of some other resource.
			loadComplete(outputResource);
			return outputResource;
		}
	}
	else if (!FileSystem::isFile(filePath))
	{
		LOGWRN_VERBOSE("Cannot load resource. Specified file: " + filePath.toString() + " doesn't exist.");
		// Complete the load as that the depedency counter is properly reduced, in case this
		// is a dependency of some other resource.
		loadComplete(outputResource);
		assert(!loadInProgress); // Resource already being loaded but we can't find its path now?
		return outputResource;
	}
	// Load dependency data if a file path is provided
	SPtr<SavedResourceData> savedResourceData;
	if (!filePath.isEmpty())
	{
		FileDecoder fs(filePath);
		savedResourceData = std::static_pointer_cast<SavedResourceData>(fs.decode());
	}
	// If already loading keep the old load operation active, otherwise create a new one
	if (!alreadyLoading)
	{
		{
			Lock lock(mInProgressResourcesMutex);
			ResourceLoadData* loadData = bs_new<ResourceLoadData>(outputResource.getWeak(), 0);
			mInProgressResources[UUID] = loadData;
			loadData->resData = outputResource.getWeak();
			if (keepInternalReference)
			{
				loadData->resData.numInternalRefs++;
				outputResource.addInternalRef();
			}
			loadData->remainingDependencies = 1;
			loadData->notifyImmediately = synchronous; // Make resource listener trigger before exit if loading synchronously
			// Register dependencies and count them so we know when the resource is fully loaded
			if (loadDependencies && savedResourceData != nullptr)
			{
				for (auto& dependency : savedResourceData->getDependencies())
				{
					if (dependency != UUID)
					{
						mDependantLoads[dependency].push_back(loadData);
						loadData->remainingDependencies++;
					}
				}
			}
		}
		if (loadDependencies && savedResourceData != nullptr)
		{
			const Vector<String>& dependencyUUIDs = savedResourceData->getDependencies();
			UINT32 numDependencies = (UINT32)dependencyUUIDs.size();
			Vector<HResource> dependencies(numDependencies);
			for (UINT32 i = 0; i < numDependencies; i++)
				dependencies[i] = loadFromUUID(dependencyUUIDs[i], !synchronous, true, false);
			// Keep dependencies alive until the parent is done loading
			{
				Lock lock(mInProgressResourcesMutex);
				// At this point the resource is guaranteed to still be in-progress, so it's safe to update its dependency list
				mInProgressResources[UUID]->dependencies = dependencies;
			}
		}
	}
	else if (loadDependencies && savedResourceData != nullptr) // Queue dependencies in case they aren't already loaded
	{
		const Vector<String>& dependencies = savedResourceData->getDependencies();
		if (!dependencies.empty())
		{
			{
				Lock lock(mInProgressResourcesMutex);
				ResourceLoadData* loadData = nullptr;
				auto iterFind = mInProgressResources.find(UUID);
				if (iterFind == mInProgressResources.end()) // Fully loaded
				{
					loadData = bs_new<ResourceLoadData>(outputResource.getWeak(), 0);
					loadData->resData = outputResource.getWeak();
					loadData->remainingDependencies = 0;
					loadData->notifyImmediately = synchronous; // Make resource listener trigger before exit if loading synchronously
					mInProgressResources[UUID] = loadData;
				}
				else
				{
					loadData = iterFind->second;
				}
				// Register dependencies and count them so we know when the resource is fully loaded
				for (auto& dependency : dependencies)
				{
					if (dependency != UUID)
					{
						bool registerDependency = true;
						auto iterFind2 = mDependantLoads.find(dependency);
						if (iterFind2 != mDependantLoads.end())
						{
							Vector<ResourceLoadData*>& dependantData = iterFind2->second;
							auto iterFind3 = std::find_if(dependantData.begin(), dependantData.end(),
								[&](ResourceLoadData* x)
							{
								return x->resData.resource.getUUID() == outputResource.getUUID();
							});
							registerDependency = iterFind3 == dependantData.end();
						}
						if (registerDependency)
						{
							mDependantLoads[dependency].push_back(loadData);
							loadData->remainingDependencies++;
							loadData->dependencies.push_back(_getResourceHandle(dependency));
						}
					}
				}
			}
			for (auto& dependency : dependencies)
				loadFromUUID(dependency, !synchronous, true, false);
		}
	}
	// Actually start the file read operation if not already loaded or in progress
	if (!alreadyLoading && !filePath.isEmpty())
	{
		// Synchronous or the resource doesn't support async, read the file immediately
		if (synchronous || !savedResourceData->allowAsyncLoading())
		{
			loadCallback(filePath, outputResource);
		}
		else // Asynchronous, read the file on a worker thread
		{
			String fileName = filePath.getFilename();
			String taskName = "Resource load: " + fileName;
			SPtr<Task> task = Task::create(taskName, std::bind(&Resources::loadCallback, this, filePath, outputResource));
			TaskScheduler::instance().addTask(task);
		}
	}
	else // File already loaded or in progress
	{
		// Complete the load unless its in progress in which case we wait for its worker thread to complete it.
		// In case file is already loaded this will only decrement dependency count in case this resource is a dependency.
		if (!loadInProgress)
			loadComplete(outputResource);
		else
		{
			// In case loading finished in the meantime we cannot be sure at what point ::loadComplete was triggered,
			// so trigger it manually so that the dependency count is properly decremented in case this resource
			// is a dependency.
			Lock lock(mLoadedResourceMutex);
			auto iterFind = mLoadedResources.find(UUID);
			if (iterFind != mLoadedResources.end())
				loadComplete(outputResource);
		}
	}
	return outputResource;
}
void Helper::loadDescriptorsFromFile(const char* file, cv::Mat &descriptors) { cv::FileStorage fs(file, cv::FileStorage::READ); fs["descriptors"] >> descriptors; fs.release(); }
// Entry point of the AR face-tracking demo: loads camera intrinsics, grabs
// webcam frames, detects a face with Chehra (chessboard detection is commented
// out), estimates its pose and renders a 3D model over the video with
// OpenSceneGraph in two viewers (camera overlay + external side view).
// NOTE(review): `void main()` is non-standard C++ — the standard requires
// `int main()`. Left unchanged in this documentation-only pass.
// NOTE(review): the reprojection-error loop below iterates up to
// pointsVisageInit[1].size() while indexing pointsVisageInit[0] — confirm
// trackingMire keeps both vectors the same length.
void main()
{
	bool patternfound = false;
	bool reset = false;
	bool resetAuto = false;
	int nbImages = 0;
	double moyFinale = 0;
	char key = 0;
	bool detectionMire = false;
	bool detectionVisage = false;
	int cpt = 0, moyCpt = 0, i = 0;
	std::cout << "initialisation de Chehra..." << std::endl;
	Chehra chehra;
	std::cout << "done" << std::endl;
	cv::TermCriteria termcrit(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03);
	cv::Size winSize(31, 31);
	cv::Mat cameraMatrix, distCoeffs;
	cv::Mat imCalib;
	cv::Mat imCalibColor;
	cv::Mat imCalibNext;
	cv::Mat rvecs, tvecs;
	cv::Mat Rc, C = cv::Mat(3, 1, CV_64F), rotVecInv;
	std::vector<cv::Point2f> imagePoints;
	std::vector<cv::Point3f> objectPoints;
	std::vector<cv::Point3f> cubeObjectPoints;
	std::vector<cv::Point3f> dessinPointsVisage;
	std::vector<std::vector<cv::Point2f>> chessCornersInit(2);
	std::vector<std::vector<cv::Point2f>> pointsVisageInit(2);
	std::vector<cv::Point3f> chessCorners3D;
	std::vector<cv::Point3f> pointsVisage3D;
	std::vector<cv::Point3f> visage;
	std::vector<double> distances;
	double moyDistances;
	// Build the 3D corners of the calibration chessboard
	for(int x = 0; x < COLCHESSBOARD; x++)
		for(int y = 0; y < ROWCHESSBOARD; y++)
			chessCorners3D.push_back(cv::Point3f(x * SIZEMIRE, y * SIZEMIRE, 0.0f));
	// Build the 3D points to be projected
	for(int x = 0; x < COLCHESSBOARD; x++)
		for(int y = 0; y < ROWCHESSBOARD; y++)
			objectPoints.push_back(cv::Point3f(x * SIZEMIRE, y * SIZEMIRE, 0.0f));
	cv::FileStorage fs("../rsc/intrinsicMatrix.yml", cv::FileStorage::READ);
	fs["cameraMatrix"] >> cameraMatrix;
	fs["distCoeffs"] >> distCoeffs;
	// NEAR = focal length; with square pixels fx == fy, but fx generally
	// differs from fy, so (for now) the mean value is used by default.
	double f = (cameraMatrix.at<double>(0, 0) + cameraMatrix.at<double>(1, 1)) / 2;
	double g = 2000 * f; // no idea why; value picked arbitrarily
	fs.release();
	cv::VideoCapture vcap(0);
	if(!vcap.isOpened()){
		std::cout << "FAIL!" << std::endl;
		return;
	}
	cv::Mat *frame = new cv::Mat(cv::Mat::zeros(vcap.get(CV_CAP_PROP_FRAME_HEIGHT), vcap.get(CV_CAP_PROP_FRAME_WIDTH), CV_8UC3));
	do
	{
		vcap >> *frame;
	}while(frame->empty());
	osg::ref_ptr<osg::Image> backgroundImage = new osg::Image;
	backgroundImage->setImage(frame->cols, frame->rows, 3, GL_RGB, GL_BGR, GL_UNSIGNED_BYTE, (uchar*)(frame->data), osg::Image::AllocationMode::NO_DELETE, 1);
	// read the scene from the list of file specified commandline args.
	osg::ref_ptr<osg::Group> group = new osg::Group;
	osg::ref_ptr<osg::Geode> cam = createHUD(backgroundImage, vcap.get(CV_CAP_PROP_FRAME_WIDTH), vcap.get(CV_CAP_PROP_FRAME_HEIGHT), cameraMatrix.at<double>(0, 2), cameraMatrix.at<double>(1, 2), f);
	std::cout << "initialisation de l'objet 3D..." << std::endl;
	osg::ref_ptr<osg::Node> objet3D = osgDB::readNodeFile("../rsc/objets3D/Creature.obj");
	std::cout << "done" << std::endl;
	osg::StateSet* obectStateset = objet3D->getOrCreateStateSet();
	obectStateset->setMode(GL_DEPTH_TEST,osg::StateAttribute::OFF);
	osg::ref_ptr<osg::MatrixTransform> mat = new osg::MatrixTransform();
	osg::ref_ptr<osg::PositionAttitudeTransform> pat = new osg::PositionAttitudeTransform();
	// construct the viewer.
	osgViewer::CompositeViewer compositeViewer;
	osgViewer::View* viewer = new osgViewer::View;
	osgViewer::View* viewer2 = new osgViewer::View;
	// add the HUD subgraph.
	group->addChild(cam);
	mat->addChild(objet3D);
	pat->addChild(mat);
	group->addChild(pat);
	pat->setScale(osg::Vec3d(3, 3, 3));
	osg::Matrixd projectionMatrix;
	projectionMatrix.makeFrustum(
		-cameraMatrix.at<double>(0, 2), vcap.get(CV_CAP_PROP_FRAME_WIDTH) - cameraMatrix.at<double>(0, 2),
		-cameraMatrix.at<double>(1, 2), vcap.get(CV_CAP_PROP_FRAME_HEIGHT) - cameraMatrix.at<double>(1, 2),
		f, g);
	osg::Vec3d eye(0.0f, 0.0f, 0.0f), target(0.0f, g, 0.0f), normal(0.0f, 0.0f, 1.0f);
	// set the scene to render
	viewer->setSceneData(group.get());
	viewer->setUpViewInWindow(0, 0, 1920 / 2, 1080 / 2);
	viewer->getCamera()->setProjectionMatrix(projectionMatrix);
	viewer->getCamera()->setViewMatrixAsLookAt(eye, target, normal);
	viewer2->setSceneData(group.get());
	viewer2->setUpViewInWindow(1920 / 2, 0, 1920 / 2, 1080 / 2);
	viewer2->getCamera()->setProjectionMatrix(projectionMatrix);
	osg::Vec3d eye2(4 * f, 3 * f / 2, 0.0f), target2(0.0f, f, 0.0f), normal2(0.0f, 0.0f, 1.0f);
	viewer2->getCamera()->setViewMatrixAsLookAt(eye2, target2, normal2);
	compositeViewer.addView(viewer);
	compositeViewer.addView(viewer2);
	compositeViewer.realize();
	// set up windows and associated threads.
	do
	{
		// Reset all per-attempt state before searching for a new pattern/face
		group->removeChild(pat);
		patternfound = false;
		resetAuto = false;
		detectionMire = false;
		detectionVisage = false;
		imagePoints.clear();
		chessCornersInit[0].clear();
		chessCornersInit[1].clear();
		pointsVisageInit[0].clear();
		pointsVisageInit[1].clear();
		pointsVisage3D.clear();
		dessinPointsVisage.clear();
		visage.clear();
		moyDistances = 0;
		distances.clear();
		imCalibNext.release();
		std::cout << "recherche de pattern" << std::endl;
		time_t start = clock();
		double timer = 0;
		do
		{
			// Detection loop: grab frames until a face (or chessboard) is found
			start = clock();
			vcap >> *frame;
			backgroundImage->dirty();
			//detectionMire = detecterMire(frame, &chessCornersInit[1], &imCalibNext);
			detectionVisage = detecterVisage(frame, &chehra, &pointsVisageInit[1], &visage, &pointsVisage3D, &imCalibNext);
			cpt++;
			double duree = (clock() - start)/(double) CLOCKS_PER_SEC;
			timer += duree;
			if(timer >= 1){
				std::cout << cpt << " fps" << std::endl;
				moyCpt += cpt;
				timer = 0;
				duree = 0;
				i++;
				cpt = 0;
				start = clock();
			}
			compositeViewer.frame();
		}while(!detectionMire && !detectionVisage && !compositeViewer.done());
		if(compositeViewer.done())
			break;
		std::cout << "pattern detectee" << std::endl << std::endl;
		group->addChild(pat);
		do
		{
			// Tracking loop: estimate pose, place the 3D model, watch drift
			start = clock();
			vcap >> *frame;
			cv::Mat rotVec = trackingMire(frame, &imCalibNext, &pointsVisageInit, &pointsVisage3D, &cameraMatrix, &distCoeffs, &tvecs);
			//cv::Mat rotVec = trackingMire(frame, &imCalibNext, &chessCornersInit, &chessCorners3D, &cameraMatrix, &distCoeffs, &tvecs);
			//imagePoints = dessinerPoints(frame, objectPoints, rotVec, tvecs, cameraMatrix, distCoeffs);
			imagePoints = dessinerPoints(frame, pointsVisage3D, rotVec, tvecs, cameraMatrix, distCoeffs);
			double r11 = rotVec.at<double>(0, 0);
			double r21 = rotVec.at<double>(1, 0);
			double r31 = rotVec.at<double>(2, 0);
			double r32 = rotVec.at<double>(2, 1);
			double r33 = rotVec.at<double>(2, 2);
			osg::Matrixd matrixR;
			matrixR.makeRotate(
				atan2(r32, r33), osg::Vec3d(1.0, 0.0, 0.0),
				-atan2(-r31, sqrt((r32 * r32) + (r33 * r33))), osg::Vec3d(0.0, 0.0, 1.0),
				atan2(r21, r11), osg::Vec3d(0.0, 1.0, 0.0));
			mat->setMatrix(matrixR);
			pat->setPosition(osg::Vec3d(tvecs.at<double>(0, 0), tvecs.at<double>(2, 0), -tvecs.at<double>(1, 0)));
			//std::cout << "x = " << tvecs.at<double>(0, 0) << " - y = " << tvecs.at<double>(1, 0) << " - z = " << tvecs.at<double>(2, 0) << std::endl;
			// Reprojection error computation
			double moy = 0;
			for(int j = 0; j < pointsVisageInit[1].size() ; j++)
			{
				double d = sqrt(pow(pointsVisageInit[0][j].y - imagePoints[j].y, 2) + pow(pointsVisageInit[0][j].x - imagePoints[j].x, 2));
				distances.push_back(d);
				moy += d;
			}
			moyDistances = moy / pointsVisageInit[1].size();
			if(moyDistances > 1) // if the reprojection error is too large, reset
				resetAuto = true;
			double duree = (clock() - start)/(double) CLOCKS_PER_SEC;
			std::cout << (int)(1/duree) << " fps" << std::endl;
			moyCpt += (int)(1/duree);
			duree = 0;
			i++;
			backgroundImage->dirty();
			compositeViewer.frame();
		}while(!compositeViewer.done() && !resetAuto);
	}while(!compositeViewer.done());
	std::cout << std::endl << "Moyenne des fps : " << moyCpt/i << std::endl;
	std::system("PAUSE");
}
// Builds the bag-of-words visual vocabulary: concatenates the per-video
// feature matrices of every training person/action/scale (capping each person
// at N_max randomly chosen columns), converts the pooled Armadillo matrix to a
// float OpenCV matrix, k-means-clusters it into N_cent centers and writes the
// resulting dictionary to a YAML file under the "vocabulary" key.
// @param N_cent           number of cluster centers (vocabulary size).
// @param path_run_folders root folder containing the per-run feature files.
inline void BoW::create_vocabulary(int N_cent, const string path_run_folders)
{
	cout << "Calculating Vocabulary " << endl;
	cout << "# clusters: " << N_cent << endl;
	mat uni_features;
	for (uword pe=0; pe<peo_train.n_rows; ++pe)
	{
		mat mat_features_tmp;
		mat mat_features;
		for (uword act = 0 ; act < actions.n_rows; ++act)
		{
			for (uword sc = 1 ; sc <= 4; ++sc)
			{
				mat mat_features_video_i;
				std::stringstream ssName_feat_video;
				//ssName_feat_video << "./run"<< run <<"/features/train/feat_vec" << peo_train(pe) << "_" << actions(act) << "_d" << sc;
				ssName_feat_video << path_run_folders <<"/features_all_nor/feat_vec_" << peo_train(pe) << "_" << actions(act) << "_d" << sc;
				mat_features_video_i.load( ssName_feat_video.str() );
				if ( mat_features_video_i.n_cols>0 )
				{
					mat_features_tmp = join_rows( mat_features_tmp, mat_features_video_i );
				}
				else
				{
					cout << "# vectors = 0 in " << ssName_feat_video.str() << endl;
				}
			}
		}
		cout << "mat_features_tmp.n_cols "<< mat_features_tmp.n_cols << endl;
		const uword N_max = 100000; // maximum number of vectors per action to create universal GMM
		//const uword N_max = 100000; //???
		if (mat_features_tmp.n_cols > N_max)
		{
			// Subsample: N_max random (deduplicated) column indices
			ivec tmp1 = randi( N_max, distr_param(0,mat_features_tmp.n_cols-1) );
			ivec tmp2 = unique(tmp1);
			uvec my_indices = conv_to<uvec>::from(tmp2);
			mat_features = mat_features_tmp.cols(my_indices); // extract a subset of the columns
		}
		else
		{
			mat_features = mat_features_tmp;
		}
		cout << "mat_features.n_cols "<< mat_features.n_cols << endl;
		if ( mat_features.n_cols>0 )
		{
			uni_features = join_rows( uni_features, mat_features );
		}
		else
		{
			cout << "# vectors = 0 in uni_features" << endl;
		}
		//uni_features = join_rows( uni_features, mat_features );
		mat_features_tmp.reset();
		mat_features.reset();
	}
	cout << "r&c "<< uni_features.n_rows << " & " << uni_features.n_cols << endl;
	bool is_finite = uni_features.is_finite();
	if (!is_finite )
	{
		// Non-finite values would break k-means; pause so the operator notices
		cout << "is_finite?? " << is_finite << endl;
		cout << uni_features.n_rows << " " << uni_features.n_cols << endl;
		getchar();
	}
	fmat f_uni_features = conv_to< fmat >::from(uni_features);
	//uni_features.reset();
	// Wrap the Armadillo buffer (column-major) as an OpenCV matrix of
	// f_uni_features.n_cols rows x dim cols without copying.
	cv::Mat featuresUnclustered(f_uni_features.n_cols, dim, CV_32FC1, f_uni_features.memptr() );
	//cv::Mat featuresUnclustered( featuresUnclusteredTMP.t() );
	int rows = featuresUnclustered.rows;
	int cols = featuresUnclustered.cols;
	cout << "OpenCV rows & cols " << rows << " & " << cols << endl;
	//cout << "Press a Key" << endl;
	//getchar();
	//cout << f_uni_features.col(1000) << endl;
	//cout << uni_features.col(1000) << endl;
	//cout << featuresUnclustered.row(1000) << endl;
	//cout << "Press a Key" << endl;
	//getchar();
	//Construct BOWKMeansTrainer
	//the number of bags
	int dictionarySize = N_cent;
	//define Term Criteria
	cv::TermCriteria tc(CV_TERMCRIT_ITER,100,0.001);
	//retries number
	int retries=1;
	//necessary flags
	int flags=cv::KMEANS_PP_CENTERS;
	//Create the BoW (or BoF) trainer
	cv::BOWKMeansTrainer bowTrainer(dictionarySize,tc,retries,flags);
	//cluster the feature vectors
	cv::Mat dictionary = bowTrainer.cluster(featuresUnclustered);
	//Displaying # of Rows&Cols
	int rows_dic = dictionary.rows;
	int cols_dic = dictionary.cols;
	cout << "OpenCV Dict rows & cols " << rows_dic << " & " << cols_dic << endl;
	//store the vocabulary
	std::stringstream name_vocabulary;
	name_vocabulary << "./run"<< run <<"/visual_vocabulary/means_Ng" << N_cent << "_dim" <<dim << "_all_sc" << ".yml";
	cv::FileStorage fs(name_vocabulary.str(), cv::FileStorage::WRITE);
	fs << "vocabulary" << dictionary;
	fs.release();
	cout << "DONE"<< endl;
}
// Writes the calibration of every camera in the input CameraSeries to an
// OpenCV FileStorage file: per camera its id, description, image size,
// intrinsic 3x3 matrix, 5 distortion coefficients, scale and — when present —
// its 4x4 extrinsic matrix. Asks for an output location via a dialog when
// none is configured, and clears it again afterwards in that case.
void SOpenCVWriter::updating()
{
	::arData::CameraSeries::csptr camSeries = this->getInput< ::arData::CameraSeries >(::fwIO::s_DATA_KEY);
	SLM_ASSERT("CameraSeries is null", camSeries);
	bool use_dialog = false;
	// Ask the user for a file location if none was configured
	if(!this->hasLocationDefined())
	{
		use_dialog = this->defineLocationGUI();
		if(!use_dialog)
		{
			return;
		}
	}
	::fwData::mt::ObjectReadLock lock(camSeries);
	size_t numberOfCameras = camSeries->getNumberOfCameras();
	std::vector< ::arData::Camera::sptr > cameras;
	std::vector< ::cv::Mat > cameraMatrices;
	std::vector< ::cv::Mat > cameraDistCoefs;
	// Set the cameras
	::fwData::TransformationMatrix3D::sptr extrinsicMatrix;
	::cv::Mat extrinsic = ::cv::Mat::eye(4, 4, CV_64F);
	// NOTE(review): this is a second read lock on the same camSeries object as
	// `lock` above — confirm the double lock is intentional and re-entrant.
	::fwData::mt::ObjectReadLock camSeriesLock(camSeries);
	for(size_t i = 0; i < numberOfCameras; ++i)
	{
		// Build the 3x3 intrinsic matrix and 5x1 distortion vector per camera
		cameras.push_back(camSeries->getCamera(i));
		cameraMatrices.push_back(::cv::Mat::eye(3, 3, CV_64F));
		cameraDistCoefs.push_back(::cv::Mat::eye(5, 1, CV_64F));
		cameraMatrices[i].at<double>(0, 0) = cameras[i]->getFx();
		cameraMatrices[i].at<double>(1, 1) = cameras[i]->getFy();
		cameraMatrices[i].at<double>(0, 2) = cameras[i]->getCx();
		cameraMatrices[i].at<double>(1, 2) = cameras[i]->getCy();
		for(std::uint8_t c = 0; c < 5; ++c)
		{
			cameraDistCoefs[i].at<double>(c, 0) = cameras[i]->getDistortionCoefficient()[c];
		}
	}
	::cv::FileStorage fs(this->getFile().string().c_str(), ::cv::FileStorage::WRITE);
	fs << "nbCameras"<< static_cast<int>(numberOfCameras);
	for( size_t c = 0; c < numberOfCameras; ++c)
	{
		std::stringstream camNum;
		camNum << "camera_"<< c;
		fs << camNum.str() << "{";
		fs << "id"<<camSeries->getCamera(c)->getCameraID().c_str();
		fs << "description" << camSeries->getCamera(c)->getDescription().c_str();
		fs << "imageWidth" << static_cast< int> (camSeries->getCamera(c)->getWidth());
		fs << "imageHeight" << static_cast< int >(camSeries->getCamera(c)->getHeight());
		fs << "matrix" << cameraMatrices[c];
		fs << "distortion" << cameraDistCoefs[c];
		fs << "scale" << camSeries->getCamera(c)->getScale();
		extrinsicMatrix = camSeries->getExtrinsicMatrix(c);
		if(extrinsicMatrix)
		{
			for(std::uint8_t i = 0; i < 4; ++i)
			{
				for(std::uint8_t j = 0; j < 4; ++j)
				{
					extrinsic.at< double >(i, j) = extrinsicMatrix->getCoefficient(i, j);
				}
			}
			fs << "extrinsic"<< extrinsic;
		}
		fs << "}";
	}
	fs.release();
	//clear locations only if it was configured through GUI.
	if(use_dialog)
	{
		this->clearLocations();
	}
}
//Testing
// Computes per-segment BoW histograms for the test set. For every scale and
// test person it slides a window of `segm_length` frames over the per-frame
// feature vectors, matches each feature to its nearest vocabulary word with a
// brute-force L2 matcher, builds a max-normalized histogram of word hits and
// saves it as an HDF5 file.
// @param N_cent           vocabulary size (number of histogram bins).
// @param path_run_folders root folder containing the per-run feature files.
// @param segm_length      number of frames per sliding window.
// NOTE(review): the BFMatcher is re-created with the same dictionary for every
// window; hoisting it out of the inner loop would avoid the repeated setup.
inline void BoW::create_histograms_testing(int N_cent, const string path_run_folders, int segm_length)
{
	//prepare BOW descriptor extractor from the dictionary
	cv::Mat dictionary;
	std::stringstream name_vocabulary;
	name_vocabulary << "./run"<< run <<"/visual_vocabulary/means_Ng" << N_cent << "_dim" <<dim << "_all_sc" << ".yml";
	cout << name_vocabulary.str() << endl;
	cv::FileStorage fs(name_vocabulary.str(), cv::FileStorage::READ);
	fs["vocabulary"] >> dictionary;
	fs.release();
	//cout << "Loaded" << endl;
	mat multi_features;
	vec real_labels, fr_idx, fr_idx_2;
	for (uword sc = 1 ; sc <= 4; ++sc)
	{
		for (uword pe=0; pe<peo_test.n_rows; ++pe)
		{
			//Loading matrix with all features (for all frames)
			std::stringstream ssName_feat_video;
			//ssName_feat_video << "./run"<< run <<"/features/train/feat_vec" << peo_train(pe) << "_" << actions(act) << "_d" << sc;
			ssName_feat_video << path_run_folders << "/run" << run << "/multi_features/feat_" << peo_test(pe) << "_d" << sc << ".dat";
			multi_features.load( ssName_feat_video.str() );
			cout << ssName_feat_video.str() << endl;
			//Loading labels. In a frame basis
			std::stringstream ssload_name_lab;
			ssload_name_lab << path_run_folders << "/run" << run << "/multi_features/lab_" << peo_test(pe) << "_d" << sc << ".dat";
			real_labels.load( ssload_name_lab.str() );
			int n_frames = real_labels.n_elem;
			//Loading frame index for each of the feature vector in feat_video
			std::stringstream ssload_name_fr_idx;
			ssload_name_fr_idx << path_run_folders << "/run" << run << "/multi_features/fr_idx_" << peo_test(pe) << "_d" << sc << ".dat";
			fr_idx.load( ssload_name_fr_idx.str() );
			// Only the even frame indices are used: 2,4,6...
			fr_idx_2 = fr_idx/2; // Starts at one
			for (int f=1; f<=n_frames - segm_length; ++f)
			{
				// Gather all feature columns whose frame index falls in [ini, fin]
				int ini = f;
				int fin = ini + segm_length;
				mat feat_frame_fr;
				for (int i=ini; i<=fin; ++i)
				{
					uvec q1 = find(fr_idx_2 == i);
					//cout << "ini " << ini << ". q1 " << q1.n_elem << endl;
					//getchar();
					mat sub_multi_features;
					sub_multi_features = multi_features.cols(q1);
					feat_frame_fr = join_rows( feat_frame_fr, sub_multi_features );
				}
				// Build the histogram here and save it
				fmat f_feat_frame_fr = conv_to< fmat >::from(feat_frame_fr);
				feat_frame_fr.reset();
				cv::Mat features_segm_f_OpenCV(f_feat_frame_fr.n_cols, dim, CV_32FC1, f_feat_frame_fr.memptr() );
				int rows = features_segm_f_OpenCV.rows;
				int cols = features_segm_f_OpenCV.cols;
				//cout << "Features rows & cols " << rows << " & " << cols << endl;
				// init the matcher with you pre-trained codebook
				cv::Ptr<cv::DescriptorMatcher > matcher = new cv::BFMatcher(cv::NORM_L2);
				matcher->add(std::vector<cv::Mat>(1, dictionary));
				// matches
				std::vector<cv::DMatch> matches;
				matcher->match(features_segm_f_OpenCV,matches);
				//cout << matches.size() << endl;
				// See: http://ttic.uchicago.edu/~mostajabi/Tutorial.html
				vec hist;
				hist.zeros(N_cent) ;
				for (int i=0; i< matches.size(); ++i)
				{
					int bin = matches[i].trainIdx ;
					hist(bin)++;
				}
				hist = hist/hist.max();
				std::stringstream ssName_hist;
				ssName_hist << "./run"<<run << "/multi_Histograms_BoW_OpenCV/multi_hist_" << peo_test(pe) << "_d" << sc << "_Ng"<< N_cent << "fr_" << ini << "_" << fin << ".h5";
				//cout << ssName_hist.str() << endl;
				hist.save(ssName_hist.str(), hdf5_binary);
			}
		}
	}
}
// Application constructor (Symbian/Qt hybrid): shows the splash screen,
// detects the phone UI language, performs first-start cleanup (presence of the
// KFirstStart marker file wipes the config), loads the configured key bindings
// from QSettings (falling back to scan code 180, the menu key) and, on
// Symbian, enumerates all installed applications into sorted models for the
// QML UI; on other platforms the model is filled with placeholder numbers.
Application::Application(QmlApplicationViewer& v,QObject *parent) :
	QObject(parent),viewer(v)
{
	TBuf<255> p1(_L("C:\\System\\JellyUnlock\\splash_h.jpg"));
	TBuf<255> p2(_L("C:\\System\\JellyUnlock\\splash_v.jpg"));
	splash=CSplashScreen::NewL(p2,p1);
	int lang=1;
#ifdef Q_OS_SYMBIAN
	lang=User::Language();
#endif
	loc=new Localizer(lang,this);
	// First-start detection: the marker file's existence resets everything
	QFile fs(KFirstStart);
	isFirstStart=false;
	if (fs.exists())
	{
		fs.remove();
		QFile file(KConfigFile);
		if (file.exists()){file.remove();}
		isFirstStart=true;
	}
	settings=new QSettings(KConfigFile,QSettings::IniFormat);
	// Map of logical key slots to Symbian scan codes
	keyMap.clear();
	keyMap.insert(0,180); // menu
	keyMap.insert(1,179); // lock
	keyMap.insert(2,166); // power
	keyMap.insert(3,196); // green
	keyMap.insert(4,197); // red
	keyMap.insert(5,171); // camera
	keyMap.insert(6,226); // light camera
	keys.clear();
	settings->beginGroup("keys");
	QStringList sets=settings->allKeys();
	for (int i=0;i<sets.length();i++)
	{
		keys.append(settings->value(sets[i],-1).toInt());
	}
	settings->endGroup();
	if (keys.length()==0){keys.append(180);} // default to the menu key
#ifdef Q_OS_SYMBIAN
	// Enumerate installed applications; skip entries without a usable icon
	// or with a caption shorter than 2 characters.
	RApaLsSession AppSession;
	AppSession.Connect();
	TApaAppInfo appInfo;
	AppSession.GetAllApps();
	TBuf<255> UidTxt;
	while (AppSession.GetNextApp(appInfo)==KErrNone)
	{
		HBufC* fn;
		if (AppSession.GetAppIcon(appInfo.iUid,fn)!=KErrNone){continue;}
		if (fn){delete fn;}
		if (appInfo.iCaption.Length()<2){continue;}
		TApplicationInfo info;
		info.iCaption=appInfo.iCaption;
		info.iUid=appInfo.iUid;
		apps.Append(info);
		fullApps.Append(info);
	}
	AppSession.Close();
	TLinearOrder<TApplicationInfo> sortOrder(TApplicationInfo::Compare);
	fullApps.Sort(sortOrder);
	apps.Sort(sortOrder);
	for (int i=0; i<fullApps.Count();i++)
	{
		appModel<<QString::fromRawData(reinterpret_cast<const QChar*>(apps[i].iCaption.Ptr()),apps[i].iCaption.Length());
	}
#else
	for (int i=0; i<20;i++)
	{
		appModel<<QString::number(i);
	}
#endif
}
// Persists this detector's state into the named OpenCV storage file.
CV_WRAP void write( const string& fileName ) const
{
	FileStorage storage(fileName, FileStorage::WRITE);
	// Cast away constness to reuse the non-const FeatureDetector::write.
	const_cast<FeatureDetector*>(static_cast<const FeatureDetector*>(this))->write(storage);
	storage.release();
}
bool UNICOREClient::uasStartJob(){ std::string state, faultstring; logger.msg(INFO, "Creating and sending a start job request"); PayloadSOAP req(unicore_ns); XMLNode SOAPMethod = req.NewChild("jms:Start"); WSAHeader(req).To(rurl.str()); WSAHeader(req).Action("http://schemas.ggf.org/bes/2006/08/bes-activity/BESActivityPortType/StartRequest"); // Send status request PayloadSOAP *resp = NULL; if (client) { MCC_Status status = client->process("http://schemas.ggf.org/bes/2006/08/bes-activity/BESActivityPortType/StartRequest", &req, &resp); if (resp == NULL) { logger.msg(VERBOSE, "There was no SOAP response"); return false; } } else if (client_entry) { Message reqmsg; Message repmsg; MessageAttributes attributes_req; attributes_req.set("SOAP:ACTION", "http://schemas.ggf.org/bes/2006/08/bes-activity/BESActivityPortType/StartRequest"); MessageAttributes attributes_rep; MessageContext context; reqmsg.Payload(&req); reqmsg.Attributes(&attributes_req); reqmsg.Context(&context); repmsg.Attributes(&attributes_rep); repmsg.Context(&context); MCC_Status status = client_entry->process(reqmsg, repmsg); if (!status) { logger.msg(ERROR, "A start job request failed"); return false; } logger.msg(INFO, "A start job request succeeded"); if (repmsg.Payload() == NULL) { logger.msg(VERBOSE, "There was no response to a start job request"); return false; } try { resp = dynamic_cast<PayloadSOAP*>(repmsg.Payload()); } catch (std::exception&) {} if (resp == NULL) { logger.msg(ERROR, "The response of a start job request was " "not a SOAP message"); delete repmsg.Payload(); return false; } } else { logger.msg(ERROR, "There is no connection chain configured"); return false; } SOAPFault fs(*resp); if (!fs) { return true; } else { faultstring = fs.Reason(); std::string s; resp->GetXML(s); delete resp; logger.msg(DEBUG, "Submission returned failure: %s", s); logger.msg(ERROR, "Submission failed, service returned: %s", faultstring); return false; } }
// Restores this detector's state from the root node of the named storage file.
CV_WRAP void read( const string& fileName )
{
	FileStorage storage(fileName, FileStorage::READ);
	static_cast<FeatureDetector*>(this)->read(storage.root());
	storage.release();
}
// Writes the camera-to-projector extrinsics (rotation and translation
// vectors) to the given file; the FileStorage flushes and closes itself
// when it goes out of scope.
void CameraProjectorCalibration::saveExtrinsics(string filename, bool absolute) const {
	cv::FileStorage storage(ofToDataPath(filename, absolute), cv::FileStorage::WRITE);
	storage << "Rotation_Vector" << rotCamToProj
	        << "Translation_Vector" << transCamToProj;
}
// Drives a multi-segment HTTP download. Prepares the temporary data/info
// files (_kt / _kti), starts up to m_nCocurrent segment streams (the first
// reuses the already-open @p http connection), then loops waiting on the stop
// event plus every running segment's handle, feeding received data to the
// file, restarting failed segments from alternate URLs, and finally either
// renames the temp file into place or saves resume information.
// @param request_pool pool supplying HTTP connections for extra segments.
// @param http         the already-connected stream used for the first segment.
// @return ProcessState_Finished / _Failed / _UserCanceled (or _Idle if the
//         initial state is never overwritten).
ProcessState CDownload::_StartSegments( CRequestPool &request_pool, CHttpAsyncInPtr http )
{
	ProcessState state = ProcessState_Idle;
	CString strTmpFile = m_strFilePath + DOWNLOAD_TEMP_FILE_SUFFIX;
	CString strTmpInfoFile = m_strFilePath + DOWNLOAD_TEMP_FILE_INFO_SUFFIX;
	// Guard against only one of the _kt / _kti files existing
	if(PathFileExists(strTmpInfoFile) && !PathFileExists(strTmpFile))
	{
		DeleteFile(strTmpFile);
		DeleteFile(strTmpInfoFile);
	}
	CSegmentPool segment_pool(m_strFilePath);
	m_FileInfo = http->GetRemoteFileInfo();
	m_FileInfo.fileDownloaded = segment_pool.Init(m_FileInfo, m_nCocurrent);
	MYTRACE(_T("BeginDownload :%s -> %s (%I64d Bytes / %I64d )\r\n"), m_locationPool.GetMainUrl(), m_strFilePath, m_FileInfo.fileDownloaded, m_FileInfo.fileSize);
	// Nothing resumable: start from a clean slate
	if(m_FileInfo.fileDownloaded==0 && PathFileExists(strTmpFile))
	{
		DeleteFile(strTmpFile);
		DeleteFile(strTmpInfoFile);
	}
	CFileStream fs(strTmpFile);
	if (!fs.Create())
	{
		MYTRACE(_T("Create File Error : %s\r\n"), m_strFilePath);
		m_errCode = DLERR_CREATEFILE;
		state = ProcessState_Failed;
	}
	else
	{
		if (m_FileInfo.fileSize > 0)
			fs.SetLength(m_FileInfo.fileSize);
		// run all segements
		BOOL first = TRUE;
		for(int i=0; i<m_nCocurrent; ++i)
		{
			SegmentPtr s = NULL;
			if(first)
			{
				// First segment reuses the caller-provided connection
				s = segment_pool.GetNextSegment(NULL);
				if(!s)
					break;
				s->instream = http;
				if(s->currentposition>0)
					s->instream->OpenRange(s->currentposition, s->endposition);
				else if(s->endposition>0)
					// s->instream->ModifyRange(0, s->endposition);
					s->instream->SetFileSizeExpect(m_FileInfo.fileSize);
				first = FALSE;
			}
			else
			{
				// Additional segments open fresh connections from the pool
				CString strUrl;
				if( !m_locationPool.Get(strUrl) )
					break;
				else
				{
					UrlInfo urlinfo;
					CrackUrl(strUrl, urlinfo);
					s = segment_pool.GetNextSegment(NULL);
					if(!s)
						break;
					CHttpAsyncInPtr h = request_pool.Get();
					h->Open(&urlinfo, s->currentposition, s->endposition, m_FileInfo.fileSize);
					s->instream = h;
				}
			}
			if( s )
				segment_pool.NotifySegmentToRunning(s);
		}
		if(first)
		{
			// Not even one segment started => nothing left to download
			state = ProcessState_Finished;
		}
		else
		{
			// read and run
			m_downStat.OnDownBegin();
			m_dwTmLastDownloadProgess = GetTickCount();
			m_iInitialDownloaded = m_FileInfo.fileDownloaded;
			INT nTriesFromNoData = 0;
			DWORD dwLastFailStartTime = m_dwTmLastDownloadProgess;
			// Wait set: slot 0 = stop event, slots 1..N = running segments
			HANDLE *hWaits = new HANDLE[1+m_nCocurrent];
			while(segment_pool.hasRunning() || segment_pool.hasQueuing())
			{
				ZeroMemory( hWaits, sizeof(HANDLE)*(1+m_nCocurrent) );
				hWaits[0] = m_hStopEvent;
				SegmentPtrs &runnings = segment_pool.GetRunningSegments();
				for(size_t i=0; i<runnings.size(); ++i)
				{
					ATLASSERT(runnings[i]->instream->GetWaitHandle());
					hWaits[i+1] = runnings[i]->instream->GetWaitHandle();
				}
				DWORD dwWaitRet = WAIT_FAILED;
				//dwWaitRet = WaitForMultipleObjects(1+runnings.size(), hWaits, FALSE, INFINITE);
				dwWaitRet = WaitForMultipleObjects(1+runnings.size(), hWaits, FALSE, 1000); // 1s for start failed segments !!
				DWORD dwTimeNow = GetTickCount();
				m_downStat.OnDownData(dwTimeNow, 0);
				BOOL bSegmentIsDone = FALSE;
				if (WAIT_FAILED == dwWaitRet )
				{
					m_errCode = DLERR_INTERNAL;
					state = ProcessState_Failed;
					break;
				}
				else if (WAIT_OBJECT_0 == dwWaitRet)
				{
					// Stop event signalled by the user
					state = ProcessState_UserCanceled;
					m_errCode = DLLER_USER_CANCEL;
					break;
				}
				else if (WAIT_TIMEOUT == dwWaitRet)
				{
					;
				}
				else
				{
					// One of the segment handles fired: pump its data
					BOOL bGotData = FALSE;
					int ret = _ProcessSegment(request_pool, segment_pool, dwWaitRet, fs, bGotData );
					if(ret<0)
						state = ProcessState_Failed;
					else if(ret>0)
						bSegmentIsDone = TRUE;
					if(bGotData) // Reset on received data
						nTriesFromNoData = 0;
				}
				if( (dwTimeNow-m_dwTmLastDownloadProgess)>DOWNLOAD_PROGRESS_REPORT_INTERVAL )
				{
					_Notify(ProcessState_ReceiveData);
					m_dwTmLastDownloadProgess = dwTimeNow;
				}
				// Restart error !!
				if((dwTimeNow-dwLastFailStartTime)>DOWNLOAD_FAIL_RETRY_INTERVAL)
				{
					if(segment_pool.GetRunningSegments().size()<m_nCocurrent && segment_pool.hasQueuing())
					{
						// Throttled retry of a queued (failed) segment on an alternate URL
						dwLastFailStartTime = dwTimeNow;
						CString strUrl;
						if( m_locationPool.Get(strUrl) )
						{
							UrlInfo urlinfo;
							CrackUrl(strUrl, urlinfo);
							SegmentPtr s = segment_pool.GetNextSegment(NULL);
							if(s)
							{
								CHttpAsyncInPtr h = request_pool.Get();
								h->Open(&urlinfo, s->currentposition, s->endposition, m_FileInfo.fileSize);
								s->instream = h;
								segment_pool.NotifySegmentToRunning(s);
							}
						}
						++ nTriesFromNoData;
					}
				}
				// check if all parts is done
				if(bSegmentIsDone && !segment_pool.hasRunning())
				{
					if(segment_pool.hasQueuing())
					{
						MYTRACE(_T("- All Parts Finished, %d Segments Failed, reach max try '%d' \r\n"), segment_pool.GetQueueSegments().size(), nTriesFromNoData);
						if(nTriesFromNoData>DOWNLOAD_MAINURL_MAXRETYR)
						{
							state = ProcessState_Failed;
							m_errCode = DLLER_SEGENT_ERROR;
							break;
						}
					}
					else
					{
						MYTRACE(_T("- All Parts Finished \r\n"));
						state = ProcessState_Finished;
						break;
					}
				}
			}
			SAFE_DELETE_ARRAY(hWaits);
		}
		fs.CloseFile();
		//
		if(state==ProcessState_Finished)
		{
			// Success: drop resume info and move the temp file into place
			CSegmentInfoFile(m_strFilePath).Unlink();
			MoveFileEx(m_strFilePath + DOWNLOAD_TEMP_FILE_SUFFIX, m_strFilePath, MOVEFILE_REPLACE_EXISTING);
		}
		else if(m_FileInfo.fileSize>0)
			CSegmentInfoFile(m_strFilePath).Save(m_FileInfo.fileSize, m_FileInfo.fileDownloaded, segment_pool.GetSegments());
	}
	request_pool.Cleanup();
	m_downStat.OnDownEnd();
	return state;
}
/*----------------------------
 * Purpose : load the stereo calibration result data
 *----------------------------
 * Function : StereoMatch::loadCalibData
 * Access   : public
 * Return   :  1  success
 *             0  failed to read the rectification parameters
 *            -1  calibrated image size differs from the configured frame size
 *            -2  the rectification method is not the BOUGUET method
 *           -99  unknown error
 *
 * Param : xmlFilePath [in] stereo calibration result data file
 */
int StereoMatch::loadCalibData(const char* xmlFilePath)
{
	// Read camera calibration parameters: Q roi1 roi2 mapx1 mapy1 mapx2 mapy2
	try
	{
		cv::FileStorage fs(xmlFilePath, cv::FileStorage::READ);
		if ( !fs.isOpened() )
		{
			return (0);
		}
		cv::Size imageSize;
		cv::FileNodeIterator it = fs["imageSize"].begin();
		it >> imageSize.width >> imageSize.height;
		if (imageSize.width != m_frameWidth || imageSize.height != m_frameHeight)
		{
			return (-1);
		}
		// Valid (rectified) regions of interest for the left/right cameras,
		// stored as [x, y, width, height]
		vector<int> roiVal1;
		vector<int> roiVal2;
		fs["leftValidArea"] >> roiVal1;
		m_Calib_Roi_L.x = roiVal1[0];
		m_Calib_Roi_L.y = roiVal1[1];
		m_Calib_Roi_L.width = roiVal1[2];
		m_Calib_Roi_L.height = roiVal1[3];
		fs["rightValidArea"] >> roiVal2;
		m_Calib_Roi_R.x = roiVal2[0];
		m_Calib_Roi_R.y = roiVal2[1];
		m_Calib_Roi_R.width = roiVal2[2];
		m_Calib_Roi_R.height = roiVal2[3];
		fs["QMatrix"] >> m_Calib_Mat_Q;
		fs["remapX1"] >> m_Calib_Mat_Remap_X_L;
		fs["remapY1"] >> m_Calib_Mat_Remap_Y_L;
		fs["remapX2"] >> m_Calib_Mat_Remap_X_R;
		fs["remapY2"] >> m_Calib_Mat_Remap_Y_R;
		cv::Mat lfCamMat;
		fs["leftCameraMatrix"] >> lfCamMat;
		m_FL = lfCamMat.at<double>(0,0); // focal length from the left camera
		// Binary mask covering the left camera's valid area
		m_Calib_Mat_Mask_Roi = cv::Mat::zeros(m_frameHeight, m_frameWidth, CV_8UC1);
		cv::rectangle(m_Calib_Mat_Mask_Roi, m_Calib_Roi_L, cv::Scalar(255), -1);
		m_BM.state->roi1 = m_Calib_Roi_L;
		m_BM.state->roi2 = m_Calib_Roi_R;
		// NOTE(review): the flag is set before the method check below, so it
		// stays true even when -2 is returned — confirm this is intended.
		m_Calib_Data_Loaded = true;
		string method;
		fs["rectifyMethod"] >> method;
		if (method != "BOUGUET")
		{
			return (-2);
		}
	}
	catch (std::exception& e)
	{
		m_Calib_Data_Loaded = false;
		return (-99);
	}
	return 1;
}
// Demo entry point: builds several particle effects (fire, white smoke and
// four fireworks), wires them into a scenegraph together with a textured
// cube, and runs the render loop until the window is closed.
// NOTE(review): the Emitter/Effect/ParticleSystem constructor arguments are
// project-specific; the meanings noted below are inferred from usage and
// should be confirmed against the Emitter class documentation.
int main() {
	glfwInit();
	////our window
	//GLFWwindow* window;
	//window = glfwCreateWindow(800, 600, "ParticleSystemXML", NULL, NULL);
	//glfwMakeContextCurrent(window);
	//
	////CAM
	//cam.setKeySpeed(4.0);
	//iH.setAllInputMaps(cam);
	//glfwSetKeyCallback(window, key_callback);
	//cam.setFOV(50);
	//cam.setNearFar(1, 100);
	Window testWindow(500, 50, 800, 600, "testWindow");
	glfwMakeContextCurrent(testWindow.getWindow());
	// Callback
	glfwSetKeyCallback(testWindow.getWindow(), key_callback);
	cam.setKeySpeed(4.0);
	cam.setNearFar(0.1, 100);
	glewInit();

	//our renderer
	OpenGL3Context context;
	Renderer *renderer;
	renderer = new Renderer(context);

	//////////////////////Textures//////////////////////
	// NOTE(review): fireTex, fireTex3, fireSparkleTex2, smokeBlack1 and
	// texFireworkBlue/Green/Gold are created but never attached to an emitter
	// below (the firework colors come from the XML effect files instead).
	Texture* fireTex = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/Fire2_M.png");
	Texture* fireTex1 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fire1_M.png");
	Texture* fireTex2 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fire3_M.png");
	Texture* fireTex3 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/flame02_L.png");
	//Texture* fireFlickering1 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fire_flickering_1.png");
	//Texture* fireFlickering2 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fire_flickering_2.png");
	//Texture* fireFlickering3 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fire_flickering_3.png");
	//Texture* fireFlickering4 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fire_flickering_4.png");
	Texture* fireSparkleTex1 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fireSparkle1_S.png");
	Texture* fireSparkleTex2 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/fire/fireSparkle2.png");
	Texture* texFireworkBlue = new Texture((char*)RESOURCES_PATH "/ParticleSystem/firework/firework_blue.png");
	Texture* texFireworkRed = new Texture((char*)RESOURCES_PATH "/ParticleSystem/firework/firework_red.png");
	Texture* texFireworkGreen = new Texture((char*)RESOURCES_PATH "/ParticleSystem/firework/firework_green.png");
	Texture* texFireworkGold = new Texture((char*)RESOURCES_PATH "/ParticleSystem/firework/firework_gold.png");
	Texture* texFireworkTail = new Texture((char*)RESOURCES_PATH "/ParticleSystem/firework/firework_tail.png");
	Texture* smokeWhiteTex1 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/smoke/smokeWhite/smokeWhite01.png");
	Texture* smokeWhiteTex2 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/smoke/smokeWhite/smokeWhite02.png");
	Texture* smokeBlack1 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/smoke/smokeBlack/smokeBlack01.png");
	Texture* smokeBlack2 = new Texture((char*)RESOURCES_PATH "/ParticleSystem/smoke/smokeBlack/smokeBlack02.png");

	//////////////////////Emitter//////////////////////
	//Emitter explosion sparkle
	Emitter* explosionSparkle = new Emitter(0, glm::vec3(0.0, 0.0, 0.0), 0.25, 0.01, 80, 1.25, true);
	explosionSparkle->setVelocity(5);
	//explosionSparkle->usePhysicDirectionGravity(glm::vec4(0.0, -1.0, 0.0, 0.3), 3.0f);
	explosionSparkle->usePhysicPointGravity(glm::vec3(0.0, 0.0, 0.0), 0.6, 10.0, 2, 4.0f, true);
	explosionSparkle->addTexture(fireSparkleTex1, 1.0);
	explosionSparkle->defineLook(true, 0.01, 0.0, 0.1);

	//Emitter explosion fire
	//TODO

	//Emitter fire smoke
	Emitter* fire = new Emitter(0, glm::vec3(0.0, 0.0, 0.0), 0.0, 0.2, 2, 5.0, true);
	fire->setVelocity(5);
	fire->usePhysicDirectionGravity(glm::vec4(0.0, 1.0, 0.0, 5.0), 0.5f);
	fire->addTexture(fireTex1, 1.0);
	fire->addTexture(fireTex2, 0.7);
	fire->addTexture(smokeBlack2, 0.1);
	//fire->addTexture(smokeBlack2, 0.1);
	//fire->addTexture(smokeWhiteTex2, 0.25);
	// Size/time keyframes for the particle's look over its lifetime.
	std::vector<float> sizeF{ 0.05f, 0.5f, 0.75f, 1.0f };
	std::vector<float> timeF{ 0.0f, 0.4f, 0.75f, 1.0f };
	fire->defineLook(true, sizeF, timeF, 0.5, 4.0, 3.0, true, 0.3);
	fire->switchToGeometryShader();

	//Emitter fire flickering
	Emitter* fireFlickering = new Emitter(0, glm::vec3(0.0, 0.1, 0.0), 0.0, 0.2, 1, 2.0, true);
	fireFlickering->setVelocity(5);
	fireFlickering->usePhysicDirectionGravity(glm::vec4(0.0, 1.0, 0.0, 0.5), 0.3f);
	/*fireFlickering->addTexture(fireFlickering1, 1.0);
	fireFlickering->addTexture(fireFlickering2, 0.7);
	fireFlickering->addTexture(fireFlickering3, 0.5);
	fireFlickering->addTexture(fireFlickering4, 0.3);*/
	fireFlickering->defineLook(true, 0.1, 0.5, 1.0, 0.5, true, 0.3);
	fireFlickering->switchToGeometryShader();

	Emitter* fireSparkle = new Emitter(0, glm::vec3(0.0, 0.1, 0.0), 0.0, 0.05, 3, 2.5, true);
	fireSparkle->setVelocity(5);
	fireSparkle->usePhysicDirectionGravity(glm::vec4(0.0, 1.0, 0.0, 0.8), 0.5f);
	fireSparkle->addTexture(fireSparkleTex1, 1.0);
	fireSparkle->defineLook(true, 0.05, 0.5, 0.5);

	//Emitter firework explosion
	Emitter* fireworkExplosion = new Emitter(0, glm::vec3(0.0, 0.0, 0.0), 0.1, 0.01, 80, 2.0, true);
	fireworkExplosion->setVelocity(6);
	//fireworkExplosion->usePhysicDirectionGravity(glm::vec4(0.0, -1.0, 0.0, 0.6), 3.0f);
	fireworkExplosion->usePhysicPointGravity(glm::vec3(0.0, -2.0, 0.0), 0.9, 5.0, 2, 2.0f, true);
	fireworkExplosion->addTexture(texFireworkRed, 1.0);
	fireworkExplosion->defineLook(true, 0.04, 0.0, 0.5);
	// Explosion starts 2 s after the tail, when the rocket has climbed.
	fireworkExplosion->setStartTime(2.0);

	//Emitter firework tail
	Emitter* fireworkTail = new Emitter(0, glm::vec3(0.0, 0.0, 0.0), 2.0, 0.01, 20, 0.5, true);
	fireworkTail->setVelocity(5);
	//fireworkTail->usePhysicDirectionGravity(glm::vec4(0.0, -1.0, 0.0, 2.9), 0.2f);
	fireworkTail->usePhysicPointGravity(glm::vec3(0.0, -4.0, 0.0), 30.6, 10.0, 2, 0.2f, true);
	fireworkTail->addTexture(texFireworkTail, 1.0);
	fireworkTail->defineLook(true, 0.001, 0.0, 0.1);

	//FINAL EMITTER WHITE SMOKE
	Emitter* smokeWhite = new Emitter(0, glm::vec3(0.0, 0.0, 0.0), 0.0, 0.4, 1, 8.0, true);
	smokeWhite->setVelocity(2);
	// Negative strength along -Y: smoke drifts upward.
	smokeWhite->usePhysicDirectionGravity(glm::vec4(0.0, -1.0, 0.0, -0.8), 0.3f);
	smokeWhite->addTexture(smokeWhiteTex1, 1.0);
	smokeWhite->addTexture(smokeWhiteTex2, 0.25);
	std::vector<float> smokeWhiteSize{ 0.05f, 0.5f, 0.75f, 1.0f };
	std::vector<float> smokeWhiteTime{ 0.0f, 0.4f, 0.75f, 1.0f };
	smokeWhite->defineLook(true, smokeWhiteSize, smokeWhiteTime, 1.0, 2.0, 1.0, false, 0.3);
	smokeWhite->switchToGeometryShader();

	//////////////////////Effect//////////////////////
	// Effects group emitters; some are persisted to XML so the
	// ParticleSystems below can be loaded back from file.
	Effect* efExplosion = new Effect();
	efExplosion->addEmitter(explosionSparkle);

	Effect* efFire = new Effect();
	efFire->addEmitter(fire);
	//efFire->addEmitter(fireFlickering);
	efFire->addEmitter(fireSparkle);
	efFire->saveEffect(RESOURCES_PATH "/XML/Effect_Fire.xml");

	Effect* efFirework = new Effect();
	efFirework->addEmitter(fireworkTail);
	efFirework->addEmitter(fireworkExplosion);
	//efFirework->saveEffect(RESOURCES_PATH "/XML/Effect_Firework.xml");

	Effect* efFireworkTail = new Effect();
	efFireworkTail->addEmitter(fireworkTail);

	Effect* efFireworkExplosion = new Effect();
	efFireworkExplosion->addEmitter(fireworkExplosion);

	Effect* efSmWhi = new Effect();
	efSmWhi->addEmitter(smokeWhite);
	efSmWhi->saveEffect(RESOURCES_PATH "/XML/Effect_SmokeWhite.xml");

	//////////////////////ParticleSystem//////////////////////
	//ParticleSystem* psExplosion = new ParticleSystem(glm::vec3(0, -1, 0), efExplosion);
	//ParticleSystem* psFire = new ParticleSystem(glm::vec3(-2, 0, 3), efFire);
	// psFire is deliberately loaded from the XML written above (round-trip test).
	ParticleSystem* psFire = new ParticleSystem(glm::vec3(-2, 0, 3), RESOURCES_PATH "/XML/Effect_Fire.xml");
	ParticleSystem* psFirework = new ParticleSystem(glm::vec3(0, 0, 5), efFirework);
	ParticleSystem* psFireworkTail = new ParticleSystem(glm::vec3(0, 0, 5), efFireworkTail);
	ParticleSystem* psFireworkExplosion = new ParticleSystem(glm::vec3(0, 2, 5), efFireworkExplosion);
	ParticleSystem* psSmokeWhite = new ParticleSystem(glm::vec3(2, 0, 3), efSmWhi);
	//ParticleSystem* psSmokeWhite = new ParticleSystem(glm::vec3(2, 0, 3), RESOURCES_PATH "/XML/Effect_SmokeWhite.xml");
	// The four colored fireworks come from pre-authored XML effect files.
	ParticleSystem* psFireworkRed = new ParticleSystem(glm::vec3(-3, -1, 5), RESOURCES_PATH "/XML/Effect_FireworkRed.xml");
	ParticleSystem* psFireworkBlue = new ParticleSystem(glm::vec3(-1, -1, 5), RESOURCES_PATH "/XML/Effect_FireworkBlue.xml");
	ParticleSystem* psFireworkGreen = new ParticleSystem(glm::vec3(1, -1, 5), RESOURCES_PATH "/XML/Effect_FireworkGreen.xml");
	ParticleSystem* psFireworkGold = new ParticleSystem(glm::vec3(3, -1, 5), RESOURCES_PATH "/XML/Effect_FireworkGold.xml");

	//////////////////////Node//////////////////////
	//Node nodeExplosion("nodeExplosion");
	//nodeExplosion.setCamera(&cam);
	//nodeExplosion.addParticleSystem(psExplosion);
	//nodeExplosion.setParticleActive(true);
	//
	//Node fireNode("fireNode");
	//fireNode.setCamera(&cam);
	//fireNode.addParticleSystem(psFire);
	//fireNode.setParticleActive(true);
	//
	//Node nodeFirework("fireworkNode");
	//nodeFirework.setCamera(&cam);
	//nodeFirework.addParticleSystem(psFirework);
	//nodeFirework.setParticleActive(true);
	//
	//Node whiteSmokeNode("whiteSmokeNode");
	//whiteSmokeNode.setCamera(&cam);
	//whiteSmokeNode.addParticleSystem(psSmokeWhite);
	//whiteSmokeNode.setParticleActive(true);

	////Firework
	//Node nodeFireworkRed("fireworkRedNode");
	//nodeFireworkRed.setCamera(&cam);
	//nodeFireworkRed.addParticleSystem(psFireworkRed);
	//nodeFireworkRed.setParticleActive(true);

	//Node nodeFireworkBlue("fireworBlueNode");
	//nodeFireworkBlue.setCamera(&cam);
	//nodeFireworkBlue.addParticleSystem(psFireworkBlue);
	//nodeFireworkBlue.setParticleActive(true);

	//Node nodeFireworkGreen("fireworkGreenNode");
	//nodeFireworkGreen.setCamera(&cam);
	//nodeFireworkGreen.addParticleSystem(psFireworkGreen);
	//nodeFireworkGreen.setParticleActive(true);

	//Node nodeFireworkGold("fireworkGoldNode");
	//nodeFireworkGold.setCamera(&cam);
	//nodeFireworkGold.addParticleSystem(psFireworkGold);
	//nodeFireworkGold.setParticleActive(true);

	// Shader
	VertexShader vs(loadShaderSource(SHADERS_PATH + std::string("/ColorShader3D/ColorShader3D.vert")));
	FragmentShader fs(loadShaderSource(SHADERS_PATH + std::string("/ColorShader3D/ColorShader3D.frag")));
	ShaderProgram shader(vs, fs);

	//need scene here mainly because of input
	Level testLevel("testLevel");
	Scene testScene("testScene");
	testLevel.addScene(&testScene);
	testLevel.changeScene("testScene");

	//Add Camera to Scene
	testScene.getScenegraph()->addCamera(&cam);
	testScene.getScenegraph()->setActiveCamera("Pilotview");

	//Set Input-Maps and activate one
	iH.setAllInputMaps(*(testScene.getScenegraph()->getActiveCamera()));
	iH.changeActiveInputMap(MapType::CAMPILOTVIEW);
	iH.getActiveInputMap()->update(cam);

	//Object
	Cube cube;
	Texture bricks((char*)RESOURCES_PATH "/Wall/bricks_diffuse.png");
	Node cube1("cube");
	cube1.addGeometry(&cube);
	cube1.addTexture(&bricks);
	cube1.setModelMatrix(glm::translate(cube1.getModelMatrix(), glm::vec3(0.0, 0.0, 0.0)));
	//cube1.setModelMatrix(glm::scale(cube1.getModelMatrix(), glm::vec3(0.5, 0.5, 0.5)));
	testScene.getScenegraph()->getRootNode()->addChildrenNode(&cube1);

	//add nodes to the scenegraph
	//testScene.getScenegraph()->getRootNode()->addChildrenNode(&nodeExplosion);
	//testScene.getScenegraph()->getRootNode()->addChildrenNode(&fireNode);
	//testScene.getScenegraph()->getRootNode()->addChildrenNode(&whiteSmokeNode);
	//testScene.getScenegraph()->getRootNode()->addChildrenNode(&nodeFirework);
	/*testScene.getScenegraph()->getRootNode()->addChildrenNode(&nodeFireworkRed);
	testScene.getScenegraph()->getRootNode()->addChildrenNode(&nodeFireworkBlue);
	testScene.getScenegraph()->getRootNode()->addChildrenNode(&nodeFireworkGreen);
	testScene.getScenegraph()->getRootNode()->addChildrenNode(&nodeFireworkGold);*/

	//using this, the ParticleSystems get rendered in order of their distance to the camera
	testScene.getScenegraph()->addParticleSystem(psFire);
	testScene.getScenegraph()->addParticleSystem(psSmokeWhite);
	testScene.getScenegraph()->addParticleSystem(psFireworkBlue);
	testScene.getScenegraph()->addParticleSystem(psFireworkRed);
	testScene.getScenegraph()->addParticleSystem(psFireworkGreen);
	testScene.getScenegraph()->addParticleSystem(psFireworkGold);

	//TEST
	/*ParticleSystem* psComicCloud = new ParticleSystem(glm::vec3(0, -1, 4), RESOURCES_PATH "/XML/ComicCloudEffect - Kopie.xml");
	testScene.getScenegraph()->addParticleSystem(psComicCloud);
	psComicCloud->start();*/

	//start the ParticleSystems
	psFire->start();
	psSmokeWhite->start();
	//psExplosion->start();
	//psFirework->start();
	psFireworkRed->start();
	psFireworkBlue->start();
	psFireworkGreen->start();
	psFireworkGold->start();

	double startTime = glfwGetTime();
	double lastTime = glfwGetTime();
	int nbFrames = 0;

	// Main render loop: runs until the window close flag is set.
	while (!glfwWindowShouldClose(testWindow.getWindow()))
	{
		// Measure speed
		double currentTime = glfwGetTime();
		nbFrames++;
		if (currentTime - lastTime >= 1.0){ // If last prinf() was more than 1 sec ago
			// printf and reset timer
			//printf("%f ms/frame\n", 1000.0 / double(nbFrames));
			nbFrames = 0;
			lastTime += 1.0;
		}

		// Camera sensitivity is scaled by the last frame's duration.
		cam.setSensitivity(glfwGetTime() - startTime);
		startTime = glfwGetTime();

		/*glEnable(GL_DEPTH);
		glEnable(GL_DEPTH_TEST);
		glEnable(GL_BLEND);
		glClearColor(0.5, 0.5, 0.5, 1.0);
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
		shader.bind();
		shader.sendMat4("viewMatrix", cam.getViewMatrix());
		shader.sendMat4("projectionMatrix", cam.getProjectionMatrix());
		testScene.render(shader);
		testScene.renderParticleSystems();
		shader.unbind();*/

		//renderer->useBloom(true);
		renderer->renderScene(testScene, testWindow);

		//update Positions of firework ParticleSystems
		// NOTE(review): startTime was just reset above, so the per-frame
		// Y offset added here is near zero — presumably intended to make the
		// rockets rise over time; verify against the original intent.
		glm::vec3 pos = psFireworkRed->getPosition();
		psFireworkRed->setPosition(glm::vec3(pos.x, pos.y + (glfwGetTime() - startTime), pos.z));
		pos = psFireworkBlue->getPosition();
		psFireworkBlue->setPosition(glm::vec3(pos.x, pos.y + (glfwGetTime() - startTime), pos.z));
		pos = psFireworkGreen->getPosition();
		psFireworkGreen->setPosition(glm::vec3(pos.x, pos.y + (glfwGetTime() - startTime), pos.z));
		pos = psFireworkGold->getPosition();
		psFireworkGold->setPosition(glm::vec3(pos.x, pos.y + (glfwGetTime() - startTime), pos.z));

		/*glfwSwapBuffers(testWindow.getWindow());
		glfwPollEvents();*/
	}

	glfwDestroyWindow(testWindow.getWindow());
	glfwTerminate();
	return 0;
}
// -----------------------------------------------------------------------------
//
// Purpose and Method:
//   Builds M0, the constant (parameter-independent) factor of the Jacobian
//   for the 2D affine motion model. Row i corresponds to template pixel i at
//   reference coordinates (x, y) with template gradient (grad_x, grad_y)
//   evaluated at the identity warp:
//     M0(i,:) = [grad_x, grad_y, grad_x*x, grad_y*x, grad_x*y, grad_y*y]
// Inputs: none (reads m_object_model and m_motion_model)
// Outputs: returns the template_coords.rows x getNumParams() matrix M0
// Dependencies: m_object_model, m_motion_model
// Restrictions and Caveats: assumes a 6-parameter affine motion model
//   (six columns are written per row).
//
// -----------------------------------------------------------------------------
cv::Mat
Affine2DFactorizedProblem::computeM0Matrix
  ()
{
  cv::Mat template_coords;
  cv::Mat M0;
  cv::Mat gradients;
  cv::Mat zero_params;

  m_object_model->getReferenceCoords(template_coords);
  M0 = cv::Mat::zeros(template_coords.rows, m_motion_model->getNumParams(), cv::DataType<MAT_TYPE>::type);

  // Template gradients are evaluated at the identity warp (all params zero).
  zero_params = cv::Mat::zeros(m_motion_model->getNumParams(), 1, cv::DataType<MAT_TYPE>::type);
  gradients = m_object_model->computeTemplateFeaturesGradient(zero_params);

  assert(gradients.rows == M0.rows);
  assert(gradients.cols == 2);

  for (int i = 0; i < M0.rows; i++)
  {
    MAT_TYPE x = template_coords.at<MAT_TYPE>(i,0);
    MAT_TYPE y = template_coords.at<MAT_TYPE>(i,1);
    MAT_TYPE grad_x = gradients.at<MAT_TYPE>(i,0);
    MAT_TYPE grad_y = gradients.at<MAT_TYPE>(i,1);

    M0.at<MAT_TYPE>(i,0) = grad_x;
    M0.at<MAT_TYPE>(i,1) = grad_y;
    M0.at<MAT_TYPE>(i,2) = (grad_x * x);
    M0.at<MAT_TYPE>(i,3) = (grad_y * x);
    M0.at<MAT_TYPE>(i,4) = (grad_x * y);
    M0.at<MAT_TYPE>(i,5) = (grad_y * y);
  }

#ifdef DEBUG
  // Visualize each column of M0 as a min-max normalized image tile.
  cv::namedWindow("M0");

  cv::Mat col = template_coords.col(0);
  MAT_TYPE min_x = *std::min_element(col.begin<MAT_TYPE>(), col.end<MAT_TYPE>());
  MAT_TYPE max_x = *std::max_element(col.begin<MAT_TYPE>(), col.end<MAT_TYPE>());
  col = template_coords.col(1);
  MAT_TYPE min_y = *std::min_element(col.begin<MAT_TYPE>(), col.end<MAT_TYPE>());
  MAT_TYPE max_y = *std::max_element(col.begin<MAT_TYPE>(), col.end<MAT_TYPE>());

  int img_width = round(max_x - min_x + 1);
  int img_height = round(max_y - min_y + 1);

  cv::Mat Jacobian_image = cv::Mat::zeros(img_height, img_width * M0.cols, cv::DataType<uint8_t>::type);
  for (int i = 0; i < M0.cols; i++)
  {
    cv::Mat M0_i;
    M0.col(i).copyTo(M0_i);
    cv::Mat normalized;
    cv::normalize(M0_i.reshape(1, img_height), normalized, 0, 255, cv::NORM_MINMAX, cv::DataType<uint8_t>::type);
    cv::Mat Jacobian_image_ref = Jacobian_image(cv::Range::all(), cv::Range(i*img_width, (i+1)*img_width));
    normalized.copyTo(Jacobian_image_ref);
  }
  cv::imshow("M0", Jacobian_image);

  // write Mat objects to the file
  cv::FileStorage fs("Affine2DFactorizedProblem_computeM0Matrix.xml", cv::FileStorage::WRITE);
  fs << "M0_0" << M0.col(0);
  fs << "M0_1" << M0.col(1);
  fs << "M0_2" << M0.col(2);
  fs << "M0_3" << M0.col(3);
  fs << "M0_4" << M0.col(4); // was M0.col(3): copy-paste bug duplicated column 3
  fs << "M0_5" << M0.col(5); // column 5 was never dumped at all
  fs << "template_coords" << template_coords;
  fs.release();
#endif

  return M0;
};
// Merges the two PCRTC read circuits into one displayed frame, applies the
// game-specific "fullscreen blur" workaround, then runs the interlace and
// post-processing passes. Returns false when neither circuit is enabled
// (nothing to present), true otherwise.
bool GSRenderer::Merge(int field)
{
	bool en[2];            // circuit enabled flags
	GSVector4i fr[2];      // frame rects (source, in framebuffer space)
	GSVector4i dr[2];      // display rects (destination, in screen space)

	// Topmost display line over both circuits; used as the vertical origin.
	int baseline = INT_MAX;

	for(int i = 0; i < 2; i++)
	{
		en[i] = IsEnabled(i);

		if(en[i])
		{
			fr[i] = GetFrameRect(i);
			dr[i] = GetDisplayRect(i);

			baseline = min(dr[i].top, baseline);

			//printf("[%d]: %d %d %d %d, %d %d %d %d\n", i, fr[i].x,fr[i].y,fr[i].z,fr[i].w , dr[i].x,dr[i].y,dr[i].z,dr[i].w);
		}
	}

	if(!en[0] && !en[1])
	{
		return false;
	}

	// try to avoid fullscreen blur, could be nice on tv but on a monitor it's like double vision, hurts my eyes (persona 4, guitar hero)
	//
	// NOTE: probably the technique explained in graphtip.pdf (Antialiasing by Supersampling / 4. Reading Odd/Even Scan Lines Separately with the PCRTC then Blending)

	// Both circuits read the same framebuffer (same base pointer, width and
	// pixel format).
	bool samesrc =
		en[0] && en[1] &&
		m_regs->DISP[0].DISPFB.FBP == m_regs->DISP[1].DISPFB.FBP &&
		m_regs->DISP[0].DISPFB.FBW == m_regs->DISP[1].DISPFB.FBW &&
		m_regs->DISP[0].DISPFB.PSM == m_regs->DISP[1].DISPFB.PSM;

	// bool blurdetected = false;

	if(samesrc /*&& m_regs->PMODE.SLBG == 0 && m_regs->PMODE.MMOD == 1 && m_regs->PMODE.ALP == 0x80*/)
	{
		// One circuit is the other shifted by one scanline and blended over
		// itself — undo the shift so the output is not blurred.
		if(fr[0].eq(fr[1] + GSVector4i(0, -1, 0, 0)) && dr[0].eq(dr[1] + GSVector4i(0, 0, 0, 1))
		|| fr[1].eq(fr[0] + GSVector4i(0, -1, 0, 0)) && dr[1].eq(dr[0] + GSVector4i(0, 0, 0, 1)))
		{
			// persona 4:
			//
			// fr[0] = 0 0 640 448
			// fr[1] = 0 1 640 448
			// dr[0] = 159 50 779 498
			// dr[1] = 159 50 779 497
			//
			// second image shifted up by 1 pixel and blended over itself
			//
			// god of war:
			//
			// fr[0] = 0 1 512 448
			// fr[1] = 0 0 512 448
			// dr[0] = 127 50 639 497
			// dr[1] = 127 50 639 498
			//
			// same just the first image shifted

			int top = min(fr[0].top, fr[1].top);
			int bottom = max(dr[0].bottom, dr[1].bottom);

			fr[0].top = top;
			fr[1].top = top;
			dr[0].bottom = bottom;
			dr[1].bottom = bottom;

			// blurdetected = true;
		}
		else if(dr[0].eq(dr[1]) && (fr[0].eq(fr[1] + GSVector4i(0, 1, 0, 1)) || fr[1].eq(fr[0] + GSVector4i(0, 1, 0, 1))))
		{
			// dq5:
			//
			// fr[0] = 0 1 512 445
			// fr[1] = 0 0 512 444
			// dr[0] = 127 50 639 494
			// dr[1] = 127 50 639 494

			int top = min(fr[0].top, fr[1].top);
			int bottom = min(fr[0].bottom, fr[1].bottom);

			fr[0].top = fr[1].top = top;
			fr[0].bottom = fr[1].bottom = bottom;

			// blurdetected = true;
		}

		//printf("samesrc = %d blurdetected = %d\n",samesrc,blurdetected);
	}

	GSVector2i fs(0, 0);  // merged frame size
	GSVector2i ds(0, 0);  // display size (doubled in frame-field mode)

	GSTexture* tex[2] = {NULL, NULL};

	if(samesrc && fr[0].bottom == fr[1].bottom)
	{
		tex[0] = GetOutput(0);
		tex[1] = tex[0]; // saves one texture fetch
	}
	else
	{
		if(en[0]) tex[0] = GetOutput(0);
		if(en[1]) tex[1] = GetOutput(1);
	}

	GSVector4 src[2];  // normalized source rects within tex[i]
	GSVector4 dst[2];  // destination rects in merged-frame pixels

	for(int i = 0; i < 2; i++)
	{
		if(!en[i] || !tex[i]) continue;

		GSVector4i r = fr[i];

		// overscan hack

		if(dr[i].height() > 512) // hmm
		{
			int y = GetDeviceSize(i).y;
			if(m_regs->SMODE2.INT && m_regs->SMODE2.FFMD) y /= 2;
			r.bottom = r.top + y;
		}

		// Scale frame-space coords into the texture's normalized space.
		GSVector4 scale = GSVector4(tex[i]->GetScale()).xyxy();

		src[i] = GSVector4(r) * scale / GSVector4(tex[i]->GetSize()).xyxy();

		// Vertical offset relative to the topmost circuit; small deltas
		// (< 4 lines) are ignored as jitter.
		GSVector2 o(0, 0);

		if(dr[i].top - baseline >= 4) // 2?
		{
			o.y = tex[i]->GetScale().y * (dr[i].top - baseline);

			if(m_regs->SMODE2.INT && m_regs->SMODE2.FFMD)
			{
				o.y /= 2;
			}
		}

		dst[i] = GSVector4(o).xyxy() + scale * GSVector4(r.rsize());

		fs.x = max(fs.x, (int)(dst[i].z + 0.5f));
		fs.y = max(fs.y, (int)(dst[i].w + 0.5f));
	}

	ds = fs;

	// Frame-field mode stores half the lines; the displayed height doubles.
	if(m_regs->SMODE2.INT && m_regs->SMODE2.FFMD)
	{
		ds.y *= 2;
	}

	bool slbg = m_regs->PMODE.SLBG;  // blend with background color instead of circuit 2
	bool mmod = m_regs->PMODE.MMOD;  // alpha selection for the blend

	if(tex[0] || tex[1])
	{
		// NOTE: the single '&' is the intentional GSVector component-wise AND,
		// not a mistyped '&&'; alltrue() then collapses the lane mask.
		if(tex[0] == tex[1] && !slbg && (src[0] == src[1] & dst[0] == dst[1]).alltrue())
		{
			// the two outputs are identical, skip drawing one of them (the one that is alpha blended)

			tex[0] = NULL;
		}

		GSVector4 c = GSVector4((int)m_regs->BGCOLOR.R, (int)m_regs->BGCOLOR.G, (int)m_regs->BGCOLOR.B, (int)m_regs->PMODE.ALP) / 255;

		m_dev->Merge(tex, src, dst, fs, slbg, mmod, c);

		if(m_regs->SMODE2.INT && m_interlace > 0)
		{
			if (m_interlace == 7 && m_regs->SMODE2.FFMD == 1) // Auto interlace enabled / Odd frame interlace setting
			{
				int field2 = 0;
				int mode = 2;
				m_dev->Interlace(ds, field ^ field2, mode, tex[1] ? tex[1]->GetScale().y : tex[0]->GetScale().y);
			}
			else
			{
				// m_interlace 1..6 encodes (field parity, mode) pairs.
				int field2 = 1 - ((m_interlace - 1) & 1);
				int mode = (m_interlace - 1) >> 1;
				m_dev->Interlace(ds, field ^ field2, mode, tex[1] ? tex[1]->GetScale().y : tex[0]->GetScale().y);
			}
		}

		// Optional post-processing passes, in fixed order.
		if(m_shadeboost)
		{
			m_dev->ShadeBoost();
		}

		if (m_shaderfx)
		{
			m_dev->ExternalFX();
		}

		if(m_fxaa)
		{
			m_dev->FXAA();
		}
	}

	return true;
}
// Entry point of the bvfs_test tool: parses command-line options, opens the
// Bareos catalog database, optionally rebuilds the path-hierarchy cache, then
// exercises the Bvfs browsing API (ls_dirs/ls_files/get_all_file_versions)
// and the bvfs_parent_dir() helper.
int main (int argc, char *argv[])
{
   int ch;
#if defined(HAVE_DYNAMIC_CATS_BACKENDS)
   alist *backend_directories = NULL;
#endif
   char *jobids = (char *)"1";        // default: operate on JobId 1
   char *path=NULL, *client=NULL;
   uint64_t limit=0;                  // 0 = no result limit
   bool clean=false;                  // -T: wipe and rebuild the cache tables

   setlocale(LC_ALL, "");
   bindtextdomain("bareos", LOCALEDIR);
   textdomain("bareos");
   init_stack_dump();

   Dmsg0(0, "Starting bvfs_test tool\n");

   my_name_is(argc, argv, "bvfs_test");
   init_msg(NULL, NULL);

   OSDependentInit();

   // NOTE(review): the optstring lists "f:" twice (harmless duplicate) and
   // accepts 'S' with no matching case below, so -S falls through to default
   // and calls usage() — confirm whether 'S' was meant to do something.
   while ((ch = getopt(argc, argv, "h:c:l:d:D:n:P:Su:vf:w:?j:p:f:T")) != -1) {
      switch (ch) {
      case 'd':                    /* debug level */
         if (*optarg == 't') {
            dbg_timestamp = true;
         } else {
            debug_level = atoi(optarg);
            if (debug_level <= 0) {
               debug_level = 1;
            }
         }
         break;
      case 'D':
         db_driver = optarg;
         break;
      case 'l':
         limit = str_to_int64(optarg);
         break;
      case 'c':
         client = optarg;
         break;
      case 'h':
         db_host = optarg;
         break;
      case 'n':
         db_name = optarg;
         break;
      case 'w':
         working_directory = optarg;
         break;
      case 'u':
         db_user = optarg;
         break;
      case 'P':
         db_password = optarg;
         break;
      case 'v':
         verbose++;
         break;
      case 'p':
         path = optarg;
         break;
      case 'f':
         // 'file' is a file-scope global (not declared in this function).
         file = optarg;
         break;
      case 'j':
         jobids = optarg;
         break;
      case 'T':
         clean = true;
         break;
      case '?':
      default:
         usage();
      }
   }
   argc -= optind;
   argv += optind;

   if (argc != 0) {
      Pmsg0(0, _("Wrong number of arguments: \n"));
      usage();
   }

   // Minimal dummy JCR so the catalog routines have a job context.
   JCR *bjcr = new_jcr(sizeof(JCR), NULL);
   bjcr->JobId = getpid();
   bjcr->setJobType(JT_CONSOLE);
   bjcr->setJobLevel(L_FULL);
   bjcr->JobStatus = JS_Running;
   bjcr->client_name = get_pool_memory(PM_FNAME);
   pm_strcpy(bjcr->client_name, "Dummy.Client.Name");
   bstrncpy(bjcr->Job, "bvfs_test", sizeof(bjcr->Job));

#if defined(HAVE_DYNAMIC_CATS_BACKENDS)
   backend_directories = New(alist(10, owned_by_alist));
   backend_directories->append((char *)backend_directory);

   db_set_backend_dirs(backend_directories);
#endif

   if ((db = db_init_database(NULL, NULL, db_name, db_user, db_password, db_host, 0, NULL)) == NULL) {
      Emsg0(M_ERROR_TERM, 0, _("Could not init Bareos database\n"));
   }
   Dmsg1(0, "db_type=%s\n", db_get_type(db));

   if (!db_open_database(NULL, db)) {
      Emsg0(M_ERROR_TERM, 0, db_strerror(db));
   }
   Dmsg0(200, "Database opened\n");
   if (verbose) {
      Pmsg2(000, _("Using Database: %s, User: %s\n"), db_name, db_user);
   }
   bjcr->db = db;

   // -T: drop the derived cache tables and rebuild them from scratch.
   if (clean) {
      Pmsg0(0, "Clean old table\n");
      db_sql_query(db, "DELETE FROM PathHierarchy", NULL, NULL);
      db_sql_query(db, "UPDATE Job SET HasCache=0", NULL, NULL);
      db_sql_query(db, "DELETE FROM PathVisibility", NULL, NULL);
      bvfs_update_cache(bjcr, db);
   }

   Bvfs fs(bjcr, db);
   fs.set_handler(result_handler, &fs);

   fs.set_jobids(jobids);
   fs.update_cache();
   if (limit)
      fs.set_limit(limit);

   // With -p: browse only the given path (paging through files), optionally
   // listing file versions, then exit without running the hard-coded tests.
   if (path) {
      fs.ch_dir(path);
      fs.ls_special_dirs();
      fs.ls_dirs();
      while (fs.ls_files()) {
         fs.next_offset();
      }

      // NOTE(review): 'fnid' is not declared in this function — presumably a
      // file-scope global set elsewhere (e.g. by result_handler); verify.
      if (fnid && client) {
         Pmsg0(0, "---------------------------------------------\n");
         Pmsg1(0, "Getting file version for %s\n", file);
         fs.get_all_file_versions(fs.get_pwd(), fnid, client);
      }

      exit (0);
   }

   // Default mode: walk a fixed set of paths used by the regression suite.
   Pmsg0(0, "list /\n");
   fs.ch_dir("/");
   fs.ls_special_dirs();
   fs.ls_dirs();
   fs.ls_files();

   Pmsg0(0, "list /tmp/\n");
   fs.ch_dir("/tmp/");
   fs.ls_special_dirs();
   fs.ls_dirs();
   fs.ls_files();

   Pmsg0(0, "list /tmp/regress/\n");
   fs.ch_dir("/tmp/regress/");
   fs.ls_special_dirs();
   fs.ls_files();
   fs.ls_dirs();

   Pmsg0(0, "list /tmp/regress/build/\n");
   fs.ch_dir("/tmp/regress/build/");
   fs.ls_special_dirs();
   fs.ls_dirs();
   fs.ls_files();

   // Hard-coded ids matching the regression database fixture.
   fs.get_all_file_versions(1, 347, "zog4-fd");

   // Sanity-check bvfs_parent_dir(): strips one trailing path component per
   // call until the string is empty (and stays empty).
   char p[200];
   strcpy(p, "/tmp/toto/rep/");
   bvfs_parent_dir(p);
   if(!bstrcmp(p, "/tmp/toto/")) {
      Pmsg0(000, "Error in bvfs_parent_dir\n");
   }
   bvfs_parent_dir(p);
   if(!bstrcmp(p, "/tmp/")) {
      Pmsg0(000, "Error in bvfs_parent_dir\n");
   }
   bvfs_parent_dir(p);
   if(!bstrcmp(p, "/")) {
      Pmsg0(000, "Error in bvfs_parent_dir\n");
   }
   bvfs_parent_dir(p);
   if(!bstrcmp(p, "")) {
      Pmsg0(000, "Error in bvfs_parent_dir\n");
   }
   bvfs_parent_dir(p);
   if(!bstrcmp(p, "")) {
      Pmsg0(000, "Error in bvfs_parent_dir\n");
   }

   return 0;
}
void Helper::saveDescriptorsInFile(const char* nativePath, cv::Mat descriptors) { cv::FileStorage fs(nativePath, cv::FileStorage::WRITE); fs << "descriptors" << descriptors; fs.release(); }