void threhold::showThrehold() { int div=ui->lineEdit->text().toInt(); calcThreshold(mtx,&mtx_thr,div); IplImage* img=new IplImage(mtx); IplImage* img_thr=new IplImage(mtx_thr); QImage qmtx=IplImage2QImage(img); QImage qmtx_thr=IplImage2QImage(img_thr); ui->label_2->setPixmap(QPixmap::fromImage(qmtx)); ui->label_3->setPixmap(QPixmap::fromImage(qmtx_thr)); repaint(); }
void linearTrans::showLinearTrans() { this->a=ui->lineEdit->text().toDouble(); this->b=ui->lineEdit_2->text().toDouble(); calcLinear(mtx,&mtx_linear_trans,a,b); IplImage* img=new IplImage(mtx); IplImage* img_linear_trans=new IplImage(mtx_linear_trans); QImage qmtx=IplImage2QImage(img); QImage qmtx_linear_trans=IplImage2QImage(img_linear_trans); //QImage qimg((uchar*)mtxQimg.data,mtxQimg.cols,mtxQimg.rows,QImage::Format_RGB32); ui->label_5->setPixmap(QPixmap::fromImage(qmtx)); ui->label_4->setPixmap(QPixmap::fromImage(qmtx_linear_trans)); repaint(); }
// Decodes the AVI file at `s`, saving each (vertically flipped) frame as
// frame_<n>.bmp via concurrent tasks.
// Returns the number of frames processed, or 0 if the file cannot be opened.
int Converter::processVideo(QString s) {
    QTime timer;
    timer.start();
    QList<QFuture<bool>> fv;
    CvCapture *capture = cvCaptureFromAVI(s.toStdString().c_str());
    if (!capture) {
        return 0;
    }
    IplImage *frame = NULL;
    int frame_number = 0;
    while ((frame = cvQueryFrame(capture))) {
        // Build "frame_<n>.bmp" with one bounded call instead of the
        // original strcpy/sprintf/strcat sequence.
        char filename[100];
        snprintf(filename, sizeof(filename), "frame_%d.bmp", frame_number);
        // cvQueryFrame reuses its internal buffer, so convert (and flip)
        // before the next grab; the QImage owns a deep copy of the pixels.
        QImage img = IplImage2QImage(frame).mirrored(false, true);
        fv.append(QtConcurrent::run(&saveImageToFile, img, QString(filename)));
        frame_number++;
    }
    cvReleaseCapture(&capture);
    // Block until every asynchronous save has completed. Range-for avoids
    // the container copy Qt's foreach macro makes.
    for (QFuture<bool> &f : fv)
        f.waitForFinished();
    std::cerr << "time: " << timer.elapsed() << std::endl;
    return frame_number;
}
void ImageDisplayer::Inverse_2D_blocks_DCT(int coefficient) { QElapsedTimer timer; timer.start(); M88iDCT=img88Inverse_DCT(M88DCT,mtx.rows,mtx.cols,coefficient); int et=timer.elapsed(); double mypsnr=myPSNR(mtx,M88iDCT); double psnr=PSNR(mtx,M88iDCT); IplImage* imgQ=new IplImage(M88iDCT); ImageBlocks=QImage(IplImage2QImage(imgQ)); QString time=QString::number(et,10); QString st("Coefficient used:"); if(coefficient==0) st.append("All"); else { st.append("1/"); st.append(QString::number(coefficient)); } st.append("\n\nInverse Evaluation:\n"); st.append("Inverse Time Elapsed:"); st.append(time); st.append(" ms\n"); st.append("myPSNR:"); st.append(QString::number(mypsnr)); st.append("\ncvPSNR:"); st.append(QString::number(psnr)); st.append("\n"); Evaluation2Dblocks=st; }
void ImageDisplayer::on_actionIn_verse_8_8_blocks_triggered() { QElapsedTimer timer; timer.start(); M88iDCT=img88Inverse_DCT(M88DCT,mtx.rows,mtx.cols,0); int et=timer.elapsed(); double mypsnr=myPSNR(mtx,M88iDCT); double psnr=PSNR(mtx,M88iDCT); IplImage* imgQ=new IplImage(M88iDCT); QImage qimg=IplImage2QImage(imgQ); imageLabel->setPixmap(QPixmap::fromImage(qimg)); QString time=QString::number(et,10); QString st("Inverse 2DDCT on 8*8 blocks finished!\n"); st.append("Time Elapsed:"); st.append(time); st.append(" ms\n"); st.append("myPSNR:"); st.append(QString::number(mypsnr)); st.append("\ncvPSNR:"); st.append(QString::number(psnr)); QMessageBox finish(this); finish.setText(st); finish.exec(); }
// Warps `mtx` by the slider value and displays the result.
void affine::showWarp() {
    warp(mtx, &mtx_warp, ui->horizontalSlider_4->value());
    // Stack IplImage header: the original `new IplImage(...)` was leaked.
    IplImage img = mtx_warp;
    QImage qmtx = IplImage2QImage(&img);
    ui->label->clear();
    ui->label->setPixmap(QPixmap::fromImage(qmtx));
    repaint();
}
// Converts the incoming frame to a QImage and posts it as a custom event to
// the GUI thread for display. Always returns 0.
// NOTE(review): this IplImage2QImage overload returns a heap QImage*;
// ownership appears to pass to QtGuiImageEvent — confirm the event (or its
// receiver) deletes the image, otherwise every frame leaks.
int QtGuiOutputView::showFrame(IplImage *frame) {
    QImage *image = IplImage2QImage (frame);
    QtGuiImageEvent *event = new QtGuiImageEvent (image);
    // Qt takes ownership of posted events and deletes them after dispatch.
    qapp->postEvent (widget, event);
    return 0;
}
// Rotates `mtx` by the slider angle (degrees) and displays the result.
void affine::showRotate() {
    int theta = ui->horizontalSlider->value();
    rotate(mtx, &mtx_rot, theta);
    // Stack IplImage header: the original `new IplImage(...)` was leaked.
    IplImage img = mtx_rot;
    QImage qmtx = IplImage2QImage(&img);
    ui->label->clear();
    ui->label->setPixmap(QPixmap::fromImage(qmtx));
    repaint();
}
// Capture thread body: grab-convert-emit loop that runs until `stopTh` is
// set by another thread.
// NOTE(review): stopTh is written from outside this thread — confirm it is
// declared volatile/atomic, otherwise this read is a data race.
void Camera::run() {
    while(!stopTh) {
        usleep(50); // throttle the grab loop (50 microseconds)
        image = cvQueryFrame(capture); // grab the webcam stream (buffer owned by capture — do not release)
        img=IplImage2QImage(image); // convert IplImage to QImage
        emit emSig2(img); // publish the converted frame to listeners
    }
}
// Stores a deep copy of `img` and pushes its QImage conversion to the
// mouse-interaction label.
void ZoneSelector::setImage(const cv::Mat &img) {
    // cv::Mat assignment already releases the previous buffer via reference
    // counting; the original explicit empty()/release() pair was redundant.
    mImage = img.clone();
    QImage qimg;
    IplImage2QImage(mImage, qimg);
    mMouseLabel->setImage(qimg);
}
void Anaglyph::loadLeft() { QString fileName = QFileDialog::getOpenFileName(this,tr("Open Image"),".",tr("Image Files (*.png *.jpg *.bmp *.jpeg *.tiff *.tif *.p?m *.sr *.jp2);;All Files (*.*);;BMP(*.bmp);;JPEG(*.jpg *.jpeg *.jp2);;PNG(*.png);;TIFF(*.tif *.tiff)")); if (!fileName.isEmpty()) { mtx_left=imread(fileName.toStdString(),1); IplImage* img=new IplImage(mtx_left); QImage qimg=IplImage2QImage(img); ui->label_left->setPixmap(QPixmap::fromImage(qimg)); } }
void affine::showScale() { double factorx=ui->horizontalSlider_2->value()/10.0; double factory=ui->horizontalSlider_3->value()/10.0; //cout<<factorx<<' '<<factory<<endl; scale(mtx,&mtx_sca,factorx,factory); IplImage* img=new IplImage(mtx_sca); QImage qmtx=IplImage2QImage(img); ui->label->clear(); ui->label->setPixmap(QPixmap::fromImage(qmtx)); repaint(); }
// Timer callback: grabs a webcam frame, converts it to a pixmap, repaints,
// and logs the measured frame rate every 50 frames. Unrelated timers are
// forwarded to the base class.
void Webcam::timerEvent(QTimerEvent *event) {
    if (event->timerId() == timerId) {
        IplImage *frame = cvQueryFrame(capture);
        // cvQueryFrame returns NULL on stream end / camera loss; the
        // original passed that NULL straight into the converter.
        if (!frame)
            return;
        QImage image = IplImage2QImage(frame); // convert
        pixmap = QPixmap::fromImage(image);    // convert
        repaint();                             // immediate repaint
        if (++nframes == 50) {
            qDebug("frame rate: %f", // actual frame rate
                   (float) nframes * 1000 / time.elapsed());
            nframes = 0;
            time.restart();
        }
    } else {
        QWidget::timerEvent(event);
    }
}
void Anaglyph::loadRight() { QString fileName = QFileDialog::getOpenFileName(this,tr("Open Image"),".",tr("Image Files (*.png *.jpg *.bmp *.jpeg *.tiff *.tif *.p?m *.sr *.jp2);;All Files (*.*);;BMP(*.bmp);;JPEG(*.jpg *.jpeg *.jp2);;PNG(*.png);;TIFF(*.tif *.tiff)")); if (!fileName.isEmpty()) { mtx_right=imread(fileName.toStdString(),1); IplImage* img=new IplImage(mtx_right); QImage qimg=IplImage2QImage(img); ui->label_right->setPixmap(QPixmap::fromImage(qimg)); } if (mtx_left.size!=mtx_right.size) { QMessageBox qmb;qmb.setText("Please choose two images of the same size!");qmb.exec(); loadRight(); } }
void* VideoLoader::streamingThreadFunc(void* param) { VideoLoader* videoLoader = reinterpret_cast<VideoLoader*> (param); while (1) { pthread_mutex_lock(&videoLoader->capture_mutex); if (!videoLoader->capture) pthread_cond_wait(&videoLoader->capture_cond, &videoLoader->capture_mutex); if (videoLoader->capture) { double fps = cvGetCaptureProperty(videoLoader->capture, CV_CAP_PROP_FPS); struct timespec tf, t2, r; tf.tv_sec = 0; tf.tv_nsec = 1000000000L / fps; GET_ABSOLUTE_TIME(t2); while (videoLoader->capture) { IplImage* img = cvQueryFrame(videoLoader->capture); if (!img) { cvReleaseCapture(&videoLoader->capture); videoLoader->dispatcher->send_signal("videoloader", ABORTED, NULL); } else { videoLoader->dispatcher->send_signal("controller", NEW_IMAGE, IplImage2QImage(img)); /*send the frame*/ ADD_TIME(r, t2, tf); pthread_cond_timedwait(&videoLoader->capture_cond, &videoLoader->capture_mutex, &r); GET_ABSOLUTE_TIME(t2); } } } else { pthread_mutex_unlock(&videoLoader->capture_mutex); break; } pthread_mutex_unlock(&videoLoader->capture_mutex); } }