예제 #1
0
// Begin a capture: shoot immediately when no filter is required, otherwise
// verify the filter device and select the requested filter first, deferring
// the capture until the FILTER_SLOT property reports OK.
void imagesequence::prepareCapture()
{
  INDI_P * tempProp(NULL);

  // Do we need to select filter First??
  if (currentFilter.isEmpty() || currentFilter == i18n("None"))
   captureImage();
  else
  {
     // Filter device/property vanished: abort the whole sequence.
     if (!verifyFilterIntegrity())
     {
        stopSequence();
	return;
     }

     if ( stdDevFilter && ((tempProp = stdDevFilter->dp->findProp("FILTER_SLOT")) != NULL))
     {
     // Capture only after the driver acknowledges the slot change (okState).
     // NOTE(review): connected on every call with no matching disconnect
     // visible here — confirm duplicate connections are handled elsewhere.
     connect (tempProp, SIGNAL(okState()), this, SLOT(captureImage()));
     selectFilter();
     }
     else
       kdDebug() << "Error: std Filter device lost or missing FILTER_SLOT property" << endl;
  }
    
}
예제 #2
0
// Either restart the viewfinder after a capture, or begin a focus search
// (falling back to an immediate capture when the camera cannot lock focus).
void CameraExample::searchAndLock()
{
    m_focusing = false;
    m_focusMessage.clear();

    if (!pictureCaptured) {
        // No picture pending: search and lock focus before shooting.
        if (m_camera->supportedLocks() & QCamera::LockFocus) {
            m_focusing = true;
            m_focusMessage = "Focusing...";
            m_camera->searchAndLock(QCamera::LockFocus);
        } else {
            // No focus functionality, take picture right away
            captureImage();
        }
    } else {
        // A picture was just captured: return to the live viewfinder page.
        pictureCaptured = false;
        m_stackedWidget->setCurrentIndex(0);
        if (m_myVideoSurface) {
            showViewFinder = true;
        }
    }
}
예제 #3
0
// Load a SequenceJob's settings into the capture state and devices, update
// the UI (counts, progress bar, buttons) for non-preview jobs, mark the job
// busy, and start the first exposure.
void Capture::executeJob(SequenceJob *job)
{
    currentCCD    = job->activeCCD;
    currentFilter = job->activeFilter;

    targetChip = job->activeChip;

    // Previews are not part of a batch; disable batch mode for them.
    targetChip->setBatchMode(!job->preview);

    targetChip->setShowFITS(job->showFITS);

    currentCCD->setISOMode(job->isoMode);

    currentCCD->setSeqPrefix(job->prefix);

    // Only move the filter wheel when the job requests a concrete slot.
    if (job->filterPos != -1 && currentFilter != NULL)
        currentFilter->runCommand(INDI_SET_FILTER, &(job->filterPos));

    seqExpose = job->exposure;

    // -1 marks an unbounded (preview) sequence.
    if (job->preview)
        seqTotalCount = -1;
    else
        seqTotalCount = job->count;

    seqDelay = job->delay;

    seqCurrentCount = 0;

    job->status = SequenceJob::JOB_BUSY;

    if (job->preview == false)
    {
        fullImgCountOUT->setText( QString::number(seqTotalCount));
        currentImgCountOUT->setText(QString::number(seqCurrentCount));

        // set the progress info
        imgProgress->setEnabled(true);
        imgProgress->setMaximum(seqTotalCount);
        imgProgress->setValue(seqCurrentCount);

        updateSequencePrefix(job->prefix);
        job->statusCell->setText(job->statusStrings[job->status]);
    }

    // Update button status
    startB->setEnabled(false);
    stopB->setEnabled(true);
    previewB->setEnabled(false);

    pi->startAnimation();

    activeJob = job;

    // Anything other than the primary CCD chip is treated as the guide head.
    useGuideHead = (targetChip->getType() == ISD::CCDChip::PRIMARY_CCD) ? false : true;

    captureImage();

}
// Re-take the most recent capture (slot saveCount - 1), or warn the user
// when the cameras are not open.
void MainWindow::redocapture()
{
    if (!DHC->cameraOpened) {
        QMessageBox::warning(this,tr("Warning"), tr("Open cameras failed."));
        return;
    }
    captureImage("", saveCount - 1, true);
}
예제 #5
0
// Construct the camera wrapper: remember the owning main window and wire
// the internal timer so every timeout triggers captureImage().
// NOTE(review): m_timer is not started here — presumably another member
// function starts it; confirm before relying on periodic captures.
Camera::Camera(MainWindow * p_mainWindow) :
    m_cameraCapture(0)
{
    qDebug("Camera::Camera(MainWindow * p_mainWindow)");
    this->m_mainWindow = p_mainWindow;

    connect(&(this->m_timer), SIGNAL(timeout()), this, SLOT(captureImage()));
}
// Grab the current display contents as a 4-channel (CV_8UC4) cv::Mat.
// Returns an empty Mat when the capture yields no pixels.
cv::Mat ImageFromDisplay::getImage()
{
    std::vector<std::uint8_t> pixels;
    int height, width, bpp;
    height = width = bpp = 0;
    captureImage(pixels, width, height);
    if (width && height)
    {
        // The cv::Mat(rows, cols, type, data) constructor only *wraps* the
        // buffer — it does not copy it.  clone() deep-copies the pixels so
        // the returned Mat does not dangle once the local 'pixels' vector
        // is destroyed at function exit (the original returned a Mat whose
        // data pointer died with the vector).
        return cv::Mat(height, width, CV_8UC4, pixels.data()).clone();
    }
    return cv::Mat();
}
예제 #7
0
파일: main.c 프로젝트: jdsutton/Nachtfalter
int aimAtSideMostTarget(int side){
	int most_blob, i, center, error;
	int attempts = 0;

	do{
		captureImage();
		openImage(SOURCE_FILE_NAME);
		gaussianSmooth();
		isolateRed();
		findObjects();
		if(numBlobs <= 0)
		{
			//printf("Couldn't find any targets =(\n");
			return(0);
		}

		most_blob = 0;
		if (side)
		{
			for(i = 1; i < numBlobs; i++)
			{
				if (blobs[most_blob].rightX < blobs[i].rightX && blobs[i].rightX - blobs[i].leftX > MIN_BLOB_WIDTH)
				{
					most_blob = i;
				}
			}
		}
		else
		{
			for(i = 1; i < numBlobs; i++)
			{
				if (blobs[most_blob].leftX > blobs[i].leftX && blobs[i].rightX - blobs[i].leftX > MIN_BLOB_WIDTH)
				{
					most_blob = i;
				}
			}
		}
		if(blobs[most_blob].rightX - blobs[most_blob].leftX >= MIN_BLOB_WIDTH)
		{
			//printf("Found target:\n");
			//printf("   Width: %d\n", blobs[most_blob].rightX-blobs[most_blob].leftX);
			center = (blobs[most_blob].rightX-blobs[most_blob].leftX)/2 + blobs[most_blob].leftX;
			//printf("   Center: %d\n", center);
			error = center - IMAGE_CENTER;
			if(abs(error) <= MAX_TARGET_ERROR)
			{
				break;
			}
			aimAtTarget(error);
		}
		attempts++;
	}
	while (abs(error) > MAX_TARGET_ERROR && attempts < MAX_AIM_ATTEMPTS);	

	return(1);
}
예제 #8
0
void FlyCamera::startAutoCapture(){
    capturing = true;
    qDebug() << "Starting autoCapture";
	getCamera()->StartCapture();
    while(capturing){
        QImage image = captureImage();
		AbstractCamera::sendFrame(image);
    }
    qDebug() << "Stoped autoCapture !";
}
예제 #9
0
// Step the robot forward, re-analyzing the camera image after each step,
// until the analysis reports the IDP_0_CROSS marking.
void approachTrack()
{
	PrintTextOnPobTerminal("Approach Track Task\n");
	int crossCode = -1;
	while (crossCode != IDP_0_CROSS)
	{
		captureImage();
		decipherImage(2);
		crossCode = getCanCross(2);
		moveForward(1);
	}
}
예제 #10
0
// Capture a single frame with hardware triggering temporarily disabled,
// restoring the camera's previous trigger mode before returning.
QImage FlyCamera::retrieveImage()
{
    TriggerMode triggerMode;
    TriggerMode oldTrigger;

    // Save the current trigger mode, then turn triggering off so a frame
    // can be grabbed immediately.
    cam->GetTriggerMode(&oldTrigger);
    triggerMode.onOff = false;
    cam->SetTriggerMode(&triggerMode);

    getCamera()->StartCapture();
    QImage image = captureImage();
    // Restore the caller's trigger configuration and stop streaming.
    cam->SetTriggerMode(&oldTrigger);
	getCamera()->StopCapture();  
	return image;
}
예제 #11
0
/** Displays images from the Player camera proxy with Haar detections
 *  drawn on each frame.  Esc (27) exits; Highgui handles the window
 *  close button unreliably, so the keyboard is the way out.
 */
void Camera::displayHaar(char* windowName){

	cvNamedWindow(windowName,0);

	for (;;){
		if (cvWaitKey(33) == 27)
			break;
		frame = captureImage();
		detectAndDrawHaar(frame, 1.3);
		cvShowImage(windowName, frame);
		cvReleaseImage(&frame);
	}

	cvDestroyWindow(windowName);
}
void MainWindow::capturecalib()
{
    if(DHC->cameraOpened){
        captureImage("", saveCount, true);
        ui->currentPhotoLabel->setText(QString::number(saveCount));
        saveCount++;
        QString explain = ":/" + QString::number(saveCount) + ".png";
        ui->explainLabel->setPixmap(explain);
        if(saveCount == CALIBIMGNUM+1){
            saveCount = 1;
            ui->calibButton->setEnabled(true);
        }
    }
    else
        QMessageBox::warning(this, tr("Warning"), tr("Open cameras failed."));
}
예제 #13
0
/** Displays images from the Player camera proxy as a raw live stream.
 *  Esc (27) exits; Highgui handles the window close button unreliably.
 */
void Camera::displayStream(char* windowName){

	cvNamedWindow(windowName,0);

	for (;;){
		char key = cvWaitKey(33);
		frame = captureImage();
		cvShowImage(windowName, frame);
		cvReleaseImage(&frame);
		if (key == 27)
			break;
	}

	cvDestroyWindow(windowName);
}
예제 #14
0
// Show the camera stream with detected tetris pieces drawn on each frame
// until the user presses Esc (27).  Highgui handles the window close
// button unreliably, so the keyboard is the way out.
void Camera::displayTetris(char* windowName){

	// Memory storage that will hold all the dynamic detection data.
	storage = cvCreateMemStorage(0);

	cvNamedWindow(windowName,0);
	for (;;){
		if (cvWaitKey(33) == 27)
			break;
		frame = captureImage();
		drawTetris(frame, findTetris(frame, storage));
		cvShowImage(windowName, frame);
		cvReleaseImage(&frame);
	}
	cvDestroyWindow(windowName);
}
예제 #15
0
// Move the INDI filter wheel to the slot chosen in filterPosCombo.  If the
// wheel is already at the requested slot, capture immediately; otherwise
// write the new slot value to the driver (the capture itself is triggered
// elsewhere once the property reports OK).
void imagesequence::selectFilter()
{

  INDI_P * filterProp(NULL);
  INDI_E * filterElem(NULL);
  INDI_D * filterDev(NULL);
  INDIMenu *devMenu = ksw->getINDIMenu();

  // Let's select a new filter in acoord with the settings
  // We need to take into consideration the following conditions:
  // A. The device has been disconnected.
  // B. The device has been lost.
  // C. The property is still busy.
  // D. The property has been lost.
  
  // We have a filter, let's check if it's valid
  if (!verifyFilterIntegrity())
   return;

  // NOTE(review): no NULL checks below — presumably verifyFilterIntegrity()
  // guarantees device, property and element exist; confirm before refactoring.
  filterDev = devMenu->findDeviceByLabel(currentFilter);
  filterProp = filterDev->findProp("FILTER_SLOT");
  filterElem = filterProp->findElement("SLOT");

  // Do we need to change the filter position??
  if (filterPosCombo->currentItem() == filterElem->read_w->text().toInt())
  {
	captureImage();
	return;
  }

  // Only writable (RW/WO) properties may be changed.
  if (filterProp->perm == PP_RW || filterProp->perm == PP_WO)
  {
    filterElem->targetValue = filterPosCombo->currentItem();
    // Use the spin box widget when present, else the plain text field.
    if (filterElem->spin_w)
    {
      filterElem->spin_w->setValue(filterElem->targetValue);
      filterElem->spinChanged(filterElem->targetValue);
    }
    else
     filterElem->write_w->setText(QString("%1").arg(filterElem->targetValue));
      
    // We're done! Send it to the driver
    filterProp->newText();
  }

}
예제 #16
0
// Build the control panel: wire the connect button to camera setup and
// monitoring, forward shoot-timer ticks as captureImage() signals, relay
// trigger events, and select the default refresh rate ("0.3").
ControlWidget::ControlWidget(QWidget *parent) :
    QWidget(parent),
    ui(new Ui::ContolWidget)
{
    ui->setupUi(this);

    // isConnected only feeds the Q_ASSERTs; Q_UNUSED silences release builds.
    bool isConnected = false; Q_UNUSED(isConnected);
    //isConnected = connect(ui->buttonConnect, SIGNAL(clicked(bool)), this, SIGNAL(captureImage()));            Q_ASSERT(isConnected);
    //isConnected = connect(ui->buttonContiniousMode, SIGNAL(clicked()), this, SLOT(toggleContiniousMode())); Q_ASSERT(isConnected);
    isConnected = connect(ui->buttonConnect, SIGNAL(clicked()), this, SLOT(connectCamera()));             Q_ASSERT(isConnected);
    isConnected = connect(ui->buttonConnect, SIGNAL(clicked()), this, SLOT(startMonitoring()));             Q_ASSERT(isConnected);
    isConnected = connect(&shootTimer, SIGNAL(timeout()), this, SIGNAL(captureImage()));                    Q_ASSERT(isConnected);
    isConnected = connect(ui->triggerwidget, SIGNAL(triggered()), this, SIGNAL(triggered())); Q_ASSERT(isConnected);

    // Defaults
    int index = ui->comboBoxRefreshRate->findText("0.3", static_cast<Qt::MatchFlags>( Qt::MatchExactly));
    ui->comboBoxRefreshRate->setCurrentIndex(index);
}
예제 #17
0
// Toggle the camera connection, using the button's label as the state.
// Connect: start the capture thread (first time only) and route frames to
// updateUI().  Disconnect: detach the frame signal and reset the UI.
void MainWindow::connectCamera()
{
    if(Connect->text()=="Connect"){
        qDebug() << "Connect";
        // The thread is started only once; the click_event hookup also
        // happens only on that first start.
        if(!camera_thread->isRunning()){
            camera_thread->start(QThread::HighPriority);
            connect(mylabel,SIGNAL(click_event()),camera_thread,SLOT(captureImage()));
        }
        camera_thread->connect(camera_thread,SIGNAL(send_UIupdate(QImage)),this,SLOT(updateUI(QImage)));
        Connect->setText("Disconnect");
        Connect->setStatusTip("Disconnect from the default camera");
    } else {
        // Stop forwarding frames to the UI; the thread keeps running.
        camera_thread->disconnect(camera_thread,SIGNAL(send_UIupdate(QImage)),this,SLOT(updateUI(QImage)));
        qDebug() << "Disconnect";
        mylabel->setText("<font size='50' color='gray'>Not connected to camera</font>");
        Connect->setText("Connect");
        Connect->setStatusTip("Connect to the default camera");
    }
}
예제 #18
0
// React to camera lock-status changes: on an acquired focus lock, capture
// a new image and release the lock; on any failed/lost lock while we were
// focusing, show a retry message.
void CameraExample::lockStatusChanged(QCamera::LockStatus status, QCamera::LockChangeReason reason)
{
    if (status == QCamera::Locked) {
        if (reason != QCamera::LockAcquired) {
            if (m_focusing)
                m_focusMessage = "No focus, try again";
            return;
        }
        // Focus locked: clear focusing state, shoot, then unlock.
        m_focusMessage.clear();
        m_focusing = false;
        captureImage();
        m_camera->unlock();
        return;
    }

    if (status == QCamera::Unlocked && m_focusing)
        m_focusMessage = "No focus, try again";
}
예제 #19
0
/* Render one AR frame: set up the 3D camera projection, load the marker
 * transform (patt_trans) into the modelview matrix, draw a lit blue solid
 * cube sitting on the marker, then hand off to captureImage(). */
static void draw( void )
{
    double    gl_para[16];
    GLfloat   mat_ambient[]     = {0.0, 0.0, 1.0, 1.0};
    GLfloat   mat_flash[]       = {0.0, 0.0, 1.0, 1.0};
    GLfloat   mat_flash_shiny[] = {50.0};
    GLfloat   light_position[]  = {100.0,-200.0,200.0,0.0};
    GLfloat   ambi[]            = {0.1, 0.1, 0.1, 0.1};
    GLfloat   lightZeroColor[]  = {0.9, 0.9, 0.9, 0.1};

   
    argDrawMode3D();
    argDraw3dCamera( 0, 0 );
    glClearDepth( 1.0 );
    glClear(GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);
    
	/* load the camera transformation matrix */
    argConvGlpara(patt_trans, gl_para);
    glMatrixMode(GL_MODELVIEW);
    glLoadMatrixd( gl_para );

    /* One light plus blue ambient/specular material for the cube. */
    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, light_position);
    glLightfv(GL_LIGHT0, GL_AMBIENT, ambi);
    glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor);
    glMaterialfv(GL_FRONT, GL_SPECULAR, mat_flash);
    glMaterialfv(GL_FRONT, GL_SHININESS, mat_flash_shiny);	
    glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
    glMatrixMode(GL_MODELVIEW);
    /* Lift by half the cube size so it rests on the marker plane. */
    glTranslatef( 0.0, 0.0, 25.0 );
    glutSolidCube(50.0);

    glDisable( GL_LIGHTING );

    glDisable( GL_DEPTH_TEST );

	captureImage();

}
예제 #20
0
// Set up the main window: create the camera, viewfinder and image-capture
// objects, embed the viewfinder in the layout, start the camera, and wire
// the capture/save/quit buttons.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    camera=new QCamera(this);
    viewfinder=new QCameraViewfinder(this);
    imageCapture=new QCameraImageCapture(camera);

    ui->horizontalLayout_View->addWidget(viewfinder);
    ui->label_Display->setScaledContents(true);

    camera->setViewfinder(viewfinder);
    camera->start();

    // Captured frames are routed to displayImage() for preview.
    connect(imageCapture, SIGNAL(imageCaptured(int,QImage)), this, SLOT(displayImage(int,QImage)));

    connect(ui->pushButton_Capture, SIGNAL(clicked()), this, SLOT(captureImage()));
    connect(ui->pushButton_Save, SIGNAL(clicked()), this, SLOT(saveImage()));
    connect(ui->pushButton_Quit, SIGNAL(clicked()), qApp, SLOT(quit()));
}
예제 #21
0
파일: streamwg.cpp 프로젝트: KDE/kstars
// Video-stream widget: load the play/pause/record icons, populate the
// image-format combo with every format QImageWriter can save, and wire the
// play and capture buttons.
StreamWG::StreamWG(QWidget * parent) : QWidget(parent)
{

    setupUi(this);
    streamWidth    = streamHeight = -1;
    processStream  = colorFrame = false;

    streamFrame      = new VideoWG(videoFrame);

    // Theme icons with bundled Breeze SVGs as fallback.
    playPix    = QIcon::fromTheme( "media-playback-start", QIcon(":/icons/breeze/default/media-playback-start.svg"));
    pausePix   = QIcon::fromTheme( "media-playback-pause", QIcon(":/icons/breeze/default/media-playback-pause.svg"));
    capturePix = QIcon::fromTheme( "media-record", QIcon(":/icons/breeze/default/media-record.svg"));

    // Offer every image format the Qt image writer supports.
    foreach (const QByteArray &format, QImageWriter::supportedImageFormats())
    imgFormatCombo->addItem(QString(format));

    playB->setIcon(pausePix);
    captureB->setIcon(capturePix);

    connect(playB, SIGNAL(clicked()), this, SLOT(playPressed()));
    connect(captureB, SIGNAL(clicked()), this, SLOT(captureImage()));
}
예제 #22
0
/** Displays images from the Player camera proxy.
 *
 *  Pressing 'p' (112) saves the current frame as imageNNN.bmp (counter in
 *  the filename); Esc (27) exits the loop.
 */
void Camera::captureStream(char* windowName){

	cvNamedWindow(windowName,0);
	int count=0;
	char imageName[] = "image000.bmp";
	// Esc breaks loop.  Highgui is responding strangly to close window button
	while ( 1 ){

		frame = captureImage();
		char c = cvWaitKey(33);
		if(c == 112){
			IplImage* tmpImg;
			// Clone frame so the conversion target has exactly the same
			// size/format as the source.
			tmpImg = cvCloneImage(frame);
			cvCvtColor(frame, tmpImg, CV_BGR2RGB);
			// Patch the three digits of "image000.bmp" in place.
			// (Tens digit must be (count/10)%10 — the original used
			// count/10, which produced a non-digit once count >= 100.)
			imageName[7] = (char)(count % 10+48);
			if(count >= 10 )
				imageName[6] =(char)((count / 10) % 10+48);
			if(count >= 100 )
				imageName[5] =(char)(count / 100+48);
			count++;
			// Save the color-converted clone (the original saved the
			// unconverted 'frame', making the conversion dead work)...
			if(cvSaveImage(imageName, tmpImg) == 0){
				printf("Error saving image");
			}
			printf("Saved %s\n", imageName);
			// ...and release it: the original leaked one clone per save.
			cvReleaseImage(&tmpImg);
		}
		cvShowImage(windowName, frame);
		cvReleaseImage(&frame);
		if(c == 27) break;
	}

	cvDestroyWindow(windowName);
	return;
}
예제 #23
0
파일: main.cpp 프로젝트: MrGobblez/Catch21
// Wire up the capture -> processing -> tracking/serial pipeline across five
// worker threads, connect all cross-thread signals, start the threads, and
// run the menu's blocking input loop.
// NOTE(review): heap objects and threads are never deleted/joined here —
// presumably process exit is the intended cleanup; confirm.
int main()
{
    // Objects
    CameraInput *camera = new CameraInput();
    Control *controller = new Control();
    Process *processer = new Process();
    Tracking *tracker = new Tracking();
    Serial_Communication *serial = new Serial_Communication("/dev/ttyUSB0", "/dev/ttyUSB1");
//    Serial_Communication *serial = new Serial_Communication("/dev/ttyUSB0");// #### For testing with only one arduino
    File_Handler *file_Handler = new File_Handler();
    Window_Handler *window_Handler = new Window_Handler();
    Menu *menu = new Menu();


    // Threads
    QThread *t1 = new QThread;
    QThread *t2 = new QThread;
    QThread *t3 = new QThread;
    QThread *t4 = new QThread;
    QThread *t5 = new QThread;

    // t3 intentionally hosts both the tracker and the serial link.
    camera->moveToThread(t1);
    processer->moveToThread(t2);
    tracker->moveToThread(t3);
    serial->moveToThread(t3);
    controller->moveToThread(t4);
    file_Handler->moveToThread(t5);

    // Connect signals to slots. Whenever a signal is emitted in a function, its corresponding (connected) function will run.
    // cv::Mat must be registered before it can cross thread boundaries
    // in queued signal/slot connections.
    qRegisterMetaType<cv::Mat>("cv::Mat");

    //Signals calling from:
    //Main thread
    QObject::connect(menu, SIGNAL(startRecording(bool)), controller, SLOT(startRecording(bool)));
    QObject::connect(menu, SIGNAL(stopRecording()), controller, SLOT(stopRecording()));
    QObject::connect(menu, SIGNAL(displayMenu(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(menu, SIGNAL(requestDataFromFootController()), serial, SLOT(receiveDataFromFootControllerLoop()));
    QObject::connect(menu, SIGNAL(startHighRep()), controller, SLOT(startDelayMode()));
    QObject::connect(menu, SIGNAL(decreaseDelay()), controller, SLOT(decreaseDelay()));
    QObject::connect(menu, SIGNAL(increaseDelay()), controller, SLOT(increaseDelay()));
    QObject::connect(menu, SIGNAL(modeSwitch()), controller, SLOT(endMode()));
    QObject::connect(menu, SIGNAL(startPlayback()), file_Handler, SLOT(readFromFile()));
    QObject::connect(menu, SIGNAL(stopPlayback()), file_Handler, SLOT(stopVideo()));
    QObject::connect(menu, SIGNAL(toggleSlowMotion()), file_Handler, SLOT(toggleSlowMotion()));
    QObject::connect(menu, SIGNAL(toggleTracking()), controller, SLOT(toggleTracking()));

    //Thread 1
    QObject::connect(t1, SIGNAL(started()), camera, SLOT(captureImage()));
    QObject::connect(camera, SIGNAL(capturedImage(cv::Mat)), controller, SLOT(inputImage(cv::Mat)));

    //Thread 2
    QObject::connect(t2, SIGNAL(started()), controller, SLOT(processerReady()));
    QObject::connect(processer, SIGNAL(posXposY(int,int)), tracker, SLOT(position(int,int)));
    QObject::connect(processer, SIGNAL(readyForWork()), controller, SLOT(processerReady()));

    //Thread 3
    QObject::connect(tracker, SIGNAL(directionAndSpeed(int,int)), serial, SLOT(sendDataToControlUnit(int,int)));
    QObject::connect(serial, SIGNAL(fromFootController(char)), menu, SLOT(giveInput(char)));

    //Thread 4
    QObject::connect(t4, SIGNAL(started()), controller, SLOT(fileHandlerReadyToWrite()));
    QObject::connect(controller, SIGNAL(imageToProcess(cv::Mat)), processer, SLOT(processImage(cv::Mat)));
    QObject::connect(controller, SIGNAL(requestImage()), camera, SLOT(captureImage()));
    QObject::connect(controller, SIGNAL(imageToRecord(cv::Mat)), file_Handler, SLOT(writeImage(cv::Mat)));
//    QObject::connect(controller, SIGNAL(imageToShow(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(processer, SIGNAL(processedImage(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(controller, SIGNAL(stopMotor()), serial, SLOT(stopMotor()));

    //Thread 5
    QObject::connect(file_Handler, SIGNAL(showFrame(cv::Mat)), window_Handler, SLOT(drawImage(cv::Mat)));
    QObject::connect(file_Handler, SIGNAL(readyToWrite()), controller, SLOT(fileHandlerReadyToWrite()));
    QObject::connect(file_Handler, SIGNAL(timeout()), file_Handler, SLOT(playVideo()));
    QObject::connect(file_Handler, SIGNAL(playbackEnded()), menu, SLOT(returnToLowRep()));

    // Starting Threads
    t1->start();
    t2->start();
    t3->start();
    t4->start();
    t5->start();

//    menu->menu();
    // Blocks until the user quits the menu.
    menu->inputHandler();
    return 0;
}
예제 #24
0
// Head-pose demo: track facial features from the default camera, draw the
// face coordinate axes and feature points, and show a homography-warped
// frontal view.  Usage: <prog> user_profile_dir camera_profile.yaml.
// 'q' quits.
int main(int argc, const char * argv[])
{
    
    ft_data ftdata;
    if (argc<3) {
        cout<<argv[0]<<" user_profile_dir camera_profile.yaml";
        return 0;
    }

    fs::path baseDirPath(argv[1]);
    ASM_Gaze_Tracker poseTracker(baseDirPath / "trackermodel.yaml", fs::path(argv[2]));
    
    
    // Origin plus the three 50-unit axis endpoints of the face frame.
    vector<Point3f> faceCrdRefVecs;
    faceCrdRefVecs.push_back(Point3f(0,0,0));
    faceCrdRefVecs.push_back(Point3f(50,0,0));
    faceCrdRefVecs.push_back(Point3f(0,50,0));
    faceCrdRefVecs.push_back(Point3f(0,0,50));
    
    VideoCapture cam;
    cam.open(0);
    if(!cam.isOpened()){
        return 0;
    }
    Mat rvec, tvec;
    Mat im;
    // Prime the capture once before the loop (presumably to size 'im' /
    // warm up the camera — confirm against captureImage's contract).
    captureImage(cam,im);
    

    while(true){
        bool success = captureImage(cam, im, true);
        if (success == false) {
            break;
        }
        
        bool succeeded = poseTracker.featureTracking(im);
        if (succeeded)
            poseTracker.estimateFacePose();

        
        
        
        Mat frontim,flipback;
        flip(im,flipback,1);
        
        // Re-project the axis endpoints and tracked features into the image.
        vector<Point2f> reprjCrdRefPts;
        vector<Point2f> reprjFeaturePts;
        poseTracker.projectPoints(poseTracker.facialPointsIn3D, reprjFeaturePts);
        poseTracker.projectPoints(faceCrdRefVecs, reprjCrdRefPts);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[1], Scalar(255,0,0),2);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[2], Scalar(0,255,0),2);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[3], Scalar(0,0,255),2);
        drawPoints(im, reprjFeaturePts);
        drawStringAtTopLeftCorner(im, "distance to camera:" + boost::lexical_cast<string>(poseTracker.distanceToCamera()));
        imshow("head pose",im);
        
        // Warp the mirrored frame to a frontal view via a homography from
        // the tracked 2D points to their mirrored positions.
        vector<Point2f> transformedPoints = poseTracker.tracker.points;
        fliplr(transformedPoints, im.size());
        Mat part;
        
        Mat hM = findHomography(poseTracker.facialPointsIn2D ,transformedPoints, 0);
        warpPerspective(flipback(boundingRect(transformedPoints)), frontim, hM, im.size());
        // NOTE(review): shows 'im', not the computed 'frontim' — possibly
        // intentional during debugging; confirm.
        imshow("front", im);

        
        int c = waitKey(1)%256;
        if(c == 'q')break;
        
    }
    
}
예제 #25
0
/* CAPTURE IMG TASKS */
/* Capture an image for the 'left' position and decode it with
 * decipherImage mode 0 (mode semantics defined elsewhere). */
void captureImgLeft()
{
	PrintTextOnPobTerminal("Capture Image Left Task\n");
	captureImage();
	decipherImage(0);
}
예제 #26
0
/* Capture an image for the 'right' position and decode it with
 * decipherImage mode 4 (mode semantics defined elsewhere). */
void captureImgRight()
{
	PrintTextOnPobTerminal("Capture Image Right Task\n");
	captureImage();
	decipherImage(4);
}
// Run a full structured-light scan: project the selected code pattern
// (Gray code, Gray+epipolar, or multi-frequency), grab a stereo snapshot
// for every pattern frame, and save each pair under
// projChildPath/{left,right}/<scanSN>/.  Restores live preview afterwards.
void MainWindow::startscan()
{
    // Without detected mark points (scanSN < 0) the result cannot be
    // aligned; ask the user whether to continue anyway.
    if (scanSN < 0)
    {
        if (QMessageBox::warning(this,tr("Mark Point Need to be Found"), tr("Scan result can't be aligned,continue?")
                                 ,QMessageBox::Yes,QMessageBox::No) == QMessageBox::No)
            return;
        else
            scanSN = 0;
    }
    ui->progressBar->reset();
    nowProgress = 0;

    // Stop live preview and hand the projector an OpenCV window.
    DHC->closeCamera();
    timer->stop();
    pj->displaySwitch(false);
    pj->opencvWindow();
    // Generate the pattern sequence and show its first frame.
    if (codePatternUsed == GRAY_ONLY){
        grayCode = new GrayCodes(scanWidth,scanHeight,false);
        grayCode->generateGrays();
        pj->showMatImg(grayCode->grayCodes[0]);
    }
    else if (codePatternUsed == GRAY_EPI){
        grayCode = new GrayCodes(scanWidth,scanHeight,true);
        grayCode->generateGrays();
        pj->showMatImg(grayCode->grayCodes[0]);
    }
    else{
        mf = new MultiFrequency(this, scanWidth, scanHeight);
        mf->generateMutiFreq();
        pj->showMatImg(mf->getNextMultiFreq());
    }
    progressPop(6);

    int imgCount = 0;

    // Per-scan output folders, e.g. <proj>/left/3/ and <proj>/right/3/.
    QString pref = QString::number(scanSN) + "/";
    QDir *addpath_1 = new QDir;
    QDir *addpath_2 = new QDir;
    addpath_1->mkpath(projChildPath + "/left/" + pref);
    addpath_2->mkpath(projChildPath +"/right/" + pref);

    // Project each pattern frame, snapshot both cameras, save the pair.
    while(true){
        cv::waitKey(100);  // let the projector settle on the new frame
        DHC->daHengSnapShot(1);
        image_1 = QImage(DHC->m_pRawBuffer_1, cameraWidth, cameraHeight, QImage::Format_Indexed8);
        pimage_1 = QPixmap::fromImage(image_1);
        DHC->daHengSnapShot(2);
        image_2 = QImage(DHC->m_pRawBuffer_2, cameraWidth, cameraHeight, QImage::Format_Indexed8);
        pimage_2 = QPixmap::fromImage(image_2);

        captureImage(pref, imgCount, false);
        imgCount++;

        // Advance to the next pattern frame or finish when all are done.
        if (codePatternUsed == GRAY_ONLY){
            if(imgCount == grayCode->getNumOfImgs())
                break;
            pj->showMatImg(grayCode->grayCodes[imgCount]);
            progressPop(2);
        }
        else if (codePatternUsed == GRAY_EPI){
            if(imgCount == grayCode->getNumOfImgs())
                break;
            pj->showMatImg(grayCode->grayCodes[imgCount]);
            progressPop(4);
        }
        else{
            if(imgCount == mf->getNumOfImgs())
                break;
            pj->showMatImg(mf->getNextMultiFreq());
            progressPop(7);
        }
    }
    // Resume live preview.
    DHC->openDaHengCamera(cameraWidth,cameraHeight);
    timer->start();
    pj->destoryWindow();
    pj->displaySwitch(true);

    ui->progressBar->setValue(100);
}
예제 #28
0
/* Capture an image for the 'left center' position and decode it with
 * decipherImage mode 1 (mode semantics defined elsewhere). */
void captureImgLeftCenter()
{
	PrintTextOnPobTerminal("Capture Image Left Center Task\n");
	captureImage();
	decipherImage(1);
}
예제 #29
0
/* Capture an image for the 'right center' position and decode it with
 * decipherImage mode 3 (mode semantics defined elsewhere). */
void captureImgRightCenter()
{
	PrintTextOnPobTerminal("Capture Image Right Center Task\n");
	captureImage();
	decipherImage(3);
}
예제 #30
0
// test pushing to github by xcode
// Head-pose demo (earlier variant of the warped-frontal-view version):
// track facial features from the default camera and draw the face
// coordinate axes plus the camera distance.  Usage:
// <prog> user_profile_dir camera_profile.yaml.  'q' quits, 'd' re-detects.
int main(int argc, const char * argv[])
{
    
    ft_data ftdata;
    if (argc<3) {
        cout<<argv[0]<<" user_profile_dir camera_profile.yaml";
        return 0;
    }
//    string fname =string(argv[1]);
//    ftdata = load_ft_jzp(fname);
//    face_tracker tracker = load_ft<face_tracker>(string(ftdata.baseDir+"trackermodel.yaml").c_str());
//    tracker.detector.baseDir = ftdata.baseDir;
//    
//    Mat cameraMatrix, distCoeffs;
//    readCameraProfile(fs::path(argv[2]), cameraMatrix, distCoeffs);
    
    fs::path baseDirPath(argv[1]);
    ASM_Gaze_Tracker poseTracker(baseDirPath / "trackermodel.yaml", fs::path(argv[2]));
    
    
    
//    vector<Point3f> faceFeatures ; //findBestFrontalFaceShape(smodel);
    // Origin plus the three 50-unit axis endpoints of the face frame.
    vector<Point3f> faceCrdRefVecs;
    faceCrdRefVecs.push_back(Point3f(0,0,0));
    faceCrdRefVecs.push_back(Point3f(50,0,0));
    faceCrdRefVecs.push_back(Point3f(0,50,0));
    faceCrdRefVecs.push_back(Point3f(0,0,50));
    
//    vector<Point2f> frontPerspective2D = findBestFrontalFaceShape2D(ftdata);
    
    VideoCapture cam;
    cam.open(0);
    if(!cam.isOpened()){
        return 0;
    }
    Mat rvec, tvec;
    Mat im;
    // Prime the capture once before the loop (presumably to size 'im' /
    // warm up the camera — confirm against captureImage's contract).
    captureImage(cam,im);
    

    while(true){
        bool success = captureImage(cam, im, true);
        if (success == false) {
            break;
        }
        
        bool succeeded = poseTracker.featureTracking(im);
        if (succeeded)
            poseTracker.estimateFacePose();

//        Mat hM = findHomography(featuresTruPts ,frontPerspective2D, 0);
//        Mat frontim;
//        Mat gray;
//        warpPerspective(im.clone(), frontim, hM, im.size());
//        imshow("front", frontim);
        
        
        
        
        // Re-project the face-frame axes and draw them in RGB order.
        vector<Point2f> reprjCrdRefPts;

        poseTracker.projectPoints(faceCrdRefVecs, reprjCrdRefPts);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[1], Scalar(255,0,0),2);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[2], Scalar(0,255,0),2);
        line(im, reprjCrdRefPts[0], reprjCrdRefPts[3], Scalar(0,0,255),2);
        drawStringAtTopLeftCorner(im, "distance to camera:" + boost::lexical_cast<string>(poseTracker.distanceToCamera()));
        imshow("head pose",im);
        
        int c = waitKey(1);
        if(c == 'q')break;
        else if(c == 'd') poseTracker.reDetectFace();
        
    }
    
}