Example #1
void Camera::put(Telecommand &tc){
	Telecommand* executable = &tc;

	if(executable->dst != 4) return;
	int16_t cmd = (int16_t) roundf(executable->message);
	continous = false;

	switch(cmd){
	case 1001:
		//PRINTF("Sending Picture\n");
		break;
	case 1002:
		break;
	case 1003:
		// Stop continuous mode and picture transmission
		continous = false;
		sendPic = false;
		PRINTF("Cam active: %d\n", continous);
		break;
	case 1004:
		// Re-send the picture properties with the next transmission
		propertiesSent = false;
		break;
	case 1005:
		// Take a new picture and mark it for transmission
		propertiesSent = false;
		takePicture();
		sendPic = true;
		PRINTF("Cam active. \n");
		break;
	default:
		PRINTF("Camera could not interpret command\n");
	}
}
Example #2
AddObjectDialog::AddObjectDialog(Camera * camera, const cv::Mat & image, bool mirrorView, QWidget * parent, Qt::WindowFlags f) :
		QDialog(parent, f),
		camera_(camera),
		object_(0)
{
	ui_ = new Ui_addObjectDialog();
	ui_->setupUi(this);

	connect(ui_->pushButton_cancel, SIGNAL(clicked()), this, SLOT(cancel()));
	connect(ui_->pushButton_back, SIGNAL(clicked()), this, SLOT(back()));
	connect(ui_->pushButton_next, SIGNAL(clicked()), this, SLOT(next()));
	connect(ui_->pushButton_takePicture, SIGNAL(clicked()), this, SLOT(takePicture()));
	connect(ui_->comboBox_selection, SIGNAL(currentIndexChanged(int)), this, SLOT(changeSelectionMode()));

	connect(ui_->cameraView, SIGNAL(selectionChanged()), this, SLOT(updateNextButton()));
	connect(ui_->cameraView, SIGNAL(roiChanged(const QRect &)), this, SLOT(updateNextButton(const QRect &)));
	ui_->cameraView->setMirrorView(mirrorView);

	if((camera_ && camera_->isRunning()) || image.empty())
	{
		this->setState(kTakePicture);
	}
	else if(!image.empty())
	{
		update(image);
		this->setState(kSelectFeatures);
	}
}
Example #3
void Camera::run(){
	cameraInitFinished = true;
	while(1){
		if (processData) {
			processData = false;
			uint8_t buffer[16];
			PictureProperties p;
			p.type = GRAY;
			p.Height = HEIGHT;
			p.Width = WIDTH;
			if (sendPic) { // If picture was requested, send
				state = sendingPicture;

				if(!propertiesSent){

					PRINTF("CAMERA_TX_START;%03d;%03d;%01d;PROPS;", p.Height, p.Width, p.type);
					while (!TeleUART.isWriteFinished()) {
					}
					propertiesSent = true;
				}

				// Stream the image buffer byte by byte as zero-padded decimal values
				for(int i = 0; i < IMAGESIZE; i++){
					PRINTF("%03u", DCMI_Buffer[i]);
					while (!TeleUART.isWriteFinished()) {
					}
					suspendCallerUntil(NOW() + 100*MICROSECONDS);
				}

				suspendCallerUntil(NOW() + 1*MILLISECONDS);
				TeleUART.write(";$\n", 4);

				if(!continous){
					sendPic = false;
				}else{
					// In continuous mode, immediately trigger the next capture
					takePicture();
				}

				state = regularMode;
			}

			suspendCallerUntil(NOW()+100*MILLISECONDS); // Could run even faster, but 100 ms is sufficient for mission mode
		}
	}
}
Example #4
void ButtonPoller::run()
{
    int buttons_fd;
    char buttons[6] = {'0', '0', '0', '0', '0', '0'};

    buttons_fd = open("/dev/buttons", 0);
    if (buttons_fd < 0) {
        perror("open device buttons");
        //exit(1);
    }

    printf("Starting button poller thread\n");
    fflush(stdout);

    for (;;) {
        char current_buttons[6];
        int count_of_changed_key;
        int i;
        if (read(buttons_fd, current_buttons, sizeof current_buttons) != sizeof current_buttons) {
            perror("read buttons:");
            //exit(1);
        }

        for (i = 0, count_of_changed_key = 0; i < sizeof buttons / sizeof buttons[0]; i++) {
            if (buttons[i] != current_buttons[i]) {
                buttons[i] = current_buttons[i];
                printf("%skey %d is %s val: %d", count_of_changed_key? ", ": "", i+1, buttons[i] == '0' ? "up" : "down", buttons[i]);
                count_of_changed_key++;
                if(buttons[i] == '0') { // button released
                    printf("Aici buton"); fflush(stdout);
                    emit takePicture();
                }
            }
        }
        /*if (count_of_changed_key) {
            printf("\n");
            printf("bitMask: %d\n", bitMask);
            switch (bitMask) {
                case CHINA:  emit (touchChina()); blinkCountry(CHINA); ledOn(CHINA); break;
                case MEXICO: emit (touchMexico());blinkCountry(MEXICO); ledOn(MEXICO); break;
                case LEFT:   emit swipeLeft();break;
                case RIGHT:  emit swipeRight();break;
                case UP:     emit swipeUp(); break;
                case DOWN:   emit swipeDown(); break;
                default: printf("Not initalized!\n"); fflush(stdout);
            }
        }*/
    }

    close(buttons_fd);
}
Example #5
void testApp::keyReleased(int key){
	if (key == 'd') debug = !debug;
	if (key == 'f') ofToggleFullscreen();
	if(key =='t'){
		ofBuffer buffer;
		buffer = ofBufferFromFile("image_bytes");
		printBitmap(100, 100, (uint8_t *)buffer.getBinaryBuffer());
	}
	if(key == 'p') printBitmap(300, 20, adalogo_data);
	
	if(key == ' '){
		takePicture();
	}
}
Example #6
void CameraView::timer_fired( ofPtr< ofxGenericTimer > timer )
{
    if ( _microphone )
    {
        float power = _microphone->getAveragePowerForChannel( 0 );
        float normalizedPower = _microphone->normalizePower( power );
        _camera->setAlpha( normalizedPower );
        ofxGLogVerbose( ofxGToString( power ) + " " + ofxGToString( normalizedPower ) );
        
        if ( normalizedPower > 0.9 )
        {
            takePicture();
        }
    }
}
Example #7
void MainWindow::workOnNextImage() {
    if(!pause && runCam) {
        // Try to take a picture
        bool gotPicture = takePicture();
        if(gotPicture) {
            displayMat(currentFrame, *ui->imDisplay1);
            processImage();
        }
        else {
            // We couldn't get the next picture, so stop processing
            qWarning("Stopped processing. Please choose a new file or restart the camera.");
            runCam = false;
        }
    }
}
Example #8
File: worker.cpp  Project: ckondrat/paol
void Worker::workOnNextImage() {
    bool gotPicture = takePicture();
    // Update time associated with current frame
    time(&currentImageTime);
    if(gotPicture) {
        // Print image capture success
        printToLog("Took picture in thread %p at time %ld\n", this, currentImageTime);
        // Increase captured image count
        capturedImageCount++;

        processImage();
    }
    else {
        // Print image capture failure
        printToLog("Failed to take picture in thread %p at time %ld\n", this, currentImageTime);
    }
}
Example #9
void FCamera::takePictures()
{
    while(keepTakingPictures())
    {
        adjust();
        // limit how frequently we do activity ...
        if(!isTakePicture())
            sleep();
        else
            takePicture();

        FCam::Event e;
        while (FCam::getNextEvent(&e, FCam::Event::Error)) {
            log(Error, this, "Error: %s\n", e.description.c_str());
        }
    }
}
Example #10
void Snapshot::takePicture(const int time, const QImage& image, const QString prefix)
{
    takePicture(time, image, -1, prefix);
}
Example #11
status_t BnCamera::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case DISCONNECT: {
            LOGV("DISCONNECT");
            CHECK_INTERFACE(ICamera, data, reply);
            disconnect();
            return NO_ERROR;
        } break;
        case SET_PREVIEW_DISPLAY: {
            LOGV("SET_PREVIEW_DISPLAY");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
            reply->writeInt32(setPreviewDisplay(surface));
            return NO_ERROR;
        } break;
        case SET_FRAME_CALLBACK_FLAG: {
            LOGV("SET_FRAME_CALLBACK_TYPE");
            CHECK_INTERFACE(ICamera, data, reply);
            int frame_callback_flag = data.readInt32();
            setFrameCallbackFlag(frame_callback_flag);
            return NO_ERROR;
        } break;
        case START_PREVIEW: {
            LOGV("START_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(startPreview());
            return NO_ERROR;
        } break;
        case STOP_PREVIEW: {
            LOGV("STOP_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            stopPreview();
            return NO_ERROR;
        } break;
        case AUTO_FOCUS: {
            LOGV("AUTO_FOCUS");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(autoFocus());
            return NO_ERROR;
        } break;
        case TAKE_PICTURE: {
            LOGV("TAKE_PICTURE");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(takePicture());
            return NO_ERROR;
        } break;
        case SET_PARAMETERS: {
            LOGV("SET_PARAMETERS");
            CHECK_INTERFACE(ICamera, data, reply);
            String8 params(data.readString8());
            reply->writeInt32(setParameters(params));
            return NO_ERROR;
        } break;
        case GET_PARAMETERS: {
            LOGV("GET_PARAMETERS");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeString8(getParameters());
            return NO_ERROR;
        } break;
        case CONNECT: {
            CHECK_INTERFACE(ICamera, data, reply);
            sp<ICameraClient> cameraClient = interface_cast<ICameraClient>(data.readStrongBinder());
            reply->writeInt32(connect(cameraClient));
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
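
For reference, the proxy side of the TAKE_PICTURE transaction above is the mirror image of this stub: it writes the interface token into a Parcel, calls transact(), and reads back the status. The sketch below follows the usual AOSP Binder idiom; the BpCamera class name and the exact calls are assumptions for illustration, not part of the example above.

// Hypothetical proxy-side counterpart (standard Binder pattern, assumed for illustration)
class BpCamera : public BpInterface<ICamera>
{
public:
    BpCamera(const sp<IBinder>& impl) : BpInterface<ICamera>(impl) {}

    // Ask the remote camera service to take a picture and return its status
    status_t takePicture()
    {
        Parcel data, reply;
        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
        remote()->transact(TAKE_PICTURE, data, &reply);
        return reply.readInt32();
    }
};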
Example #12
int initPositionnement(){
	pthread_t thread;
	threadArg arg;
	char message[32];
	string messageEnvoi, messageRecu;
	short angleRotationR = 0;

	// Initialize the camera:
	initCamera();
	
	// Initialize the magnetometer:
	sem_wait(Arg.verrou_I2C);
	if (i2c_w(ADDR_MAGNETOMETRE, CALIBRATION_START, NULL) != 0){
		puts("Magnétomètre failed");
		// Stop the program
		finProgramme = true;
		pthread_exit(NULL);
	}
	robot.pivotRight(20);
	sleep(20);
	if (i2c_w(ADDR_MAGNETOMETRE, CALIBRATION_END, NULL) != 0){
		puts("Magnétomètre failed");
		// Stop the program
		finProgramme = true;
		pthread_exit(NULL);
	}
	if (i2c_w(ADDR_MAGNETOMETRE, CMD_A, NULL) != 0){
		puts("Magnétomètre failed");
		// Stop the program
		finProgramme = true;
		pthread_exit(NULL);
	}
	sem_post(Arg.verrou_I2C);

	// Initialize the variables describing the current state
	robotTourne = false;
	robotAvance = true;
	flagVirage = 0;
	flagStop = true;
	taskDone = false;

	// Try to find the person by activating the camera and then checking that the person and the robot
	// are facing the same direction. -> If the camera detects a person, the picture capture will be triggered.
	do{
		// Check the person's position relative to the robot using the magnetometers.
		taskDone = false;
		pthread_create(&thread, NULL, magnetometre, (void *) &arg);
		while(taskDone != true){/* Wait for the task to finish */}
		pthread_join(thread, NULL);
		// If the stop order was received in the magnetometer thread
		if (finProgramme == true){
			// End of the thread
			pthread_exit(NULL);
		}

		// Run the check with the magnetometer and the camera over 360° in 30° steps:
		while (abs(differenceAngle) > ANGLE_LIMITE){
			sprintf(message, "A: %d", differenceAngle);
			messageEnvoi = message;
			zigbee_w(messageEnvoi);
			// Short delay to give the person's BeagleBone time to make its updates
			usleep(100000);
			// Take another magnetometer reading
			taskDone = false;
			pthread_create(&thread, NULL, magnetometre, NULL);
			while(taskDone != true){/* Wait for the task to finish */}
			pthread_join(thread, NULL);
			// If the stop order was received in the magnetometer thread
			if (finProgramme == true){
				// End of the thread
				pthread_exit(NULL);
			}
		}
		// Message indicating that the person and the robot are facing the same direction
		zigbee_w("M: magneto_success");
		// Wait until the sound has finished playing
		zigbee_r(messageRecu);
		if (strcmp(messageRecu.c_str(), "need_synchro") == 0){
			string synchro = "";
			while (strcmp(synchro.c_str(), "synchro_success") != 0){
				zigbee_w("synchronize");
				zigbee_r(synchro);
			}
		}
		else if (strcmp(messageRecu.c_str(), "end_program") == 0){
			// Stop the program
			finProgramme = true;
			pthread_exit(NULL);
		}

		// Now that the robot and the person are facing the same direction, do an acquisition
		// to see whether we find someone.
		cameraInformation = camera();
		if (cameraInformation.flagDetect == false){
			// If the person is still not detected, rotate the robot on the spot by 30°,
			// and so on, sweeping up to 180° so as to check the area that was behind the robot
			// at the moment we realized the person had been lost
			zigbee_w("M: detection_failed");
			// Wait until the sound has finished playing
			zigbee_r(messageRecu);
			if (strcmp(messageRecu.c_str(), "need_synchro") == 0){
				string synchro = "";
				while (strcmp(synchro.c_str(), "synchro_success") != 0){
					zigbee_w("synchronize");
					zigbee_r(synchro);
				}
			}
			else if (strcmp(messageRecu.c_str(), "end_program") == 0){
				// Stop the program
				finProgramme = true;
				pthread_exit(NULL);
			}

			angleRotationR += 30;
			if (angleRotationR >= 360)
			{
				puts("Aucune personne n'a été détectée, arrêt du programme");
				zigbee_w("I: not_found");
				// Stop the program
				finProgramme = true;
				pthread_exit(NULL);
			}
			// Change the robot's heading:
			robot.pivotRight(100, 30);
		}
		// Short delay to give the person time to reposition
		usleep(1500000);
	}while (cameraInformation.flagDetect == false);

	// If a face is detected, assume it is the person who wants to be guided.
	// Trigger the picture capture
	takePicture();

	// Initialize the ultrasonic sensors:
	if (i2c_w(ADDR_ULTRASON_3, REGISTRE_W_RANGE, DETECTION_MAX, NULL) != 0){
		finProgramme = true;
		pthread_exit(NULL);
	}
	if (i2c_w(ADDR_ULTRASON_2, REGISTRE_W_RANGE, DETECTION_MAX, NULL) != 0){
		finProgramme = true;
		pthread_exit(NULL);
	}
	if (i2c_w(ADDR_ULTRASON_4, REGISTRE_W_RANGE, DETECTION_MAX, NULL) != 0){
		finProgramme = true;
		pthread_exit(NULL);
	}
	if (i2c_w(ADDR_ULTRASON_1, REGISTRE_W_RANGE, DETECTION_MAX, NULL) != 0){
		finProgramme = true;
		pthread_exit(NULL);
	}
	if (i2c_w(ADDR_ULTRASON_5, REGISTRE_W_RANGE, DETECTION_MAX, NULL) != 0){
		finProgramme = true;
		pthread_exit(NULL);
	}

	flagInit = true;

	return 0;
}
Example #13
TakePicView::TakePicView(int calibType, int numPictures, QWidget *parent) : QDialog(parent)
{

    if (calibType == Enums::controllerEnum::INTRINSIC)
    {
        // create intrinsic calib stuff
        videoLabel = new QLabel(this);
        videoLabel->setMinimumWidth(640);
        videoLabel->setMinimumHeight(480);
        titleLabel = new QLabel("Intrinsic Calibration");
        picProgressLabel = new QLabel("");
        this->incrementSuccesses(1, numPictures);
        messageLabel = new QLabel("Messages: ");
        messages = new QLabel("");
        messages->setStyleSheet("color: red; font-weight: bold;");
        takePicButton = new QPushButton("Take Picture");
        takePicButton->setEnabled(false);
        setButtonStyle(takePicButton, false);
        cancelButton = new QPushButton("Cancel");
        setButtonStyle(cancelButton, true);

        connect(takePicButton, SIGNAL(clicked()), this, SLOT(takePicture()));
        connect(cancelButton, SIGNAL(clicked()), this, SLOT(stopVideo()));
        connect(cancelButton, SIGNAL(clicked()), this, SLOT(reject()));

        // layout code
        mainLayout = new QBoxLayout(QBoxLayout::TopToBottom);
        mainLayout->addWidget(titleLabel);
        mainLayout->addWidget(videoLabel);
        mainLayout->addWidget(picProgressLabel);
        mainLayout->addWidget(messageLabel);
        mainLayout->addWidget(messages);
        buttonLayout = new QGridLayout();
        buttonLayout->addWidget(takePicButton, 0, 0);
        buttonLayout->addWidget(cancelButton, 1, 0);
        mainLayout->addLayout(buttonLayout);
        setLayout(mainLayout);
    }
    else if (calibType == Enums::controllerEnum::EXTRINSIC)
    {
        // create extrin calib stuff
        videoLabel = new QLabel(this);
        videoLabel->setMinimumWidth(640);
        videoLabel->setMinimumHeight(480);
        titleLabel = new QLabel("Extrinsic Calibration");
        picProgressLabel = new QLabel("");
        this->incrementSuccesses(1, numPictures);
        messageLabel = new QLabel("Messages: ");
        messages = new QLabel("<b>Messages Go Here</b>");
        messages->setStyleSheet("color: red; font-weight: bold;");
        takePicButton = new QPushButton("Take Picture");
        takePicButton->setEnabled(false);
        setButtonStyle(takePicButton, false);
        cancelButton = new QPushButton("Cancel");
        setButtonStyle(cancelButton, true);

        connect(takePicButton, SIGNAL(clicked()), this, SLOT(takePicture()));
        connect(cancelButton, SIGNAL(clicked()), this, SLOT(stopVideo()));
        connect(cancelButton, SIGNAL(clicked()), this, SLOT(reject()));

        // layout code
        mainLayout = new QBoxLayout(QBoxLayout::TopToBottom);
        mainLayout->addWidget(titleLabel);
        mainLayout->addWidget(videoLabel);
        mainLayout->addWidget(picProgressLabel);
        mainLayout->addWidget(messageLabel);
        mainLayout->addWidget(messages);
        buttonLayout = new QGridLayout();
        buttonLayout->addWidget(takePicButton, 0, 0);
        buttonLayout->addWidget(cancelButton, 1, 0);
        mainLayout->addLayout(buttonLayout);
        setLayout(mainLayout);
    }
    capture.open(0);
    if (capture.isOpened())
    {
        //Disable autofocus by setting focus to current focus only
        //capture.set(CV_CAP_PROP_FOCUS, capture.get(CV_CAP_PROP_FOCUS));
        // if the Video Capture Stream is open, set button and create timer
        takePicButton->setEnabled(true);
        setButtonStyle(takePicButton, true);
        timer = new QTimer(this);
        // slot for displaying video every 20ms
        connect(timer, SIGNAL(timeout()), this, SLOT(displayVideoFrame()));
        timer->start(20);
    }
    else
    {
        videoLabel->setPixmap(QPixmap("noCamera.png"));
        takePicButton->setEnabled(false);
        setButtonStyle(takePicButton, false);
        messages->setStyleSheet("color: red; font-weight: bold;");
        messages->setText("No camera is detected! Please check your connection!");
    }
}
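
The constructor above connects a 20 ms QTimer to a displayVideoFrame() slot that is not shown. Below is a minimal sketch of what such a slot typically looks like with cv::VideoCapture and a QLabel; the member names capture and videoLabel follow the constructor, but the body is an assumed illustration, not code from the original project.

// Hypothetical displayVideoFrame() slot (assumed implementation, not from the original project)
void TakePicView::displayVideoFrame()
{
    cv::Mat frame;
    capture >> frame;                                // grab the next frame from the open device
    if (frame.empty())
        return;

    cv::cvtColor(frame, frame, cv::COLOR_BGR2RGB);   // OpenCV delivers BGR, QImage expects RGB
    QImage image(frame.data, frame.cols, frame.rows,
                 static_cast<int>(frame.step), QImage::Format_RGB888);
    videoLabel->setPixmap(QPixmap::fromImage(image.copy()));
}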