Example #1
0
void Eyrie::process() {
	if(recbin == NULL) {
		return;
	}
	if(GST_BUFFER_SIZE(buf) == 0) {
		endRecording();
		QVariant ret;
		QMetaObject::invokeMethod(parent(), "setStatus", Q_RETURN_ARG(QVariant, ret), Q_ARG(QVariant, "Sorry, the recording failed."));
		return;
	}
	mutex->lock();
	const float *pcm = (const float *) GST_BUFFER_DATA(buf);
	Codegen *codegen = new Codegen(pcm, GST_BUFFER_SIZE(buf) / sizeof(float), 0);
	mutex->unlock();
	std::string code = codegen->getCodeString();
	QNetworkAccessManager *networkManager = new QNetworkAccessManager();
	QUrl url("http://developer.echonest.com/api/v4/song/identify");
	QByteArray params;
	params.append("api_key=RIUKSNTIPKUMPHPEO");
	params.append("&query=[{\"metadata\":{\"version\":4.12},\"code\":\""); params.append(code.c_str()); params.append("\"}]");
	QNetworkRequest request;
	request.setHeader(QNetworkRequest::ContentTypeHeader, "application/x-www-form-urlencoded;charset=UTF-8");
	request.setUrl(url);
	connect(networkManager, SIGNAL(finished(QNetworkReply *)), this, SLOT(parseResponse(QNetworkReply *)));
	networkManager->post(request, params);
}
Example #2
0
//--------------------------------------------------------------
// Drains all pending OSC messages and reacts to the two addresses
// this app understands: /fypRecord (start/stop avatar recording)
// and /fypNewUser (reset all avatar and recorder state).
void ofApp::getOscData(){
    while(oscReceiver.hasWaitingMessages()){
        ofxOscMessage m;
        oscReceiver.getNextMessage(&m);
        if(m.getAddress() == "/fypRecord"){
            int record = m.getArgAsInt32(0);
            if(record == 1){
                bRecordingAvatar = true;
                startRecording();
                cout << "start record from osc" << bRecordingAvatar << endl;
            }else if( record == 0 ){
                bRecordingAvatar = false;
                endRecording();
                cout << "stop record from osc" << bRecordingAvatar << endl;
            }
        }else if( m.getAddress() == "/fypNewUser"){
            // clears for new user
            for(int i = 0; i < MAX_AVATARS; i++){
                avatars[i].resetAvatar();
            }
            totalAvatarsThisUser = 0;
            currentAvatar = -1;
            // fix: `recorder.q.empty()` only *tested* emptiness and discarded
            // the result (a no-op); actually drain the queued frames here.
            while(!recorder.q.empty()) recorder.q.pop();
            bRecordingAvatar = false;
            bSavingRecords = false;
            if(recorder.isThreadRunning()) recorder.stopThread();
        }
    }
}
Example #3
0
	// Destructor: stop recording, then tear down the dedicated record
	// thread and its offscreen GL context in a strict order — the context
	// must be destroyed *on* the record thread before the thread itself
	// is stopped and deleted.
	CGEFrameRecorder::~CGEFrameRecorder()
	{
		CGE_LOG_INFO("CGEFrameRecorder::~CGEFrameRecorder");


		// NOTE(review): `false` presumably means "don't wait / no callback"
		// — confirm against endRecording's declaration.
		endRecording(false);

		if(m_recordThread != nullptr)
		{
			CGE_LOG_INFO("m_recordThread kill before...");

			bool bShoudWait = true;

			// Delete the offscreen context on the record thread (GL contexts
			// are thread-affine). The lambda captures bShoudWait by reference
			// and flips it when done; that is safe because this thread blocks
			// below until the flag flips, keeping the local alive.
			m_recordThread->run(CGEThreadPool::Work([&](void*){
				CGE_LOG_INFO("Delete offscreen context...");
				delete m_offscreenContext;
				m_offscreenContext = nullptr;
				bShoudWait = false;
			}));

			// Busy-wait (1 ms polls) until the cleanup work ran AND the
			// thread has drained any remaining queued work.
			while(bShoudWait || m_recordThread->isActive())
			{
				std::this_thread::sleep_for(std::chrono::milliseconds(1));
			}

			CGE_LOG_INFO("m_recordThread kill after...");

			m_recordThread->quit();

			delete m_recordThread;
			m_recordThread = nullptr;
		}
	}
Example #4
0
// OK pressed: persist the dialog's fields into m_cameraSettings
// (creating it on first use), optionally build an ROI mask that
// ignores over-bright areas, then stop the preview and accept().
void AddCamera::on_buttonBox_accepted()
{    
    QVector3D globalPosition = QVector3D(m_ui->X->text().toFloat(), m_ui->Y->text().toFloat(), m_ui->Z->text().toFloat());
    // NOTE(review): rows is passed as x and cols as y — confirm this
    // (height, width) order is what CameraSettings::setResolution expects.
    QVector2D resolution = QVector2D(m_frame.rows, m_frame.cols);

    if(!m_cameraSettings)
    {
        // First time through: create the settings object from the form.
        m_cameraSettings = std::make_shared<CameraSettings>(m_ui->name->text(), m_ui->usbId->text().toInt(),
                                              m_ui->diagonalFov->text().toFloat(), globalPosition, m_roomDims);
    }
    else
    {
        // Editing an existing camera: update fields in place.
        // (fix: dropped the redundant `.get()->` — shared_ptr overloads ->)
        m_cameraSettings->setName(m_ui->name->text());
        m_cameraSettings->setVideoUsbId(m_ui->usbId->text().toInt());
        m_cameraSettings->setDiagonalFov(m_ui->diagonalFov->text().toFloat());
        m_cameraSettings->setGlobalPosition(globalPosition);
        m_cameraSettings->setRoomDimensions(m_roomDims);
    }

    m_cameraSettings->setResolution(resolution);

    if(!m_coefficients.empty())
    {
        m_cameraSettings->setDistortionCoeffs(m_coefficients);
    }

    // TODO(review): dead branch — the camera matrix is computed but never
    // stored; restore setCameraMatrix or remove m_cameraMatrix entirely.
    if(!m_cameraMatrix.empty())
    {
        //m_cameraSettings->setCameraMatrix(m_cameraMatrix);
    }

    if(m_tooHighValueWarning)
    {
        QMessageBox msgBox;
        msgBox.setWindowTitle(" ");
        msgBox.setText("Scene contains bright areas!");
        msgBox.setInformativeText("Do you want to ignore them in capture?");
        msgBox.setStandardButtons(QMessageBox::Yes | QMessageBox::No | QMessageBox::Cancel);
        msgBox.setDefaultButton(QMessageBox::Yes);

        int status = msgBox.exec();

        if(status == QMessageBox::Yes)
        {
            // Grow the bright-area mask, then use its inverse as the ROI so
            // the flagged pixels are excluded from capture.
            cv::Mat ex = getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(20,20));
            cv::morphologyEx(m_mask, m_mask, cv::MORPH_DILATE, ex);
            m_cameraSettings->setuseRoi(true);
            m_cameraSettings->setRoiMask(255 - m_mask);
        }
        else if(status == QMessageBox::Cancel)
        {
            // Cancel keeps the dialog open; settings written above remain.
            return;
        }
    }

    endRecording();

    accept();
}
	// Tear down an active recording session: hide the progress dialog,
	// release the recorder object, and notify listeners via endRecording().
	// Does nothing when no recording is in progress.
	void VideoRecorderControls::stopRecording(void)
	{
		if(!isRecording())
			return;

		recordingDialog.hide();

		delete recorder;
		recorder = NULL;

		emit endRecording();
	}
Example #6
0
// Toggle handler for the play button: when checked, validate the
// device-ID field, open the capture device and start the preview;
// when unchecked, stop the preview. On any failure the button is
// un-checked and the ID field re-enabled.
void AddCamera::on_Play_ID_clicked(bool checked)
{
    if(checked)
    {
        bool isNumber = false;

        // fix: QString::toInt() returns int; storing it into size_t silently
        // wrapped a negative entry into a huge unsigned device id. Keep it
        // signed (cv::VideoCapture::open takes an int index anyway).
        const int usbId = m_ui->usbId->text().toInt(&isNumber);

        if(m_ui->usbId->text().isEmpty() || !isNumber)
        {
            QMessageBox::warning(this, "No device ID specified",
                                       "Couldn't turn on camera, because no device ID is specified");
            m_ui->Play_ID->setChecked(false);
            m_ui->usbId->setEnabled(true);
            return;
        }

        m_cameraRecording = true;
        m_videoCaptureTemp.open(usbId);

        if(! m_videoCaptureTemp.isOpened())
        {
            QMessageBox::warning(this, "Wrong device ID",
                                       "Specified device ID is wrong, camera could not be opened");

            m_ui->Play_ID->setChecked(false);
            m_ui->usbId->setEnabled(true);
            m_cameraRecording = false;
            return;
        }

        recording();
    }
    else
    {
        endRecording();
    }
}
Example #7
0
void Eyrie::parseResponse(QNetworkReply *reply) {
	QVariant ret;
	qDebug() << "Parsing network response";
	bool ok;
	QJson::Parser parser;
	QVariantMap result = parser.parse(reply->readAll(), &ok).toMap();
	QVariantMap response = result["response"].toMap();
	QVariantList songs = response["songs"].toList();
	if(songs.size() > 0) {
		endRecording();
		QVariantMap song = songs[0].toMap();
		QString artist_id = song["artist_id"].toString();
		QString artist = song["artist_name"].toString().trimmed();
		QString title = song["title"].toString().trimmed();
		qDebug() << artist << title;
		QMetaObject::invokeMethod(parent(), "setDetails", Q_RETURN_ARG(QVariant, ret), Q_ARG(QVariant, artist), Q_ARG(QVariant, title));
		QNetworkAccessManager *networkManager = new QNetworkAccessManager();
		QUrl url(QString("http://developer.echonest.com/api/v4/artist/images?api_key=RIUKSNTIPKUMPHPEO&results=1&id=") + artist_id);
		QNetworkRequest request;
		request.setUrl(url);
		connect(networkManager, SIGNAL(finished(QNetworkReply *)), this, SLOT(showImage(QNetworkReply *)));
		networkManager->get(request);
	} else {
Example #8
0
//--------------------------------------------------------------
// Keyboard shortcuts: l/s load/save GUI + warper settings, b toggles
// background capture, r toggles recording, arrows nudge avatar layout,
// x deletes the current avatar, 0 loads a saved avatar, f fullscreen,
// g toggles the GUI.
void ofApp::keyPressed(int key){
    
    switch(key){
        case 'l':
            gui.loadFromFile("settings.xml");
            warper.load("warper.xml");
            break;
        case 's':
            gui.saveToFile("settings.xml");
            warper.save("warper.xml");
            break;
        case 'b':
            bCaptureBg = !bCaptureBg;
            break;
        case 'r':
            bRecordingAvatar = !bRecordingAvatar;
            if(bRecordingAvatar) startRecording();
            else endRecording();
            cout << "change record " << bRecordingAvatar << endl;
            break;
        case '>':
        case '.':
            //farThreshold ++;
            //if (farThreshold > 255) farThreshold = 255;
            break;
        case OF_KEY_UP:
            /*angle++;
            if(angle>30) angle=30;
            if(bUseKinect) kinect.setCameraTiltAngle(angle);*/
            // Shift the whole avatar column up and re-lay-out the slots.
            avatarOffY-=5;
            for(int i = 0; i < MAX_AVATARS; i++){
                float yp = (i+1) * (ofGetHeight()/4.0);
                avatars[i].pos.set(avatarOffX,yp+avatarOffY);
            }
            break;
            
        case OF_KEY_DOWN:
            avatarOffY+=5;
            for(int i = 0; i < MAX_AVATARS; i++){
                float yp = (i+1) * (ofGetHeight()/4.0);
                avatars[i].pos.set(avatarOffX,yp+avatarOffY);
            }/*angle--;
            if(angle<-30) angle=-30;
            if(bUseKinect) kinect.setCameraTiltAngle(angle);*/
            break;
        case OF_KEY_LEFT:
            avatarOffX+=5;
            for( int i = 0; i < totalAvatarsThisUser; i++){
                avatars[i].pos.x = avatarOffX;
            }
            break;
        case OF_KEY_RIGHT:
            avatarOffX-=5;
            for( int i = 0; i < totalAvatarsThisUser; i++){
                avatars[i].pos.x = avatarOffX;
            }
            break;
        case 'x':
           // Delete the most recent avatar for this user.
           if(totalAvatarsThisUser > 0){
                cout << "current Avatar " << currentAvatar << " totalAvatar " << totalAvatarsThisUser << endl;
                avatars[currentAvatar].resetAvatar();
                totalAvatarsThisUser--;
                currentAvatar--;
                // fix: `recorder.q.empty()` only *tested* emptiness and
                // discarded the result (a no-op); drain the queue instead,
                // matching the intent in getOscData's /fypNewUser reset.
                while(!recorder.q.empty()) recorder.q.pop();
           }
            
            break;
        case '0':
            openNextAvatarFromSaved();
            break;
        case 'f':
            ofToggleFullscreen();
            break;
        case 'g':
            bShowGui = !bShowGui;
            break;
            
    }

}
Example #9
0
// Cancel pressed: stop the camera preview the dialog may have started,
// discarding any unsaved settings.
void AddCamera::on_buttonBox_rejected()
{
    endRecording();
}