int main(int argc, char *argv[]) { QCoreApplication::addLibraryPath("app/native/plugins"); QApplication app(argc, argv); SimondConnector connector; QMLSimoneView view; QObject::connect(&view, SIGNAL(connectToServer()), &connector, SLOT(connectToServer())); QObject::connect(&view, SIGNAL(disconnectFromServer()), &connector, SLOT(disconnectFromServer())); QObject::connect(&view, SIGNAL(startRecording()), &connector, SLOT(startRecording())); QObject::connect(&view, SIGNAL(commitRecording()), &connector, SLOT(commitRecording())); QObject::connect(&view, SIGNAL(configurationChanged()), &connector, SLOT(configurationChanged())); QObject::connect(&connector, SIGNAL(connectionState(ConnectionState)), &view, SLOT(displayConnectionState(ConnectionState))); QObject::connect(&connector, SIGNAL(status(QString)), &view, SLOT(displayStatus(QString))); QObject::connect(&connector, SIGNAL(error(QString)), &view, SLOT(displayError(QString))); QObject::connect(&connector, SIGNAL(listening()), &view, SLOT(displayListening())); QObject::connect(&connector, SIGNAL(recognizing()), &view, SLOT(displayRecognizing())); QObject::connect(&connector, SIGNAL(microphoneLevel(int,int,int)), &view, SLOT(displayMicrophoneLevel(int,int,int))); QObject::connect(&connector, SIGNAL(recognized(RecognitionResultList)), &view, SLOT(recognized(RecognitionResultList))); view.show(); connector.init(); return app.exec(); }
/*
 * Reads newline-terminated commands from stdin forever and dispatches them.
 * Recognized commands: OFF, PERIOD=<...>, SCALE=F, SCALE=C, STOP, START.
 * Any unrecognized command, a read error, or EOF terminates the process
 * with exit(1). Never returns normally.
 * NOTE(review): a command split across two read() calls is lost -- the
 * partial tail of `buffer` is discarded on the next iteration; confirm
 * that the writer always sends whole lines per write.
 */
void * getData() {
    char buffer[1024];
    for (;;) {
        /* Busy-wait until input processing is enabled via iflag. */
        if (iflag == 0) continue;
        int readIn = read(STDIN_FILENO, buffer, 1024);
        if (readIn <= -1) exit(1); /* read error */
        else if (readIn >= 1) {
            int i = 0;
            int given = 0; /* start index of the current command within buffer */
            while (i < readIn) {
                if (buffer[i] == '\n') {
                    buffer[i] = 0; /* terminate the command string in place */
                    if (strcmp(buffer + given, "OFF") == 0) shutDown();
                    else if (strncmp(buffer + given, "PERIOD=", strlen("PERIOD=")) == 0) pConverter(buffer + given);
                    else if (strcmp(buffer + given, "SCALE=F") == 0) scaleHandler('F');
                    else if (strcmp(buffer + given, "SCALE=C") == 0) scaleHandler('C');
                    else if (strcmp(buffer + given, "STOP") == 0) startRecording(0);
                    else if (strcmp(buffer + given, "START") == 0) startRecording(1);
                    else exit(1); /* unknown command is fatal */
                    given = i + 1; /* next command starts after the newline */
                }
                i++;
            }
        } else exit(1); /* readIn == 0: EOF is treated as fatal */
    }
    exit(0); /* unreachable: the loop above never breaks */
}
// Worker-thread main loop for a DCC VOICE session: drains control events,
// moves data between the socket and the soundcard, and publishes buffer
// sizes for the GUI. Exits (via exit_dcc) on a TERMINATE event or when a
// socket/sound step fails; always closes the soundcard and socket on the
// way out.
void DccVoiceThread::run()
{
#ifndef COMPILE_DISABLE_DCC_VOICE
	for(;;)
	{
		// m_uSleepTime = 0;
		// Dequeue events
		while(KviThreadEvent * e = dequeueEvent())
		{
			if(e->id() == KVI_THREAD_EVENT_TERMINATE)
			{
				delete e;
				goto exit_dcc;
			} else if(e->id() == KVI_DCC_THREAD_EVENT_ACTION)
			{
				// Payload is a heap int: nonzero = start, zero = stop.
				int * act = ((KviThreadDataEvent<int> *)e)->getData();
				if(*act)startRecording();
				else stopRecording();
				delete act;
				delete e;
			} else {
				// Other events are senseless to us
				delete e;
			}
		}
		// One socket I/O step and one soundcard step per iteration;
		// either failing ends the session.
		if(!readWriteStep())goto exit_dcc;
		if(!soundStep())goto exit_dcc;
		// Publish buffer fill levels under the mutex for the GUI thread.
		m_pInfoMutex->lock();
		m_iInputBufferSize = m_inSignalBuffer.size();
		m_iOutputBufferSize = (m_outFrameBuffer.size() / m_pOpt->pCodec->encodedFrameSize()) * m_pOpt->pCodec->decodedFrameSize();
		m_pInfoMutex->unlock();
		// Actually the maximum that we can sleep here is
		// around 500 usecs... = 0.0005 sec -> 8 bytes at 8 KHz
		// if(m_uSleepTime)usleep(m_uSleepTime);
		// Start recording if the request was not fulfilled yet
		if(m_bRecordingRequestPending)startRecording();
	}
exit_dcc:
#endif //! COMPILE_DISABLE_DCC_VOICE
	closeSoundcard();
	kvi_socket_close(m_fd);
	m_fd = KVI_INVALID_SOCKET;
}
void LTWindow::timeout() { if (recording) eventRecorder->timeout(); if (eventPlayer->isPlaying()) eventPlayer->timeout(); ui->eventStatusWidget->updateEventStatus(); // if (driverTrackerWidget->isVisible()) // driverTrackerWidget->update(); //during quali timer is stopped when we have red flag if (eventData.isSessionStarted()) { if (!playing && !recording && settings->value("ui/auto_record").toBool() && !eventRecorder->isSessionRecorded()) startRecording(true); } if (!recording && !playing && !eventData.isSessionStarted()) { sessionTimer->stop(); driverTrackerWidget->stopTimer(); } }
// Key-release handler.
//  'q' -- toggle playback (only when not recording) and start/stop it.
//  'r' -- toggle recording (only when not playing back) and start/stop it.
//  'p' -- toggle pause.
//  keyUp -- tilt up by 5 (presumably degrees -- TODO confirm units).
// Fix: the original paired the *playback* toggle under 'q' with
// startRecording()/stopRecording() driven by `record` (and the mirror image
// under 'r'), so neither key actually started what it toggled. Each toggle
// now drives its matching start/stop call.
void testApp::keyReleased(int key) {
	switch(key) {
		case 'q':
			if(!record) playback = !playback;
			if(playback) startPlayback();
			else stopPlayback();
			break;
		case 'r':
			if(!playback) record = !record;
			if(record) startRecording();
			else stopRecording();
			break;
		case 'p':
			paused = !paused;
			break;
		case keyUp:
			angle += 5;
	}
}
// Builds the shortcut-capture widget: a record button plus a clear button,
// laid out horizontally with no margin.
KeySequenceWidget::KeySequenceWidget(QWidget *parent)
    : QWidget(parent),
      _shortcutsModel(0),
      _isRecording(false),
      _modifierKeys(0)
{
    QHBoxLayout *box = new QHBoxLayout(this);
    box->setMargin(0);

    _keyButton = new KeySequenceButton(this, this);
    _keyButton->setFocusPolicy(Qt::StrongFocus);
    _keyButton->setIcon(SmallIcon("configure"));
    _keyButton->setToolTip(tr("Click on the button, then enter the shortcut like you would in the program.\nExample for Ctrl+a: hold the Ctrl key and press a."));
    box->addWidget(_keyButton);

    _clearButton = new QToolButton(this);
    box->addWidget(_clearButton);

    // The clear glyph points toward the text, so the icon depends on the
    // layout direction.
    _clearButton->setIcon(SmallIcon(qApp->isLeftToRight()
                                        ? "edit-clear-locationbar-rtl"
                                        : "edit-clear-locationbar-ltr"));

    setLayout(box);

    // Pressing the key button begins capture; both buttons also forward a
    // plain clicked() signal to listeners.
    connect(_keyButton, SIGNAL(clicked()), SLOT(startRecording()));
    connect(_keyButton, SIGNAL(clicked()), SIGNAL(clicked()));
    connect(_clearButton, SIGNAL(clicked()), SLOT(clear()));
    connect(_clearButton, SIGNAL(clicked()), SIGNAL(clicked()));
}
/*
 * Thread entry point: initializes the encoder, connects to the streaming
 * server, and (if live recording is configured) opens the input device and
 * starts recording. Returns 1 on success, 0 on any failure (after showing
 * a message box and, where needed, disconnecting).
 * Fix: removed the unused local `songChange` (the setCurrentSongTitle
 * return value was never read) and flattened the needless else-block.
 */
DWORD WINAPI startOddcast(LPVOID lpParameter)
{
    initializeencoder();
    if (connectToServer()) {
        setCurrentSongTitle("");
        UpdateWhatsPlaying();
    } else {
        MessageBox(NULL, "Unable to Connect", NULL, MB_OK);
        return 0;
    }
    if (getLiveRecordingSetFlag()) {
        if (!initLiveRecording()) {
            MessageBox(NULL, "Fail to open input device", NULL, MB_OK);
            disconnectFromServer();
            return 0;
        }
        /* Advertise the live input format, then start capturing. */
        char buf[255] = "";
        sprintf(buf, "%dHz/Stereo PCM", getLiveInSamplerate());
        setSourceDescription(buf);
        setCurrentSongTitle(getServerDesc()); /* return value intentionally ignored */
        startRecording();
    }
    return 1;
}
/*-----------------Обработка полученного с компьютера сообщения--------------*/ void onRF_MessageReceived(){ switch(rf_rx_buf[0]){ case 0xFF: //stop recording command stopRecording(); rf_send(stopRecordingResponceMessage,5); break; case 0xFE: //start recording command startRecording(); break; case 0xFD: //hello command rf_send(helloMsg,5); break; case 0xFC: //версия прошивки rf_send(firmwareVersion,7); break; case 0xFB: //ping command pingCntr = 0; break; case 0xFA: //hardware config request rf_send(hardwareConfigMessage,7); break; default: if((rf_rx_buf_size < rf_rx_buf[0]) && (rf_rx_buf[0] < 0xFA)){//проверяем длину однобайтовой команды rf_send(unknownOneByteCmdError,7); } //проверяем два последних байта == маркер конца пакета if(((rf_rx_buf[rf_rx_buf[0]-1] == 0x55) && (rf_rx_buf[rf_rx_buf[0]-2] == 0x55))){ onRF_MultiByteMessage(); }else{ rf_send(noStopMarkerError,7); } break; } }
// Starts push-mode capture: the caller reads captured data from the
// returned QBuffer-backed device. Returns the device, or a dead device
// pointer of 0 if the recorder could not be started.
QIODevice *QOpenSLESAudioInput::start()
{
    // Restart from a clean state if a capture is already in progress.
    if (m_deviceState != QAudio::StoppedState)
        stopRecording();

    m_audioSource = 0;

    // Dispose of any stale push-mode buffer device from a previous run.
    if (!m_pullMode && m_bufferIODevice) {
        m_bufferIODevice->close();
        delete m_bufferIODevice;
    }

    m_pullMode = false;
    m_pushBuffer.clear();
    m_bufferIODevice = new QBuffer(&m_pushBuffer);
    m_bufferIODevice->open(QIODevice::ReadOnly);

    if (!startRecording()) {
        // Recorder failed: report the error and tear the device back down.
        m_deviceState = QAudio::StoppedState;
        Q_EMIT errorChanged(m_errorState);
        m_bufferIODevice->close();
        delete m_bufferIODevice;
        m_bufferIODevice = 0;
    } else {
        m_deviceState = QAudio::IdleState;
    }

    Q_EMIT stateChanged(m_deviceState);
    return m_bufferIODevice;
}
// Builds the shortcut-capture widget: a record button plus a clear button
// in a margin-less horizontal row.
KeySequenceWidget::KeySequenceWidget(QWidget *parent)
    : QWidget(parent),
      _shortcutsModel(nullptr),
      _isRecording(false),
      _modifierKeys(0)
{
    auto *row = new QHBoxLayout(this);
    row->setMargin(0);

    _keyButton = new KeySequenceButton(this, this);
    _keyButton->setFocusPolicy(Qt::StrongFocus);
    _keyButton->setToolTip(tr("Click on the button, then enter the shortcut like you would in the program.\nExample for Ctrl+a: hold the Ctrl key and press a."));
    row->addWidget(_keyButton);

    _clearButton = new QToolButton(this);
    row->addWidget(_clearButton);
    _clearButton->setIcon(style()->standardIcon(QStyle::SP_LineEditClearButton));

    setLayout(row);

    // Key button begins capture; both buttons also forward plain clicks.
    connect(_keyButton, SIGNAL(clicked()), SLOT(startRecording()));
    connect(_keyButton, SIGNAL(clicked()), SIGNAL(clicked()));
    connect(_clearButton, SIGNAL(clicked()), SLOT(clear()));
    connect(_clearButton, SIGNAL(clicked()), SIGNAL(clicked()));
}
bool VideoRecordDialog::onEvent(const GEvent& event) { if (GuiWindow::onEvent(event)) { // Base class handled the event return true; } if (enabled()) { // Video const bool buttonClicked = (event.type == GEventType::GUI_ACTION) && (event.gui.control == m_recordButton); const bool hotKeyPressed = (event.type == GEventType::KEY_DOWN) && (event.key.keysym.sym == m_hotKey) && (event.key.keysym.mod == m_hotKeyMod); if (buttonClicked || hotKeyPressed) { if (m_video) { stopRecording(); } else { startRecording(); } return true; } const bool ssHotKeyPressed = (event.type == GEventType::KEY_DOWN) && (event.key.keysym.sym == m_ssHotKey) && (event.key.keysym.mod == m_ssHotKeyMod); if (ssHotKeyPressed) { takeScreenshot(); return true; } } return false; }
// Main-window setup: maps the mode radio buttons onto capture modes, builds
// the video preview (player + widget inside ui->videoFrame), starts a 30 s
// camera-status poll timer, and wires the UI to the camera controller.
// Fix: QMediaPlayer was allocated without a parent and therefore leaked;
// it is now owned by the window.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    // Radio buttons map 1:1 onto the camera's capture modes.
    mode_group = new QButtonGroup(this);
    mode_group->addButton(ui->radioButtonFoto,MODE_FOTO);
    mode_group->addButton(ui->radioButtonVideo,MODE_VIDEO);
    mode_group->addButton(ui->radioButtonTVideo,MODE_TIMED_VIDEO);
    mode_group->addButton(ui->radioButtonTFoto,MODE_TIMED_FOTO);

    cameraController = new CameraController(this);

    // Video preview: the player renders into a widget hosted in videoFrame.
    mVideoContainer = new QWidget(ui->videoFrame);
    mVideoContainer->setSizePolicy(QSizePolicy::Expanding,QSizePolicy::Expanding);
    mMediaPlayer = new QMediaPlayer(this); // parented so it is destroyed with the window
    mVideoWidget = new QVideoWidget(mVideoContainer);
    mVideoWidget->setSizePolicy(QSizePolicy::Expanding,QSizePolicy::Expanding);
    mMediaPlayer->setVideoOutput(mVideoWidget);

    // Poll camera status periodically.
    periodic_refresh = new QTimer(this);
    periodic_refresh->setInterval(30000); //30s

    connect(ui->startRecButton,SIGNAL(clicked()), cameraController,SLOT(startRecording()));
    connect(ui->stopRecButton,SIGNAL(clicked()), cameraController,SLOT(stopRecording()));
    connect(periodic_refresh,SIGNAL(timeout()),this,SLOT(periodic_check()));
    connect(mode_group,SIGNAL(buttonToggled(int,bool)),this,SLOT(on_mode_button_toggled(int,bool)));
    connect(cameraController,SIGNAL(batteryStatus(int)),ui->batteryBar,SLOT(setValue(int)));
    connect(cameraController,SIGNAL(cameraModeChanged(Camera_Modes)),this,SLOT(cameraMode(Camera_Modes)));
    connect(cameraController,SIGNAL(spaceLeft(QString)),ui->labelSpaceLeft,SLOT(setText(QString)));
}
// Begins a voice session with the user currently selected in the combo box:
// starts recording and playing, logs the new target, and flips the UI from
// the Start to the Stop button.
void Display::OnStart()
{
    // Nothing to talk to without at least one listed user.
    if(cbox->GetCount()<1)
    {
        MessageBox("No users are present");
        return;
    }
    // isstart acts as a "may start" flag; 0 means a session is already live.
    if(!isstart)
        return;
    if(doit==TRUE)
        talk->Play(0,-1,-1); // presumably an audio cue -- TODO confirm what `talk` plays
    // Remember which user is targeted.
    cbox->GetLBText(cbox->GetCurSel(),curuser);
    selectflag=1;
    //Start recording and playing...
    startRecording();
    startPlaying();
    log.WriteString("\n New Target user is "+ curuser);
    //change state
    isstart=0;
    start->ShowWindow(FALSE);
    stop->ShowWindow(TRUE);
}
// Constructs a bridge between a 2D canvas and the compositor: takes
// ownership of the GL context provider and the SkSurface, creates the
// external texture layer, registers it with the compositor, and begins
// recording draw commands.
Canvas2DLayerBridge::Canvas2DLayerBridge(PassOwnPtr<WebGraphicsContext3DProvider> contextProvider, PassRefPtr<SkSurface> surface, int msaaSampleCount, OpacityMode opacityMode)
    : m_surface(surface)
    , m_contextProvider(contextProvider)
    , m_imageBuffer(0)
    , m_msaaSampleCount(msaaSampleCount)
    , m_bytesAllocated(0)
    , m_haveRecordedDrawCommands(false)
    , m_framesPending(0)
    , m_destructionInProgress(false)
    , m_rateLimitingEnabled(false)
    , m_filterQuality(kLow_SkFilterQuality)
    , m_isHidden(false)
    , m_isDeferralEnabled(true)
    , m_lastImageId(0)
    , m_lastFilter(GL_LINEAR)
    , m_opacityMode(opacityMode)
    , m_size(m_surface->width(), m_surface->height())
{
    ASSERT(m_surface);
    ASSERT(m_contextProvider);
    // Remember the surface's baseline save count so it can be restored later.
    m_initialSurfaceSaveCount = m_surface->getCanvas()->getSaveCount();
    // Used by browser tests to detect the use of a Canvas2DLayerBridge.
    TRACE_EVENT_INSTANT0("test_gpu", "Canvas2DLayerBridgeCreation", TRACE_EVENT_SCOPE_GLOBAL);
    m_layer = adoptPtr(Platform::current()->compositorSupport()->createExternalTextureLayer(this));
    // Opaque canvases skip background blending; non-opaque ones need it.
    m_layer->setOpaque(opacityMode == Opaque);
    m_layer->setBlendBackgroundColor(opacityMode != Opaque);
    GraphicsLayer::registerContentsLayer(m_layer->layer());
    m_layer->setRateLimitContext(m_rateLimitingEnabled);
    m_layer->setNearestNeighbor(m_filterQuality == kNone_SkFilterQuality);
    startRecording();
#ifndef NDEBUG
    canvas2DLayerBridgeInstanceCounter.increment();
#endif
}
// Sets up the Simond client: an SSL socket with a read timeout, a
// microphone sound input, and all the signal plumbing between them.
SimondConnector::SimondConnector(QObject *parent)
    : QObject(parent),
      state(Unconnected),
      socket(new QSslSocket(this)),
      timeoutTimer(new QTimer(this)),
      response(new QDataStream(socket)),
      mic(new SoundInput(SOUND_CHANNELS, SOUND_SAMPLERATE, this)),
      passThroughSound(false)
{
    // Keep the internal state member in sync with emitted state changes.
    connect(this, SIGNAL(connectionState(ConnectionState)), this, SLOT(setCurrentState(ConnectionState)));

    // Socket lifecycle: both plain and encrypted connects are treated the
    // same, as are socket and SSL errors.
    connect(socket, SIGNAL(readyRead()), this, SLOT(messageReceived()));
    connect(socket, SIGNAL(error(QAbstractSocket::SocketError)), this, SLOT(socketError()));
    connect(socket, SIGNAL(sslErrors(QList<QSslError>)), this, SLOT(socketError()));
    connect(socket, SIGNAL(connected()), this, SLOT(connectionEstablished()));
    connect(socket, SIGNAL(encrypted()), this, SLOT(connectionEstablished()));
    connect(socket, SIGNAL(disconnected()), this, SLOT(connectionLost()));

    // Microphone: forward errors/levels, and drive the record/commit cycle
    // from the sound input's own listening/complete notifications.
    connect(mic, SIGNAL(error(QString)), this, SIGNAL(error(QString)));
    connect(mic, SIGNAL(microphoneLevel(int,int,int)), this, SIGNAL(microphoneLevel(int,int,int)));
    connect(mic, SIGNAL(listening()), this, SLOT(startRecording()));
    connect(mic, SIGNAL(complete()), this, SLOT(commitRecording()));
    connect(mic, SIGNAL(readyRead()), this, SLOT(soundDataAvailable()));

    // Single-shot timeout guard for server responses.
    connect(timeoutTimer, SIGNAL(timeout()), this, SLOT(timeoutReached()));
    timeoutTimer->setSingleShot(true);
    timeoutTimer->setInterval(SOCKET_TIMEOUT);
}
// Constructs a bridge for a canvas of the given size: takes ownership of
// the GL context provider, records the acceleration/opacity configuration,
// and begins recording draw commands. Unlike the surface-based overload,
// no SkSurface or compositor layer is created here.
Canvas2DLayerBridge::Canvas2DLayerBridge(PassOwnPtr<WebGraphicsContext3DProvider> contextProvider, const IntSize& size, int msaaSampleCount, OpacityMode opacityMode, AccelerationMode accelerationMode)
    : m_contextProvider(contextProvider)
    , m_imageBuffer(0)
    , m_msaaSampleCount(msaaSampleCount)
    , m_bytesAllocated(0)
    , m_haveRecordedDrawCommands(false)
    , m_destructionInProgress(false)
    , m_filterQuality(kLow_SkFilterQuality)
    , m_isHidden(false)
    , m_isDeferralEnabled(true)
    , m_isRegisteredTaskObserver(false)
    , m_renderingTaskCompletedForCurrentFrame(false)
    , m_lastImageId(0)
    , m_lastFilter(GL_LINEAR)
    , m_accelerationMode(accelerationMode)
    , m_opacityMode(opacityMode)
    , m_size(size)
{
    ASSERT(m_contextProvider);
    // Used by browser tests to detect the use of a Canvas2DLayerBridge.
    TRACE_EVENT_INSTANT0("test_gpu", "Canvas2DLayerBridgeCreation", TRACE_EVENT_SCOPE_GLOBAL);
    startRecording();
#ifndef NDEBUG
    canvas2DLayerBridgeInstanceCounter().increment();
#endif
}
// Binder server dispatch for ICameraRecordingProxy: unmarshals each
// transaction's arguments from `data`, invokes the matching virtual, and
// writes any result into `reply`. Unknown codes fall through to BBinder.
status_t BnCameraRecordingProxy::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case START_RECORDING: {
            ALOGV("START_RECORDING");
            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
            // The caller passes a listener binder; the status result is
            // marshalled back as an int32.
            sp<ICameraRecordingProxyListener> listener =
                interface_cast<ICameraRecordingProxyListener>(data.readStrongBinder());
            reply->writeInt32(startRecording(listener));
            return NO_ERROR;
        } break; // break unreachable after return; kept per AOSP convention
        case STOP_RECORDING: {
            ALOGV("STOP_RECORDING");
            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
            stopRecording();
            return NO_ERROR;
        } break;
        case RELEASE_RECORDING_FRAME: {
            ALOGV("RELEASE_RECORDING_FRAME");
            CHECK_INTERFACE(ICameraRecordingProxy, data, reply);
            // Frame memory is shared via an IMemory binder.
            sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
            releaseRecordingFrame(mem);
            return NO_ERROR;
        } break;
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}
//-------------------------------------------------------------- void ofApp::getOscData(){ while(oscReceiver.hasWaitingMessages()){ ofxOscMessage m; oscReceiver.getNextMessage(&m); if(m.getAddress() == "/fypRecord"){ int record = m.getArgAsInt32(0); if(record == 1){ bRecordingAvatar = true; startRecording(); cout << "start record from osc" << bRecordingAvatar << endl; }else if( record == 0 ){ bRecordingAvatar = false; endRecording(); cout << "stop record from osc" << bRecordingAvatar << endl; } }else if( m.getAddress() == "/fypNewUser"){ // clears for new user for(int i = 0; i < MAX_AVATARS; i++){ avatars[i].resetAvatar(); } totalAvatarsThisUser=0; currentAvatar=-1; recorder.q.empty(); bRecordingAvatar = false; bSavingRecords = false; if(recorder.isThreadRunning()) recorder.stopThread(); } } }
// Renders this module's ImGui menu page: recording-mode selector,
// capture options, the start/stop button, and per-mode settings.
void fboRecorder::drawMenuEntry(){
    ImGui::TextWrapped("Records the video output and saves it to a video file using a separate thread.");
    //ImGui::Selectable("Show Gui Window...", &bShowGuiWindow);
    // Mode list: 3 visible rows; selecting an entry switches the record mode.
    if (ImGui::ListBoxHeader("Video Recording Mode", 3)){
        // manually add new modes here
        if ( ImGui::Selectable("VIDEOREC_MODE_FILE_H264", fboRecMode==VIDEOREC_MODE_FILE_H264)) {
            setRecordMode(VIDEOREC_MODE_FILE_H264);
        }
        if ( ImGui::Selectable("VIDEOREC_MODE_FILE_PNG", fboRecMode==VIDEOREC_MODE_FILE_PNG)) {
            setRecordMode(VIDEOREC_MODE_FILE_PNG);
        }
#ifdef KM_ENABLE_SYPHON
        if ( ImGui::Selectable("VIDEOREC_MODE_SYPHON", fboRecMode==VIDEOREC_MODE_SYPHON)) {
            setRecordMode(VIDEOREC_MODE_SYPHON);
        }
#endif
        ImGui::ListBoxFooter();
    }
    ImGui::Checkbox("Use grab screen instead of fbo", &useGrabScreen);
    ImGui::Checkbox("Show recorded output", &videoRecShowOutput);
    ImGui::Separator();
    // Start/stop toggle, only while the module is enabled.
    if( isEnabled() ){
        if(!isRecording()){
            if(ImGui::Button("Start Recording")){
                startRecording();
            }
        }
        else {
            if(ImGui::Button("Stop Recording")){
                stopRecording();
            }
            if(fbo.isAllocated()){
                ImGui::TextWrapped("Recorded resolution: %gx%gpx", fbo.getWidth(), fbo.getHeight());
            }
        }
    }
    // Per-mode settings block.
    switch (fboRecMode) {
        case VIDEOREC_MODE_FILE_H264 :
        case VIDEOREC_MODE_FILE_PNG :
            ImGui::TextWrapped("Video File Settings");
            ImGui::InputInt("Bitrate", &videoRecBitRate);
            ImGui::InputInt("AA quality", &videoRecAA);
            ImGui::InputInt("Target video FPS", &videoRecFPS);
            break;
#ifdef KM_ENABLE_SYPHON
        case VIDEOREC_MODE_SYPHON :
            ImGui::TextWrapped("Syphon Settings (not yet)");
            break;
#endif
        default:
            break;
    }
}
/**
 * Called whenever the interpreter performs a non-local jump (i.e.,
 * CALL[T], EVAL).
 *
 * Takes care of incrementing hotness counters and switching the
 * interpreter mode.
 *
 * @return address where to continue interpreting.
 */
BCIns *
interpreterBranch(Capability *cap, JitState *J, BCIns *src_pc, BCIns *dst_pc,
                  Word *base, BranchType branchType)
{
  if (LC_UNLIKELY(J->mode == JIT_MODE_RECORDING)) {
    /* TODO: check for recording termination. */
    return dst_pc;
  } else {
    /* Only backward branches (dst < src) are loop candidates, and only if
       the target has not already been compiled into a JFUNC. */
    if (dst_pc < src_pc && bc_op(*dst_pc) != BC_JFUNC) {
      if (incrementHotCounter(cap, J, dst_pc) &&
          !(cap->flags & CF_NO_JIT)) {
        /* It's hot now. */
        /* Start recording mode. */
        Thread *T = cap->T;
        /* Switch to the recording dispatch table so subsequent bytecodes
           are traced. */
        cap->dispatch = cap->dispatch_record;
        T->pc = dst_pc;
        TraceType traceType = branchType == BRANCH_RETURN ?
          RETURN_TRACE : FUNCTION_TRACE;
        startRecording(J, dst_pc, T, base, traceType);
        /* Resume interpretation through the reload-state stub. */
        return cap->reload_state_pc;
      }
    }
    return dst_pc;
  }
}
// Use this constructor to host a collaboration session AbiCollab::AbiCollab(PD_Document* pDoc, const UT_UTF8String& sSessionId, AccountHandler* pAclAccount, bool bLocallyOwned) : EV_MouseListener(), m_pDoc(pDoc), m_Import(this, pDoc), m_Export(this, pDoc), m_pAclAccount(pAclAccount), m_iDocListenerId(0), m_bExportMasked(false), m_sId(sSessionId), m_pController(BuddyPtr()), m_bLocallyOwned(bLocallyOwned), m_pActivePacket(NULL), m_bIsReverting(false), m_pRecorder(NULL), m_bDoingMouseDrag(false), m_eTakeoveState(STS_NONE), m_bProposedController(false), m_pProposedController(BuddyPtr()), m_bSessionFlushed(false) { // TODO: this can be made a lil' more efficient, as setDocument // will create import and export listeners, which is kinda useless // when there is no single collaborator yet _setDocument(pDoc); m_Import.masterInit(); m_Export.masterInit(); #ifdef ABICOLLAB_RECORD_ALWAYS startRecording( new DiskSessionRecorder( this ) ); #endif }
// GUI event handler for the MagicMNM page. Returns true when the event was
// consumed.
bool MagicMNMPage::handleEvent(gui_event_t *event) {
  // While BUTTON4 is held, pressing an encoder clears that encoder's
  // recording (with a flashed "CLEAR <n>" confirmation).
  if (BUTTON_DOWN(Buttons.BUTTON4)) {
    for (int enc = Buttons.ENCODER1; enc <= Buttons.ENCODER4; enc++) {
      if (!EVENT_PRESSED(event, enc))
        continue;
      GUI.setLine(GUI.LINE1);
      GUI.flash_string_fill("CLEAR");
      GUI.setLine(GUI.LINE2);
      GUI.flash_put_value(0, enc);
      clearRecording(enc);
      return true;
    }
  }

  if (EVENT_PRESSED(event, Buttons.BUTTON2)) {
    setToCurrentTrack();
    return true;
  }

  // BUTTON3 is a momentary record switch: press starts, release stops.
  if (EVENT_PRESSED(event, Buttons.BUTTON3)) {
    startRecording();
    return true;
  }
  if (EVENT_RELEASED(event, Buttons.BUTTON3)) {
    stopRecording();
    return true;
  }

  // Swallow BUTTON4 press/release so it is not handled elsewhere.
  if (EVENT_PRESSED(event, Buttons.BUTTON4) ||
      EVENT_RELEASED(event, Buttons.BUTTON4)) {
    return true;
  }

  return false;
}
// Reconciles the requested recording state with the actual one: starts or
// stops only when the two disagree.
void RecordingManager::slotSetRecording(bool recording) {
    const bool active = isRecordingActive();
    if (recording && !active) {
        startRecording();
    } else if (!recording && active) {
        stopRecording();
    }
}
int main(int argc, char *argv[]) { QCoreApplication::addLibraryPath("app/native/plugins"); QApplication app(argc, argv); bool voiceControlled = true; SimondConnector *connector; if (voiceControlled) connector = new SimondConnector; else connector = 0; Recomment recomment; QMLRecommentView view(&recomment, voiceControlled); if (voiceControlled) { QObject::connect(&view, SIGNAL(connectToServer()), connector, SLOT(connectToServer())); QObject::connect(&view, SIGNAL(disconnectFromServer()), connector, SLOT(disconnectFromServer())); QObject::connect(&view, SIGNAL(startRecording()), connector, SLOT(startRecording())); QObject::connect(&view, SIGNAL(commitRecording()), connector, SLOT(commitRecording())); QObject::connect(&view, SIGNAL(configurationChanged()), connector, SLOT(configurationChanged())); QObject::connect(connector, SIGNAL(connectionState(ConnectionState)), &view, SLOT(displayConnectionState(ConnectionState))); QObject::connect(connector, SIGNAL(status(QString)), &view, SLOT(displayStatus(QString))); QObject::connect(connector, SIGNAL(error(QString)), &view, SLOT(displayError(QString))); QObject::connect(connector, SIGNAL(listening()), &view, SLOT(displayListening())); QObject::connect(connector, SIGNAL(recognizing()), &view, SLOT(displayRecognizing())); QObject::connect(connector, SIGNAL(microphoneLevel(int,int,int)), &view, SLOT(displayMicrophoneLevel(int,int,int))); //QObject::connect(connector, SIGNAL(recognized(QString)), &view, SLOT(displayExecutedAction(QString))); QObject::connect(connector, SIGNAL(recognized(QString)), &recomment, SLOT(critique(QString))); } QObject::connect(&recomment, SIGNAL(recommend(const Offer*, QString)), &view, SLOT(displayRecommendation(const Offer*, QString))); QObject::connect(&recomment, SIGNAL(noMatchFor(QString)), &view, SLOT(displayNoMatch(QString))); view.show(); if (voiceControlled) connector->init(); if (!recomment.init()) { qWarning() << "Failed to initialize Recomment; Aborting"; return -1; } int ret = app.exec(); 
delete connector; return ret; }
///////////////////////////////////////////////////////// // recordMess // ///////////////////////////////////////////////////////// void pix_record :: recordMess(bool on) { if (on) { startRecording(); }else{ stopRecording(); } }
// Button handling: A always (re)starts recording; B toggles recording
// on/off based on the current state. A takes priority over B.
void RecordBox::buttonPress(u16 button)
{
    if (button & KEY_A) {
        startRecording();
#ifdef DEBUG
        printf("it worked 1\n");
#endif
        return;
    }

    if (button & KEY_B) {
        if (recording) {
            stopRecording();
        } else {
            startRecording();
        }
    }
}
// Flips the layer between recording and not-recording at the given clock
// position.
void LoopLayer::toggleRecording(uint64_t clockFrame) {
    recording ? stopRecording(clockFrame) : startRecording(clockFrame);
}
/** * @brief Widget::on_pushButton_clicked */ void Widget::on_pushButton_clicked() { if (!rchan) { //this->clearGraphs(); this->timer->start(300); this->timer_for_record->start(3000); startRecording(); } }
/*
 * Main communication pump: parses buffered commands, services the
 * connection (ping timeout, status LED on GPIOD pin 12), streams the
 * subscribed codes, and flushes the double-buffered UART output.
 */
void kommuHandler(void){
    int i;
    /* Process every complete command currently in the input buffer.
     * command[0] is the opcode; any payload follows. */
    while(buildCommand(input_buf,command)){
        switch(command[0]){
        case 'a': /* ping: echo back and reset the miss counter */
            if(kommuConnected){
                sendCommand("a",1);
                kommuNoPing = 0;
            }
            break;
        case 'b': /* connect handshake: ack with 'c', mark connected, LED on */
            sendCommand("c",1);
            kommuConnected = 1;
            kommuNoPing = 0;
            HAL_GPIO_WritePin(GPIOD,GPIO_PIN_12,GPIO_PIN_SET);
            break;
        case 's': /* stop recording one channel (works even when disconnected) */
            stopRecording(command[1]);
            break;
        case 'r': /* start recording one channel */
            if(kommuConnected){
                startRecording(command[1]);
            }
            break;
        case 'q': /* stop all recordings */
            stopRecordingAll();
            break;
        case 'x': /* change a value; payload starts at command+1 */
            if(kommuConnected){
                changeValue(command+1);
            }
            break;
        }
    }
    /* Stream the currently subscribed codes while connected. */
    if(kommuConnected){
        for(i = 0; i < sendingCodesCurrent; i++){
            sendCode(sendingCodes[i],buffer);
        }
    }
    if(kommuNoPing > maxPingsMissed){ //Max ping loss?
        kommuConnected = 0;
        HAL_GPIO_WritePin(GPIOD,GPIO_PIN_12,GPIO_PIN_RESET);
    }else{
        kommuNoPing++;
    }
    /* Kick off a UART transmit if data is pending and the line is idle,
     * then swap to the other output buffer so new data can accumulate
     * while the DMA/ISR drains the current one. */
    if((output_currentPos>0) && !ptrUSART->GetStatus().tx_busy){
        ptrUSART->Send(output_currentBuffer,output_currentPos);
        if(output_currentBufferNr == 1){
            output_currentBuffer = output_buffer2;
            output_currentBufferNr = 2;
        }else{
            output_currentBuffer = output_buffer1;
            output_currentBufferNr = 1;
        }
        output_currentPos = 0;
    }
}
// Radio-button handler: IDC_RADIO1 targets the selected client (starting
// or stopping recording accordingly); IDC_RADIO2 broadcasts to all users
// via a modal dialog and then reverts the selection to radio1.
void Display::OnChange(int id)
{
    CString bmesg;
    if(cbox->GetCount()<=0)
        return; // no users to send to
    if(id==IDC_RADIO1 && radio1->GetCheck()) //Send to client
    {
        cbox->EnableWindow(TRUE);
        if(cbox->GetCount()>=1)
        {
            // Target the currently selected user and begin recording.
            curuser.Empty();
            cbox->GetLBText(cbox->GetCurSel(),curuser);
            log.WriteString("\nConnected to user "+curuser);
            selectflag=1;
            startRecording();
        }
        else
        {
            curuser.Empty();
            selectflag=0;
            stopRecording();
            //stopPlaying();   //****** REMOVED ******//
        }
    }
    if(id==IDC_RADIO2 && radio2->GetCheck()) //Send to All
    {
        curuser="******"; // sentinel meaning "all users"
        if(isstart==0)    //Stop the record and play
            OnStop();
        log.WriteString("\n Broadcasting message");
        // Modal broadcast dialog; returns when the message has been entered.
        Broadcast bd(IDD_DIALOG5,this);
        bd.DoModal();
        bmesg="OVER:";
        selectflag=1;
        if(sockclt.Send(bmesg,bmesg.GetLength()))
            log.WriteString("\n Over Mesg sent to Server");
        else
            log.WriteString("\n Unable to send over mesg to Server");
        // Revert the UI to single-client mode.
        curuser.Empty();
        radio1->SetCheck(1);
        radio2->SetCheck(0);
    }
}