// Transition handler for entering the muted auto-hold state.
// From STATE_CONNECTED: clears queued tones, mutes the microphone, starts
// recording if not already recording, and plays the hold-notification WAV.
// From STATE_AUTOHOLD: only switches the state id (already on hold).
// Any other current state is returned unchanged.
State * TeleKarma::MuteAutoHold(Action * a, State * s)
{
	State * result = s;
	if (s->id == STATE_CONNECTED) {
		if (phone == NULL) {
			// Telephony layer is gone: record the failure, then drop back
			// to the uninitialized state.
			result = SetState(new State(result->id, result->turn, STATUS_FAILED, "Telephony service failed"));
			result = SetState(new State(STATE_UNINITIALIZED, result->turn+1));
		} else {
			result = SetState(new State(STATE_MUTEAUTOHOLD, result->turn+1));
			// clear the touch tone queue
			phone->ClearTones();
			// XXX disable the speaker - implementation to go
			// disable the microphone
			phone->TurnOffMicrophone();
			// play notification of recording IF not already recording
			if (!phone->IsRecording()) {
				result = StartRecording(result);
			}
			result = SetState(new State(STATE_MUTEAUTOHOLD, result->turn, STATUS_RECORDING));
			phone->PlayWAVCall(AUTO_HOLD_WAV, IVR_REPEATS, PAUSE_TIME);
		}
	} else if (s->id == STATE_AUTOHOLD) {
		// Already on hold; only the mute aspect changes here.
		result = SetState(new State(STATE_MUTEAUTOHOLD, result->turn+1));
		// XXX disable the speaker - implementation to go
	}
	return result;
}
// Builds the plugin GUI from the Qt Designer .ui description, wires the
// buttons to their slots, and registers an mitk::EventRecorder as an
// InteractionEventObserver micro service so interaction events are captured.
void InteractionEventRecorder::CreateQtPartControl( QWidget *parent )
{
  // create GUI widgets from the Qt Designer's .ui file
  m_Controls.setupUi( parent );
  connect( m_Controls.btnStopRecording, SIGNAL(clicked()), this, SLOT(StopRecording()) );
  connect( m_Controls.btnStartRecording, SIGNAL(clicked()), this, SLOT(StartRecording()) );
  connect( m_Controls.btnPlay, SIGNAL(clicked()), this, SLOT(Play()) );
  connect( m_Controls.btnOpenFile, SIGNAL(clicked()), this, SLOT(OpenFile()) );
  connect( m_Controls.rotatePlanes, SIGNAL(clicked()), this, SLOT(RotatePlanes()) );
  connect( m_Controls.rotate3D, SIGNAL(clicked()), this, SLOT(RotateView()) );

  // NOTE(review): raw new with no matching delete visible in this block —
  // presumably released alongside the service registration; confirm in the
  // destructor.
  m_CurrentObserver = new mitk::EventRecorder();
  // Register as listener via micro services
  us::ServiceProperties props;
  props["name"] = std::string("EventRecorder");
  m_ServiceRegistration = us::GetModuleContext()->RegisterService<mitk::InteractionEventObserver>(m_CurrentObserver,props);

  /*
  delete m_CurrentObserverDEBUG;
  m_ServiceRegistrationDEBUG.Unregister();
  */
}
/* Keyboard hook: on key press, 'R' starts recording and 'S' stops it.
 * Key releases and redundant requests (already/not recording) are ignored. */
static void MKeyEvent( void * v, int keycode, int down )
{
	struct RecorderPlugin * rp = (struct RecorderPlugin*)v;
	const char key = toupper( keycode );

	if( !down )
		return;

	if( key == 'R' && !rp->is_recording )
		StartRecording( rp );
	else if( key == 'S' && rp->is_recording )
		StopRecording( rp );
}
// Console-command handler. Returns true when the command was consumed.
// Supported commands:
//   VIDEORECORD - toggle video capture on/off
//   VIDEOMUX    - (debug) mux previously captured raw audio/video
//   UPLOADVIDEO - (debug) upload the last written video to YouTube
bool FWebMRecord::Exec(UWorld* InWorld, const TCHAR* Cmd, FOutputDevice& Ar)
{
	if (FParse::Command(&Cmd, TEXT("VIDEORECORD")))
	{
		if (bRecording)
		{
			StopRecording();
		}
		else
		{
			StartRecording(-1);
		}
		return true;
	}

	if (FParse::Command(&Cmd, TEXT("VIDEOMUX")))
	{
		EncodeVideoAndAudio(TEXT(""));
		return true;
	}

	if (FParse::Command(&Cmd, TEXT("UPLOADVIDEO")))
	{
		DebugUploadLastVideo(InWorld);
		return true;
	}

	return false;
}
// Begins a speech recognition session.
// @param aStream  optional media stream to record from; when omitted,
//                 MediaManager::GetUserMedia is used to obtain microphone
//                 input and recording starts in the success callback.
// @param aRv      receives NS_ERROR_DOM_INVALID_STATE_ERR when a session is
//                 already in progress (state must be STATE_IDLE).
void SpeechRecognition::Start(const Optional<NonNull<DOMMediaStream>>& aStream, ErrorResult& aRv)
{
  if (mCurrentState != STATE_IDLE) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  // Instantiate and initialize the configured recognition service.
  nsAutoCString speechRecognitionServiceCID;
  GetRecognitionServiceCID(speechRecognitionServiceCID);

  nsresult rv;
  mRecognitionService = do_GetService(speechRecognitionServiceCID.get(), &rv);
  NS_ENSURE_SUCCESS_VOID(rv);

  rv = mRecognitionService->Initialize(this);
  NS_ENSURE_SUCCESS_VOID(rv);

  // Audio-only capture constraints.
  MediaStreamConstraints constraints;
  constraints.mAudio.SetAsBoolean() = true;

  if (aStream.WasPassed()) {
    StartRecording(&aStream.Value());
  } else {
    MediaManager* manager = MediaManager::Get();
    manager->GetUserMedia(false, GetOwner(), constraints, new GetUserMediaSuccessCallback(this), new GetUserMediaErrorCallback(this));
  }

  // Kick the recognition state machine on the main thread.
  nsRefPtr<SpeechEvent> event = new SpeechEvent(this, EVENT_START);
  NS_DispatchToMainThread(event);
}
// Asks the user for an export destination and starts recording to it.
// Cancelling the save dialog aborts without starting a recording.
void Movie::PromptForRecording()
{
	FileSpecifier dst_file;
	const bool chose_destination = dst_file.WriteDialog(_typecode_movie, "EXPORT FILM", "Untitled Movie.webm");
	if (chose_destination)
		StartRecording(dst_file.GetPath());
}
// Begins local voice capture for the given local user.
// Returns S_OK on success, E_FAIL when the device belongs to another user.
uint32 FVoiceEngineSteam::StartLocalVoiceProcessing(uint32 LocalUserNum)
{
	if (!IsOwningUser(LocalUserNum))
	{
		UE_LOG(LogVoiceEncode, Error, TEXT("StartLocalVoiceProcessing(): Device is currently owned by another user"));
		return E_FAIL;
	}

	if (!bIsCapturing)
	{
		// Update the current recording state, if VOIP data was still being read
		VoiceCaptureUpdate();
		if (!IsRecording())
		{
			StartRecording();
		}
		bIsCapturing = true;
	}

	return S_OK;
}
// Flips the recorder between its two states.
void OBS::ToggleRecording()
{
    if (bRecording)
        StopRecording();
    else
        StartRecording();
}
// Test-only dispatcher: translates a named fake event into the matching
// state-machine action. Unrecognized names are assumed to be destined for
// the fake recognition service.
void SpeechRecognition::ProcessTestEventRequest(nsISupports* aSubject, const nsAString& aEventName)
{
  if (aEventName.EqualsLiteral("EVENT_START")) {
    ErrorResult err;
    Start(err);
    return;
  }
  if (aEventName.EqualsLiteral("EVENT_STOP")) {
    Stop();
    return;
  }
  if (aEventName.EqualsLiteral("EVENT_ABORT")) {
    Abort();
    return;
  }
  if (aEventName.EqualsLiteral("EVENT_AUDIO_ERROR")) {
    DispatchError(SpeechRecognition::EVENT_AUDIO_ERROR,
                  SpeechRecognitionErrorCode::Audio_capture, // TODO different codes?
                  NS_LITERAL_STRING("AUDIO_ERROR test event"));
    return;
  }
  if (aEventName.EqualsLiteral("EVENT_AUDIO_DATA")) {
    StartRecording(static_cast<DOMMediaStream*>(aSubject));
    return;
  }

  NS_ASSERTION(mTestConfig.mFakeRecognitionService,
               "Got request for fake recognition service event, but "
               TEST_PREFERENCE_FAKE_RECOGNITION_SERVICE
               " is unset");
  // let the fake recognition service handle the request
}
// Polls Steam for completion of a pending voice-capture shutdown.
// After StopRecording(), Steam continues delivering a final half second of
// voice data; once GetAvailableVoice reports "not recording", either restart
// capture (a new session was requested meanwhile) or finalize the stop.
// NOTE(review): declared const yet writes bPendingFinalCapture — presumably
// those members are mutable; confirm in the class declaration.
void FVoiceEngineSteam::VoiceCaptureUpdate() const
{
	if (bPendingFinalCapture)
	{
		uint32 CompressedSize;
		const EVoiceResult RecordingState = SteamUserPtr->GetAvailableVoice(&CompressedSize, NULL, 0);

		// If no data is available, we have finished capture the last (post-StopRecording) half-second of voice data
		if (RecordingState == k_EVoiceResultNotRecording)
		{
			UE_LOG(LogVoiceEncode, Log, TEXT("Internal voice capture complete."));
			bPendingFinalCapture = false;

			// If a new recording session has begun since the call to 'StopRecording', kick that off
			if (bIsCapturing)
			{
				StartRecording();
			}
			else
			{
				// Marks that recording has successfully stopped
				StoppedRecording();
			}
		}
	}
}
// Queue management functions void DeviceVulkanw::Flush( std::uint32_t queue ) { if(false == m_is_command_buffer_recording) { StartRecording(); } EndRecording(true, nullptr); }
// Maps recorder panel button ids onto recorder actions; other ids are
// ignored. The bool passed to StartRecording selects the rename variant
// (see StartRecording).
void GOrgueMidiRecorder::ButtonChanged(int id)
{
	if (id == ID_MIDI_RECORDER_STOP)
		StopRecording();
	else if (id == ID_MIDI_RECORDER_RECORD)
		StartRecording(false);
	else if (id == ID_MIDI_RECORDER_RECORD_RENAME)
		StartRecording(true);
}
void StatAggregator::Snapshot() { // Ignore this call if not recording if (state_ != RECORDING) return; StopRecording(); Generate(); Reset(); StartRecording(); }
/*
** ToggleRecording
**
** Flips the recorder state under the recorder's critical section so the
** check-and-toggle is atomic with respect to other recorder operations.
*/
void ACMIRecorder::ToggleRecording( void )
{
	F4EnterCriticalSection( _csect );

	if ( !IsRecording() )
		StartRecording();
	else
		StopRecording();

	F4LeaveCriticalSection( _csect );
}
// Toggles recording. Starting a recording requires the capture pipeline to
// be up, so spin it up in recording-only mode first when neither running nor
// streaming yet.
void OBS::ToggleRecording()
{
    if (bRecording)
    {
        StopRecording();
        return;
    }

    if (!bRunning && !bStreaming)
        Start(true);

    StartRecording();
}
void CHLTVDemoRecorder::StartAutoRecording() { char fileName[MAX_OSPATH]; tm today; VCRHook_LocalTime( &today ); Q_snprintf( fileName, sizeof(fileName), "auto-%04i%02i%02i-%02i%02i-%s.dem", 1900 + today.tm_year, today.tm_mon+1, today.tm_mday, today.tm_hour, today.tm_min, hltv->GetMapName() ); StartRecording( fileName, false ); }
// Swaps in a new audio transport callback. Playout and recording are stopped
// first so the worker threads never observe the old callback mid-swap, then
// whichever of the two was active is restarted. Always returns 0.
int32_t fake_audiodevice::RegisterAudioCallback(AudioTransport* audioCallback)
{
  const bool resume_playout = is_playing_;
  const bool resume_capture = is_recording_;

  // Stop the threads that use the callback before replacing it.
  StopPlayout();
  StopRecording();
  audioCallback_ = audioCallback;

  if (resume_playout)
    StartPlayout();
  if (resume_capture)
    StartRecording();

  return 0;
}
// Begins a speech recognition session (variant taking the caller type).
// Validates idle state, the recognition service, and the grammar list before
// starting capture.
// @param aStream      optional stream to record from; otherwise getUserMedia
//                     is invoked to obtain microphone input.
// @param aCallerType  forwarded to GetUserMedia for permission handling.
// @param aRv          receives NS_ERROR_DOM_INVALID_STATE_ERR when a session
//                     is already active, or any service/grammar failure.
void SpeechRecognition::Start(const Optional<NonNull<DOMMediaStream>>& aStream, CallerType aCallerType, ErrorResult& aRv)
{
  if (mCurrentState != STATE_IDLE) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (!SetRecognitionService(aRv)) {
    return;
  }

  if (!ValidateAndSetGrammarList(aRv)) {
    return;
  }

  nsresult rv;
  rv = mRecognitionService->Initialize(this);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return;
  }

  // Audio-only capture constraints.
  MediaStreamConstraints constraints;
  constraints.mAudio.SetAsBoolean() = true;

  if (aStream.WasPassed()) {
    StartRecording(&aStream.Value());
  } else {
    // Keep JS off the stack while prompting for the microphone.
    AutoNoJSAPI();
    MediaManager* manager = MediaManager::Get();
    manager->GetUserMedia(GetOwner(), constraints, new GetUserMediaSuccessCallback(this), new GetUserMediaErrorCallback(this), aCallerType);
  }

  // Kick the recognition state machine on the main thread.
  RefPtr<SpeechEvent> event = new SpeechEvent(this, EVENT_START);
  NS_DispatchToMainThread(event);
}
// Second-phase (leaving) constructor, per Symbian two-phase construction.
// Creates the inter-thread mutex and helper objects, the record/report file
// writers, starts reporting/recording when the node is configured for them,
// and finally joins the active scheduler. May leave on any allocation or
// mutex-creation failure.
void CCollector::ConstructL()
{
	// User::LeaveIfError(iMutex1.CreateLocal());
	User::LeaveIfError(iMutex2.CreateLocal());
	iInterThreadObject = CInterThreadObject::NewL(this);
	iRecordFile = CFileWriter::NewL(iNode);
	iReportFile = CFileWriter::NewL(iNode);
	// add reporter
	if(iNode->iReport)
	{
		StartReporting();
	}
	// add recorder
	if(iNode->iRecord)
	{
		StartRecording();
	}
	CActiveScheduler::Add(this); // Add to scheduler
}
// Dock widget for recording CiTrace graphics traces.
// Creates Start/Stop/Abort buttons, wires them to the recording slots, and
// hooks the enable-signals up to button *visibility* so exactly one set of
// controls is shown depending on whether a trace is in progress. Only
// "Start" is visible initially.
GraphicsTracingWidget::GraphicsTracingWidget(std::shared_ptr<Pica::DebugContext> debug_context, QWidget* parent)
    : BreakPointObserverDock(debug_context, tr("CiTrace Recorder"), parent) {

    setObjectName("CiTracing");

    QPushButton* start_recording = new QPushButton(tr("Start Recording"));
    QPushButton* stop_recording = new QPushButton(QIcon::fromTheme("document-save"), tr("Stop and Save"));
    QPushButton* abort_recording = new QPushButton(tr("Abort Recording"));

    // "Enabled" signals drive setVisible, not setEnabled.
    connect(this, SIGNAL(SetStartTracingButtonEnabled(bool)), start_recording, SLOT(setVisible(bool)));
    connect(this, SIGNAL(SetStopTracingButtonEnabled(bool)), stop_recording, SLOT(setVisible(bool)));
    connect(this, SIGNAL(SetAbortTracingButtonEnabled(bool)), abort_recording, SLOT(setVisible(bool)));
    connect(start_recording, SIGNAL(clicked()), this, SLOT(StartRecording()));
    connect(stop_recording, SIGNAL(clicked()), this, SLOT(StopRecording()));
    connect(abort_recording, SIGNAL(clicked()), this, SLOT(AbortRecording()));

    // No trace running at construction time.
    stop_recording->setVisible(false);
    abort_recording->setVisible(false);

    // Lay the three buttons out horizontally inside the dock's main widget.
    auto main_widget = new QWidget;
    auto main_layout = new QVBoxLayout;
    {
        auto sub_layout = new QHBoxLayout;
        sub_layout->addWidget(start_recording);
        sub_layout->addWidget(stop_recording);
        sub_layout->addWidget(abort_recording);
        main_layout->addLayout(sub_layout);
    }
    main_widget->setLayout(main_layout);
    setWidget(main_widget);
}
void OBS::Start(bool recordingOnly) { if(bRunning && !bRecording) return; int networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2); DWORD delayTime = (DWORD)AppConfig->GetInt(TEXT("Publish"), TEXT("Delay")); if (bRecording && networkMode != 0) return; if(bRecording && networkMode == 0 && delayTime == 0 && !recordingOnly) { bFirstConnect = !bReconnecting; if (network) { NetworkStream *net = network; network = nullptr; delete net; } network = CreateRTMPPublisher(); Log(TEXT("=====Stream Start (while recording): %s============================="), CurrentDateTimeString().Array()); bSentHeaders = false; bStreaming = true; ReportStartStreamingTrigger(); ConfigureStreamButtons(); return; } bStartingUp = true; OSEnterMutex(hStartupShutdownMutex); DisableMenusWhileStreaming(true); scenesConfig.Save(); //------------------------------------------------------------- fps = AppConfig->GetInt(TEXT("Video"), TEXT("FPS"), 30); frameTime = 1000/fps; //------------------------------------------------------------- if(!bLoggedSystemStats) { LogSystemStats(); bLoggedSystemStats = TRUE; } OSCheckForBuggyDLLs(); //------------------------------------------------------------- retryHookTest: bool alreadyWarnedAboutModules = false; if (OSIncompatibleModulesLoaded()) { Log(TEXT("Incompatible modules (pre-D3D) detected.")); int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE); if (ret == IDABORT) { DisableMenusWhileStreaming(false); OSLeaveMutex (hStartupShutdownMutex); bStartingUp = false; return; } else if (ret == IDRETRY) { goto retryHookTest; } alreadyWarnedAboutModules = true; } String strPatchesError; if (OSIncompatiblePatchesLoaded(strPatchesError)) { DisableMenusWhileStreaming(true); OSLeaveMutex (hStartupShutdownMutex); MessageBox(hwndMain, strPatchesError.Array(), NULL, MB_ICONERROR); Log(TEXT("Incompatible patches detected.")); bStartingUp = false; return; } //------------------------------------------------------------- 
String processPriority = AppConfig->GetString(TEXT("General"), TEXT("Priority"), TEXT("Normal")); if (!scmp(processPriority, TEXT("Idle"))) SetPriorityClass(GetCurrentProcess(), IDLE_PRIORITY_CLASS); else if (!scmp(processPriority, TEXT("Above Normal"))) SetPriorityClass(GetCurrentProcess(), ABOVE_NORMAL_PRIORITY_CLASS); else if (!scmp(processPriority, TEXT("High"))) SetPriorityClass(GetCurrentProcess(), HIGH_PRIORITY_CLASS); networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2); delayTime = (DWORD)AppConfig->GetInt(TEXT("Publish"), TEXT("Delay")); String strError; bFirstConnect = !bReconnecting; if(bTestStream || recordingOnly) network = CreateNullNetwork(); else { switch(networkMode) { case 0: network = (delayTime > 0) ? CreateDelayedPublisher(delayTime) : CreateRTMPPublisher(); break; case 1: network = CreateNullNetwork(); break; } } if(!network) { DisableMenusWhileStreaming(false); OSLeaveMutex (hStartupShutdownMutex); if(!bReconnecting) MessageBox(hwndMain, strError, NULL, MB_ICONERROR); else DialogBox(hinstMain, MAKEINTRESOURCE(IDD_RECONNECTING), hwndMain, OBS::ReconnectDialogProc); bStartingUp = false; return; } bReconnecting = false; //------------------------------------------------------------- Log(TEXT("=====Stream Start: %s==============================================="), CurrentDateTimeString().Array()); //------------------------------------------------------------- bEnableProjectorCursor = GlobalConfig->GetInt(L"General", L"EnableProjectorCursor", 1) != 0; bPleaseEnableProjector = bPleaseDisableProjector = false; int monitorID = AppConfig->GetInt(TEXT("Video"), TEXT("Monitor")); if(monitorID >= (int)monitors.Num()) monitorID = 0; RECT &screenRect = monitors[monitorID].rect; int defCX = screenRect.right - screenRect.left; int defCY = screenRect.bottom - screenRect.top; downscaleType = AppConfig->GetInt(TEXT("Video"), TEXT("Filter"), 0); downscale = AppConfig->GetFloat(TEXT("Video"), TEXT("Downscale"), 1.0f); baseCX = 
AppConfig->GetInt(TEXT("Video"), TEXT("BaseWidth"), defCX); baseCY = AppConfig->GetInt(TEXT("Video"), TEXT("BaseHeight"), defCY); baseCX = MIN(MAX(baseCX, 128), 4096); baseCY = MIN(MAX(baseCY, 128), 4096); scaleCX = UINT(double(baseCX) / double(downscale)); scaleCY = UINT(double(baseCY) / double(downscale)); //align width to 128bit for fast SSE YUV4:2:0 conversion outputCX = scaleCX & 0xFFFFFFFC; outputCY = scaleCY & 0xFFFFFFFE; bUseMultithreadedOptimizations = AppConfig->GetInt(TEXT("General"), TEXT("UseMultithreadedOptimizations"), TRUE) != 0; Log(TEXT(" Multithreaded optimizations: %s"), (CTSTR)(bUseMultithreadedOptimizations ? TEXT("On") : TEXT("Off"))); encoderSkipThreshold = GlobalConfig->GetInt(TEXT("Video"), TEXT("EncoderSkipThreshold"), fps/4); //------------------------------------------------------------------ Log(TEXT(" Base resolution: %ux%u"), baseCX, baseCY); Log(TEXT(" Output resolution: %ux%u"), outputCX, outputCY); Log(TEXT("------------------------------------------")); //------------------------------------------------------------------ GS = new D3D10System; GS->Init(); //Thanks to ASUS OSD hooking the goddamn user mode driver framework (!!!!), we have to re-check for dangerous //hooks after initializing D3D. retryHookTestV2: if (!alreadyWarnedAboutModules) { if (OSIncompatibleModulesLoaded()) { Log(TEXT("Incompatible modules (post-D3D) detected.")); int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE); if (ret == IDABORT) { //FIXME: really need a better way to abort startup than this... 
delete network; delete GS; DisableMenusWhileStreaming(false); OSLeaveMutex (hStartupShutdownMutex); bStartingUp = false; return; } else if (ret == IDRETRY) { goto retryHookTestV2; } } } //------------------------------------------------------------- mainVertexShader = CreateVertexShaderFromFile(TEXT("shaders/DrawTexture.vShader")); mainPixelShader = CreatePixelShaderFromFile(TEXT("shaders/DrawTexture.pShader")); solidVertexShader = CreateVertexShaderFromFile(TEXT("shaders/DrawSolid.vShader")); solidPixelShader = CreatePixelShaderFromFile(TEXT("shaders/DrawSolid.pShader")); if(!mainVertexShader || !mainPixelShader) CrashError(TEXT("Unable to load DrawTexture shaders")); if(!solidVertexShader || !solidPixelShader) CrashError(TEXT("Unable to load DrawSolid shaders")); //------------------------------------------------------------------ CTSTR lpShader; if(CloseFloat(downscale, 1.0)) lpShader = TEXT("shaders/DrawYUVTexture.pShader"); else if(downscale < 2.01) { switch(downscaleType) { case 0: lpShader = TEXT("shaders/DownscaleBilinear1YUV.pShader"); break; case 1: lpShader = TEXT("shaders/DownscaleBicubicYUV.pShader"); break; case 2: lpShader = TEXT("shaders/DownscaleLanczos6tapYUV.pShader"); break; } } else if(downscale < 3.01) lpShader = TEXT("shaders/DownscaleBilinear9YUV.pShader"); else CrashError(TEXT("Invalid downscale value (must be either 1.0, 1.5, 2.0, 2.25, or 3.0)")); yuvScalePixelShader = CreatePixelShaderFromFile(lpShader); if (!yuvScalePixelShader) CrashError(TEXT("Unable to create shader from file %s"), lpShader); //------------------------------------------------------------- for(UINT i=0; i<NUM_RENDER_BUFFERS; i++) { mainRenderTextures[i] = CreateRenderTarget(baseCX, baseCY, GS_BGRA, FALSE); yuvRenderTextures[i] = CreateRenderTarget(outputCX, outputCY, GS_BGRA, FALSE); } //------------------------------------------------------------- D3D10_TEXTURE2D_DESC td; zero(&td, sizeof(td)); td.Width = outputCX; td.Height = outputCY; td.Format = 
DXGI_FORMAT_B8G8R8A8_UNORM; td.MipLevels = 1; td.ArraySize = 1; td.SampleDesc.Count = 1; td.ArraySize = 1; td.Usage = D3D10_USAGE_STAGING; td.CPUAccessFlags = D3D10_CPU_ACCESS_READ; for(UINT i=0; i<NUM_RENDER_BUFFERS; i++) { HRESULT err = GetD3D()->CreateTexture2D(&td, NULL, ©Textures[i]); if(FAILED(err)) { CrashError(TEXT("Unable to create copy texture")); //todo - better error handling } } //------------------------------------------------------------------ String strEncoder = AppConfig->GetString(TEXT("Audio Encoding"), TEXT("Codec"), TEXT("AAC")); BOOL isAAC = strEncoder.CompareI(TEXT("AAC")); UINT format = AppConfig->GetInt(L"Audio Encoding", L"Format", 1); if (!isAAC) format = 0; switch (format) { case 0: sampleRateHz = 44100; break; default: case 1: sampleRateHz = 48000; break; } Log(L"------------------------------------------"); Log(L"Audio Format: %uhz", sampleRateHz); //------------------------------------------------------------------ AudioDeviceList playbackDevices; bool useInputDevices = AppConfig->GetInt(L"Audio", L"UseInputDevices", false) != 0; GetAudioDevices(playbackDevices, useInputDevices ? 
ADT_RECORDING : ADT_PLAYBACK); String strPlaybackDevice = AppConfig->GetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default")); if(strPlaybackDevice.IsEmpty() || !playbackDevices.HasID(strPlaybackDevice)) { AppConfig->SetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default")); strPlaybackDevice = TEXT("Default"); } Log(TEXT("Playback device %s"), strPlaybackDevice.Array()); playbackDevices.FreeData(); desktopAudio = CreateAudioSource(false, strPlaybackDevice); if(!desktopAudio) { CrashError(TEXT("Cannot initialize desktop audio sound, more info in the log file.")); } AudioDeviceList audioDevices; GetAudioDevices(audioDevices, ADT_RECORDING, false, true); String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL); if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice)) { AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable")); strDevice = TEXT("Disable"); } audioDevices.FreeData(); String strDefaultMic; bool bHasDefault = GetDefaultMicID(strDefaultMic); if(strDevice.CompareI(TEXT("Disable"))) EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE); else { bool bUseDefault = strDevice.CompareI(TEXT("Default")) != 0; if(!bUseDefault || bHasDefault) { if(bUseDefault) strDevice = strDefaultMic; micAudio = CreateAudioSource(true, strDevice); if(!micAudio) MessageBox(hwndMain, Str("MicrophoneFailure"), NULL, 0); else micAudio->SetTimeOffset(AppConfig->GetInt(TEXT("Audio"), TEXT("MicTimeOffset"), 0)); EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), micAudio != NULL); } else EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE); } //------------------------------------------------------------- bool bDisableEncoding = false; if (bTestStream) bDisableEncoding = GlobalConfig->GetInt(TEXT("General"), TEXT("DisablePreviewEncoding"), false) != 0; //------------------------------------------------------------- UINT bitRate = (UINT)AppConfig->GetInt(TEXT("Audio Encoding"), TEXT("Bitrate"), 96); if (bDisableEncoding) audioEncoder = 
CreateNullAudioEncoder(); else #ifdef USE_AAC if(isAAC) // && OSGetVersion() >= 7) audioEncoder = CreateAACEncoder(bitRate); else #endif audioEncoder = CreateMP3Encoder(bitRate); //------------------------------------------------------------- desktopVol = AppConfig->GetFloat(TEXT("Audio"), TEXT("DesktopVolume"), 1.0f); micVol = AppConfig->GetFloat(TEXT("Audio"), TEXT("MicVolume"), 1.0f); //------------------------------------------------------------- bRunning = true; if(sceneElement) { scene = CreateScene(sceneElement->GetString(TEXT("class")), sceneElement->GetElement(TEXT("data"))); XElement *sources = sceneElement->GetElement(TEXT("sources")); if(sources) { UINT numSources = sources->NumElements(); for(UINT i=0; i<numSources; i++) { SceneItem *item = scene->AddImageSource(sources->GetElementByID(i)); if(item) { if(ListView_GetItemState(GetDlgItem(hwndMain, ID_SOURCES), i, LVIS_SELECTED) > 0) item->Select(true); } } } scene->BeginScene(); unsigned int numSources = scene->sceneItems.Num(); for(UINT i=0; i<numSources; i++) { XElement *source = scene->sceneItems[i]->GetElement(); String className = source->GetString(TEXT("class")); if(scene->sceneItems[i]->bRender && className == "GlobalSource") { XElement *globalSourceData = source->GetElement(TEXT("data")); String globalSourceName = globalSourceData->GetString(TEXT("name")); if(App->GetGlobalSource(globalSourceName) != NULL) { App->GetGlobalSource(globalSourceName)->GlobalSourceEnterScene(); } } } } if(scene && scene->HasMissingSources()) MessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0); //------------------------------------------------------------- int maxBitRate = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000); int bufferSize = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("BufferSize"), 1000); int quality = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("Quality"), 8); String preset = AppConfig->GetString(TEXT("Video Encoding"), TEXT("Preset"), TEXT("veryfast")); bUsing444 = 
false;//AppConfig->GetInt (TEXT("Video Encoding"), TEXT("Use444"), 0) != 0; bUseCFR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCFR"), 1) != 0; //------------------------------------------------------------- bufferingTime = GlobalConfig->GetInt(TEXT("General"), TEXT("SceneBufferingTime"), 700); Log(TEXT("Scene buffering time set to %u"), bufferingTime); //------------------------------------------------------------- bForceMicMono = AppConfig->GetInt(TEXT("Audio"), TEXT("ForceMicMono")) != 0; bRecievedFirstAudioFrame = false; //hRequestAudioEvent = CreateSemaphore(NULL, 0, 0x7FFFFFFFL, NULL); hSoundDataMutex = OSCreateMutex(); hSoundThread = OSCreateThread((XTHREAD)OBS::MainAudioThread, NULL); //------------------------------------------------------------- if (!useInputDevices) StartBlankSoundPlayback(strPlaybackDevice); //------------------------------------------------------------- colorDesc.fullRange = false; colorDesc.primaries = ColorPrimaries_BT709; colorDesc.transfer = ColorTransfer_IEC6196621; colorDesc.matrix = outputCX >= 1280 || outputCY > 576 ? 
ColorMatrix_BT709 : ColorMatrix_SMPTE170M; videoEncoder = nullptr; String videoEncoderErrors; if (bDisableEncoding) videoEncoder = CreateNullVideoEncoder(); else if(AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0) videoEncoder = CreateQSVEncoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR, videoEncoderErrors); else if(AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseNVENC")) != 0) videoEncoder = CreateNVENCEncoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR, videoEncoderErrors); else videoEncoder = CreateX264Encoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR); if (!videoEncoder) { Log(L"Couldn't initialize encoder"); Stop(true); if (videoEncoderErrors.IsEmpty()) videoEncoderErrors = Str("Encoder.InitFailed"); else videoEncoderErrors = String(Str("Encoder.InitFailedWithReason")) + videoEncoderErrors; MessageBox(hwndMain, videoEncoderErrors.Array(), nullptr, MB_OK | MB_ICONERROR); //might want to defer localization until here to automatically //output english localization to logfile return; } if ((bStreaming = !recordingOnly)) ReportStartStreamingTrigger(); //------------------------------------------------------------- // Ensure that the render frame is properly sized ResizeRenderFrame(true); //------------------------------------------------------------- StartRecording(); //------------------------------------------------------------- curFramePic = NULL; bShutdownVideoThread = false; bShutdownEncodeThread = false; //ResetEvent(hVideoThread); hEncodeThread = OSCreateThread((XTHREAD)OBS::EncodeThread, NULL); hVideoThread = OSCreateThread((XTHREAD)OBS::MainCaptureThread, NULL); EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), TRUE); //------------------------------------------------------------- ReportStartStreamTrigger(); SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 0, 0, 0); 
SetThreadExecutionState(ES_CONTINUOUS | ES_SYSTEM_REQUIRED | ES_AWAYMODE_REQUIRED | ES_DISPLAY_REQUIRED); UpdateRenderViewMessage(); //update notification icon to reflect current status UpdateNotificationAreaIcon(); OSLeaveMutex (hStartupShutdownMutex); bStartingUp = false; ConfigureStreamButtons(); }
// Opens the output stream and begins a recording session.
// Builds "<path>/<name>-<index>.<ext>" (.xml, or .csv when m_OutputFormat is
// csv); when m_DoNotOverwriteFiles is set, the index is bumped past any
// existing file. Console and (unsupported) ZipFile modes write to std::cout.
// Does nothing but warn when already recording; does nothing at all when a
// stream is already set.
void mitk::NavigationDataRecorderDeprecated::StartRecording()
{
  if(!m_Recording)
  {
    if (m_Stream == nullptr)
    {
      std::stringstream ss;
      std::ostream* stream;

      //An existing extension will be cut and replaced with .xml
      std::string tmpPath = itksys::SystemTools::GetFilenamePath(m_FileName);
      m_FileName = itksys::SystemTools::GetFilenameWithoutExtension(m_FileName);
      std::string extension = ".xml";
      if (m_OutputFormat == mitk::NavigationDataRecorderDeprecated::csv)
        extension = ".csv";

      ss << tmpPath << "/" << m_FileName << "-" << m_NumberOfRecordedFiles << extension;

      if( m_DoNotOverwriteFiles )
      {
        // Probe increasing indices until a free file name is found.
        unsigned int index = m_NumberOfRecordedFiles+1;
        while( itksys::SystemTools::FileExists( ss.str().c_str() ) )
        {
          ss.str("");
          ss << tmpPath << "/" << m_FileName << "-" << index << extension;
          index++;
        }
      }

      switch(m_RecordingMode)
      {
      case Console:
        stream = &std::cout;
        break;
      case NormalFile:
        if (m_FileName == "") //Check if there is a file name and path
        {
          std::string message = "No file name or file path set.";
          MITK_ERROR << message;
          mitkThrowException(mitk::IGTException) << message;
        }
        else
        {
          stream = new std::ofstream(ss.str().c_str());
        }
        break;
      case ZipFile:
        stream = &std::cout;
        MITK_WARN << "Sorry no ZipFile support yet";
        break;
      default:
        stream = &std::cout;
        break;
      }

      // NOTE(review): m_StreamMustBeDeleted is set unconditionally, even when
      // stream points at std::cout (Console/ZipFile/default) — deleting that
      // later would be UB. Verify the cleanup path only deletes the
      // heap-allocated ofstream.
      m_Stream = stream;
      m_StreamMustBeDeleted = true;
      m_firstLine = true;
      m_RecordCounter = 0;
      StartRecording(stream);
    }
  }
  else if (m_Recording)
  {
    MITK_WARN << "Already recording please stop before start new recording session";
    return;
  }
}
//--------------------------------------------------------------------------- void __fastcall TForm1::Button1Click(TObject *Sender) { StartRecording(); //Chart1-> }
// Records and submits a compute dispatch of `func` on this device.
// On first use, lazily creates — and caches on the FunctionVulkan — the
// descriptor set group (one storage-buffer binding per parameter) and the
// compute pipeline. `queue` and `local_size` are not used in this body.
// Execution Not thread safe
void DeviceVulkanw::Execute( Function const* func, std::uint32_t queue, size_t global_size, size_t local_size, Event** e )
{
    FunctionVulkan* vulkan_function = ConstCast<FunctionVulkan>( func );
    uint32_t number_of_parameters = (uint32_t)( vulkan_function->GetParameters().size() );

    // get the Function's descriptor set group
    Anvil::DescriptorSetGroup* new_descriptor_set = vulkan_function->GetDescriptorSetGroup();

    // if it's empty, this is 1st run of the Function so we have to create it
    if ( nullptr == new_descriptor_set )
    {
        // allocate it through Anvil
        new_descriptor_set = new Anvil::DescriptorSetGroup( m_anvil_device, false, 1 );

        // add bindings and items (Buffers) to the new DSG
        for ( uint32_t i = 0; i < number_of_parameters; ++i )
        {
            const Buffer* parameter = vulkan_function->GetParameters()[ i ];
            BufferVulkan* buffer = ConstCast<BufferVulkan>( parameter );
            new_descriptor_set->add_binding( 0, i, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT );
        }

        // set it to the Function to be reused during any subsequent run
        vulkan_function->SetDescriptorSetGroup( new_descriptor_set );
    }

    // bind new items (Buffers), releasing the old ones
    for ( uint32_t i = 0; i < number_of_parameters; ++i )
    {
        const Buffer* parameter = vulkan_function->GetParameters()[ i ];
        BufferVulkan* buffer = ConstCast<BufferVulkan>( parameter );
        new_descriptor_set->set_binding_item( 0, i, buffer->GetAnvilBuffer() );
    }

    // get the Function's pipeline
    Anvil::ComputePipelineID pipeline_id = vulkan_function->GetPipelineID();

    // if it is invalid, this is 1st run of the Function so we have to create it
    if ( ~0u == pipeline_id )
    {
        // create the pipeline through Anvil with the shader module as a parameter
        m_anvil_device->get_compute_pipeline_manager()->add_regular_pipeline( false, false, vulkan_function->GetFunctionEntryPoint(), &pipeline_id );

        // attach the DSG to it
        m_anvil_device->get_compute_pipeline_manager()->attach_dsg_to_pipeline( pipeline_id, new_descriptor_set );

        // remember the pipeline for any subsequent run
        vulkan_function->SetPipelineID( pipeline_id );
    }

    // indicate we'll be recording Vulkan commands to the CommandBuffer from now on
    StartRecording();

    // attach pipeline
    m_command_buffer->record_bind_pipeline( VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_id );

    Anvil::PipelineLayout* pipeline_layout = m_anvil_device->get_compute_pipeline_manager()->get_compute_pipeline_layout( pipeline_id );
    Anvil::DescriptorSet* descriptor_set = new_descriptor_set->get_descriptor_set( 0 );

    // attach layout and 0 descriptor set (we don't use any other set currently)
    m_command_buffer->record_bind_descriptor_sets( VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout, 0, 1, &descriptor_set, 0, nullptr );

    // set memory barriers: make host writes visible to the shader for every parameter buffer
    for ( uint32_t i = 0; i < number_of_parameters; ++i )
    {
        const Buffer* parameter = vulkan_function->GetParameters()[ i ];
        BufferVulkan* buffer = ConstCast<BufferVulkan>( parameter );

        Anvil::BufferBarrier bufferBarrier( VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, GetQueue()->get_queue_family_index(), GetQueue()->get_queue_family_index(), buffer->GetAnvilBuffer(), 0, buffer->GetSize() );

        m_command_buffer->record_pipeline_barrier( VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_FALSE, 0, nullptr, 1, &bufferBarrier, 0, nullptr );

        // tell buffer that we are used by this submit
        buffer->SetFenceId( GetFenceId() );
    }

    // dispatch the Function's shader module
    m_command_buffer->record_dispatch( (uint32_t)global_size, 1, 1 );

    // end recording (non-blocking submit; completion reported via *e)
    EndRecording( false, e );

    // remove references to buffers. they were already referenced by the CommandBuffer.
    vulkan_function->UnreferenceParametersBuffers();
}
void MenuBar::AddMovieMenu() { auto* movie_menu = addMenu(tr("&Movie")); m_recording_start = AddAction(movie_menu, tr("Start Re&cording Input"), this, [this] { emit StartRecording(); }); m_recording_play = AddAction(movie_menu, tr("P&lay Input Recording..."), this, [this] { emit PlayRecording(); }); m_recording_stop = AddAction(movie_menu, tr("Stop Playing/Recording Input"), this, [this] { emit StopRecording(); }); m_recording_export = AddAction(movie_menu, tr("Export Recording..."), this, [this] { emit ExportRecording(); }); m_recording_start->setEnabled(false); m_recording_play->setEnabled(false); m_recording_stop->setEnabled(false); m_recording_export->setEnabled(false); m_recording_read_only = movie_menu->addAction(tr("&Read-Only Mode")); m_recording_read_only->setCheckable(true); m_recording_read_only->setChecked(Movie::IsReadOnly()); connect(m_recording_read_only, &QAction::toggled, [](bool value) { Movie::SetReadOnly(value); }); movie_menu->addSeparator(); auto* pause_at_end = movie_menu->addAction(tr("Pause at End of Movie")); pause_at_end->setCheckable(true); pause_at_end->setChecked(SConfig::GetInstance().m_PauseMovie); connect(pause_at_end, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_PauseMovie = value; }); auto* lag_counter = movie_menu->addAction(tr("Show Lag Counter")); lag_counter->setCheckable(true); lag_counter->setChecked(SConfig::GetInstance().m_ShowLag); connect(lag_counter, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_ShowLag = value; }); auto* frame_counter = movie_menu->addAction(tr("Show Frame Counter")); frame_counter->setCheckable(true); frame_counter->setChecked(SConfig::GetInstance().m_ShowFrameCount); connect(frame_counter, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_ShowFrameCount = value; }); auto* input_display = movie_menu->addAction(tr("Show Input Display")); input_display->setCheckable(true); input_display->setChecked(SConfig::GetInstance().m_ShowInputDisplay); 
connect(frame_counter, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_ShowInputDisplay = value; }); auto* system_clock = movie_menu->addAction(tr("Show System Clock")); system_clock->setCheckable(true); system_clock->setChecked(SConfig::GetInstance().m_ShowRTC); connect(system_clock, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_ShowRTC = value; }); movie_menu->addSeparator(); auto* dump_frames = movie_menu->addAction(tr("Dump Frames")); dump_frames->setCheckable(true); dump_frames->setChecked(SConfig::GetInstance().m_DumpFrames); connect(dump_frames, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_DumpFrames = value; }); auto* dump_audio = movie_menu->addAction(tr("Dump Audio")); dump_audio->setCheckable(true); dump_audio->setChecked(SConfig::GetInstance().m_DumpAudio); connect(dump_audio, &QAction::toggled, [](bool value) { SConfig::GetInstance().m_DumpAudio = value; }); }
// record demo script after saving state -------------------------------------- // PRIVATE int Cmd_Recdem( char *command ) { return StartRecording( command, TRUE ); }
// Dialog procedure for the BASS recording example (device-selection variant).
// Relies on file-scope globals defined elsewhere: rchan (recording channel),
// chan (playback channel), reclen (recorded bytes incl. WAV header), input
// (current input index), win (window handle), and the MESS dialog-item macro.
INT_PTR CALLBACK dialogproc(HWND h,UINT m,WPARAM w,LPARAM l)
{
	switch (m) {
		case WM_TIMER:
			{ // update the recording/playback counter
				// FIX: was char text[30] — two %I64d values plus " / " can need
				// up to ~43 chars, overflowing the buffer; 64 is always enough.
				char text[64]="";
				if (rchan) // recording (reclen-44 presumably strips a 44-byte WAV header — confirm against writer)
					sprintf(text,"%d",reclen-44);
				else if (chan) {
					if (BASS_ChannelIsActive(chan)) // playing
						sprintf(text,"%I64d / %I64d",BASS_ChannelGetPosition(chan,BASS_POS_BYTE),BASS_ChannelGetLength(chan,BASS_POS_BYTE));
					else
						sprintf(text,"%I64d",BASS_ChannelGetLength(chan,BASS_POS_BYTE));
				}
				MESS(20,WM_SETTEXT,0,text);
			}
			break;
		case WM_COMMAND:
			switch (LOWORD(w)) {
				case IDCANCEL:
					DestroyWindow(h);
					break;
				case 10: // record/stop button toggles recording state
					if (!rchan)
						StartRecording();
					else
						StopRecording();
					break;
				case 11:
					BASS_ChannelPlay(chan,TRUE); // play the recorded data
					break;
				case 12:
					WriteToDisk(); // write the recorded data to disk
					break;
				case 13:
					if (HIWORD(w)==CBN_SELCHANGE) { // input selection changed
						int i;
						input=MESS(13,CB_GETCURSEL,0,0); // get the selection
						// enable the selected input
						for (i=0;BASS_RecordSetInput(i,BASS_INPUT_OFF,-1);i++) ; // 1st disable all inputs, then...
						BASS_RecordSetInput(input,BASS_INPUT_ON,-1); // enable the selected
						UpdateInputInfo();
					}
					break;
				case 16:
					if (HIWORD(w)==CBN_SELCHANGE) { // device selection changed
						int i=MESS(16,CB_GETCURSEL,0,0); // get the selection
						// initialize the selected device
						if (InitDevice(i)) {
							if (rchan) { // continue recording on the new device...
								HRECORD newrchan=BASS_RecordStart(FREQ,CHANS,0,RecordingCallback,0);
								if (!newrchan)
									Error("Couldn't start recording");
								else
									rchan=newrchan;
							}
						}
					}
					break;
			}
			break;
		case WM_HSCROLL:
			if (l) { // set input source level
				float level=SendMessage((HWND)l,TBM_GETPOS,0,0)/100.f;
				if (!BASS_RecordSetInput(input,0,level)) // failed to set input level
					BASS_RecordSetInput(-1,0,level); // try master level instead
			}
			break;
		case WM_INITDIALOG:
			win=h;
			MESS(14,TBM_SETRANGE,FALSE,MAKELONG(0,100));
			{ // get list of recording devices
				// FIX: def was uninitialized — if no enumerated device carried
				// BASS_DEVICE_DEFAULT, InitDevice() was called with garbage.
				// Fall back to the first device (index 0) in that case.
				int c,def=0;
				BASS_DEVICEINFO di;
				for (c=0;BASS_RecordGetDeviceInfo(c,&di);c++) {
					MESS(16,CB_ADDSTRING,0,di.name);
					if (di.flags&BASS_DEVICE_DEFAULT) { // got the default device
						MESS(16,CB_SETCURSEL,c,0);
						def=c;
					}
				}
				InitDevice(def); // initialize default recording device
			}
			SetTimer(h,0,200,0); // timer to update the position display
			return 1;
		case WM_DESTROY:
			// release all BASS stuff
			BASS_RecordFree();
			BASS_Free();
			break;
	}
	return 0;
}
// Dialog procedure for the BASS recording example (input-selection variant).
// Relies on file-scope globals defined elsewhere: rchan (recording channel),
// chan (playback channel), input (current input index), win (window handle),
// and the MESS dialog-item macro.
BOOL CALLBACK dialogproc(HWND h,UINT m,WPARAM w,LPARAM l)
{
	switch (m) {
		case WM_TIMER:
			{ // update the recording/playback counter
				char text[30]="";
				if (rchan) // recording
					sprintf(text,"%I64d",BASS_ChannelGetPosition(rchan,BASS_POS_BYTE));
				else if (chan) {
					if (BASS_ChannelIsActive(chan)) // playing
						sprintf(text,"%I64d / %I64d",BASS_ChannelGetPosition(chan,BASS_POS_BYTE),BASS_ChannelGetLength(chan,BASS_POS_BYTE));
					else
						sprintf(text,"%I64d",BASS_ChannelGetLength(chan,BASS_POS_BYTE));
				}
				MESS(20,WM_SETTEXT,0,text);
			}
			break;
		case WM_COMMAND:
			switch (LOWORD(w)) {
				case IDCANCEL:
					DestroyWindow(h);
					break;
				case 10: // record/stop button toggles recording state
					if (!rchan)
						StartRecording();
					else
						StopRecording();
					break;
				case 11:
					BASS_ChannelPlay(chan,TRUE); // play the recorded data
					break;
				case 12:
					WriteToDisk(); // write the recorded data to disk
					break;
				case 13:
					if (HIWORD(w)==CBN_SELCHANGE) { // input selection changed
						int i;
						input=MESS(13,CB_GETCURSEL,0,0); // get the selection
						// enable the selected input
						for (i=0;BASS_RecordSetInput(i,BASS_INPUT_OFF,-1);i++) ; // 1st disable all inputs, then...
						BASS_RecordSetInput(input,BASS_INPUT_ON,-1); // enable the selected
						UpdateInputInfo(); // update info
					}
					break;
			}
			break;
		case WM_HSCROLL:
			if (l) { // set input source level
				float level=SendMessage((HWND)l,TBM_GETPOS,0,0)/100.f;
				if (!BASS_RecordSetInput(input,0,level)) // failed to set input level
					BASS_RecordSetInput(-1,0,level); // try master level instead
			}
			break;
		case WM_INITDIALOG:
			win=h;
			// setup recording device (using default device)
			if (!BASS_RecordInit(-1)) {
				Error("Can't initialize recording device");
				DestroyWindow(win);
			} else {
				// get list of inputs
				int c;
				const char *i;
				MESS(14,TBM_SETRANGE,FALSE,MAKELONG(0,100)); // initialize input level slider
				// enumerate inputs; assignment in the loop condition ends at the
				// first index for which BASS_RecordGetInputName returns NULL
				for (c=0;i=BASS_RecordGetInputName(c);c++) {
					MESS(13,CB_ADDSTRING,0,i);
					if (!(BASS_RecordGetInput(c,NULL)&BASS_INPUT_OFF)) { // this 1 is currently "on"
						input=c;
						MESS(13,CB_SETCURSEL,input,0);
						UpdateInputInfo(); // display info
					}
				}
				SetTimer(h,0,200,0); // timer to update the position display
				return 1;
			}
			break;
		case WM_DESTROY:
			// release all BASS stuff
			BASS_RecordFree();
			BASS_Free();
			break;
	}
	return 0;
}
// record demo script without saving state first ------------------------------ // PRIVATE int Cmd_Record( char *command ) { return StartRecording( command, FALSE ); }
// Dialog procedure for the BASS recording example (Windows CE / wide-char
// variant: full-screen dialog, shows free disk space while recording).
// Relies on file-scope globals defined elsewhere: rchan (recording channel),
// chan (playback channel), win (window handle), and the MESS macro.
BOOL CALLBACK dialogproc(HWND h,UINT m,WPARAM w,LPARAM l)
{
	switch (m) {
		case WM_TIMER:
			{ // update the recording/playback counter
				wchar_t text[30]=L"";
				if (rchan) // recording
					wsprintf(text,L"%I64d",BASS_ChannelGetPosition(rchan,BASS_POS_BYTE));
				else if (chan) {
					if (BASS_ChannelIsActive(chan)) // playing
						wsprintf(text,L"%I64d / %I64d",BASS_ChannelGetPosition(chan,BASS_POS_BYTE),BASS_ChannelGetLength(chan,BASS_POS_BYTE));
					else
						wsprintf(text,L"%I64d",BASS_ChannelGetLength(chan,BASS_POS_BYTE));
				}
				MESS(20,WM_SETTEXT,0,text);
				{ // display free disk space
					ULARGE_INTEGER space;
					if (GetDiskFreeSpaceEx(NULL,&space,NULL,NULL)) {
						wsprintf(text,L"%I64d",space.QuadPart);
						MESS(21,WM_SETTEXT,0,text);
					}
				}
			}
			break;
		case WM_COMMAND:
			switch (LOWORD(w)) {
				case IDOK:
				case IDCANCEL:
					EndDialog(h,LOWORD(w));
					break;
				case 10: // record/stop button toggles recording state
					if (!rchan)
						StartRecording();
					else
						StopRecording();
					break;
				case 11:
					BASS_ChannelPlay(chan,TRUE); // play the recorded data
					break;
			}
			break;
		case WM_INITDIALOG:
			win=h;
			// setup recording device (using default device)
			if (!BASS_RecordInit(-1)) {
				Error(L"Can't initialize recording device");
				EndDialog(h,0);
				return 0;
			}
			{ // go full screen
				SHINITDLGINFO shidi;
				shidi.dwMask = SHIDIM_FLAGS;
				shidi.dwFlags = SHIDIF_DONEBUTTON|SHIDIF_SIZEDLGFULLSCREEN|SHIDIF_EMPTYMENU|SHIDIF_SIPDOWN;
				shidi.hDlg = h;
				SHInitDialog(&shidi);
			}
			SetTimer(h,1,200,0); // timer to update the position display
			return 1;
		case WM_DESTROY:
			// release all BASS stuff
			BASS_RecordFree();
			BASS_Free();
			break;
	}
	return 0;
}