Exemple #1
0
// Per-callback recording state machine.
//
// Reacts to the current value of the m_recReady control (OFF / READY / ON):
//  - OFF:   close any open output (and encoder) and notify listeners.
//  - READY: a target file has been chosen; try to open it and flip to ON,
//           or back to OFF if the encoder could not be initialized.
//  - ON:    stream this buffer to the open file, either raw (WAVE/AIFF via
//           libsndfile) or through the compressed-audio encoder.
//
// @param pBuffer     sample buffer for this engine callback
// @param iBufferSize number of samples in pBuffer (passed straight through
//                    to sf_write_float / encodeBuffer, so it is a sample
//                    count, not a frame count — TODO confirm with callers)
void EngineRecord::process(const CSAMPLE* pBuffer, const int iBufferSize) {
    // if recording is disabled
    if (m_recReady->get() == RECORD_OFF) {
        //qDebug("Setting record flag to: OFF");
        if (fileOpen()) {
            closeFile();    //close file and free encoder
            emit(isRecording(false));   // notify the RecordingManager / GUI
        }
    }

    // if we are ready for recording, i.e, the output file has been selected, we
    // open a new file
    if (m_recReady->get() == RECORD_READY) {
        updateFromPreferences();	//update file location from pref
        if (openFile()) {
            qDebug("Setting record flag to: ON");
            m_recReady->slotSet(RECORD_ON);
            emit(isRecording(true)); //will notify the RecordingManager

            // Since we just started recording, timeout and clear the metadata.
            m_iMetaDataLife = kMetaDataLifeTimeout;
            m_pCurrentTrack = TrackPointer();

            // Start a fresh cue sheet alongside the audio file, if enabled.
            if (m_bCueIsEnabled) {
                openCueFile();
                m_cuesamplepos = 0;
                m_cuetrack = 0;
            }
        } else { // Maybe the encoder could not be initialized
            qDebug("Setting record flag to: OFF");
            m_recReady->slotSet(RECORD_OFF);
            emit(isRecording(false));
        }
    }

    // If recording is enabled process audio to compressed or uncompressed data.
    if (m_recReady->get() == RECORD_ON) {
        if (m_Encoding == ENCODING_WAVE || m_Encoding == ENCODING_AIFF) {
            // Uncompressed path: write samples directly via libsndfile.
            if (m_sndfile != NULL) {
                sf_write_float(m_sndfile, pBuffer, iBufferSize);
                emit(bytesRecorded(iBufferSize));
            }
        } else {
            if (m_encoder) {
                // Compress audio. Encoder will call method 'write()' below to
                // write a file stream
                m_encoder->encodeBuffer(pBuffer, iBufferSize);
            }
        }

        // Track cue-sheet position: add a new cue entry whenever the track
        // metadata changes, and advance the running sample position.
        if (m_bCueIsEnabled) {
            if (metaDataHasChanged()) {
                m_cuetrack++;
                writeCueLine();
                m_cuefile.flush();
            }
            m_cuesamplepos += iBufferSize;
        }
  	}
}
Exemple #2
0
bool ASimModeBase::toggleRecording()
{
    if (isRecording())
        stopRecording();
    else
        startRecording();

    return isRecording();
}
Exemple #3
0
void DvbStream::recordingState()
{
	// Report true as soon as any output has an active recording,
	// false when none of them do.
	bool active = false;

	for ( int i = 0; i < (int)out.count() && !active; i++ )
		active = out.at(i)->hasRec();

	emit isRecording( active );
}
	// Push one frame into the recorder and refresh the progress dialog.
	//
	// @param texture  the frame to encode
	// @throws Exception if no recording is in progress, or if the recorder
	//         itself fails while processing the frame.
	//
	// NOTE(review): the static `lock` is a simple re-entrancy guard, not a
	// thread-safety mechanism — concurrent callers would race on it.
	void VideoRecorderControls::submitNewFrame(HdlTexture& texture)
	{
		static bool lock = false;

		if(lock)
			return;

		lock = true;

		if(!isRecording())
		{
			lock = false;
			throw Exception("VideoRecorderControls::submitNewFrame - Not recording.", __FILE__, __LINE__);
		}
		else
		{
			try
			{
				(*recorder) << texture << OutputDevice::Process;

				recordingDialog.updateFrameCount(recorder->getNumEncodedFrames());
				recordingDialog.updateDuration(recorder->getTotalVideoDurationSec());
			}
			catch(Exception&)
			{
				lock = false;
				// Rethrow the original exception object. The previous
				// `throw e;` made a copy, slicing any derived exception
				// type back down to Exception; a bare `throw;` preserves
				// the dynamic type.
				throw;
			}
		}

		lock = false;
	}
Exemple #5
0
std::vector<std::vector<int> > SpikeMonitor::getSpikeVector2D() {
	// Fetch the raw 2D spike data from the core. Both preconditions are
	// reported as user-facing errors: recording must be stopped, and the
	// monitor must be in AER mode.
	const std::string fname = "getSpikeVector2D()";
	UserErrors::assertTrue(!isRecording(), UserErrors::CANNOT_BE_ON, fname, "Recording");
	UserErrors::assertTrue(getMode()==AER, UserErrors::CAN_ONLY_BE_CALLED_IN_MODE, fname, fname, "AER");

	return spikeMonitorCorePtr_->getSpikeVector2D();
}
Exemple #6
0
float SpikeMonitor::getNeuronMeanFiringRate(int neurId) {
	// Mean firing rate of a single neuron, delegated to the core object.
	// Calling this while a recording is active is a user-facing error.
	const std::string fname = "getNeuronMeanFiringRate()";
	UserErrors::assertTrue(!isRecording(), UserErrors::CANNOT_BE_ON, fname, "Recording");

	return spikeMonitorCorePtr_->getNeuronMeanFiringRate(neurId);
}
// Number of recorded spikes for one neuron.
//
// Preconditions (debug asserts): recording stopped, neurId within range,
// monitor in AER mode.
int SpikeMonitorCore::getNeuronNumSpikes(int neurId) {
	assert(!isRecording());
	assert(neurId>=0 && neurId<nNeurons_);
	assert(getMode()==AER);

	// size() is unsigned (size_t); cast explicitly to the int return type
	// instead of relying on an implicit narrowing conversion.
	return static_cast<int>(spkVector_[neurId].size());
}
// Tear down the emulation handler: discard any in-progress script
// recording, then release the owned emulation, widget, and log objects.
// NOTE(review): the delete order (m_teEmu, m_teWid, m_log) is preserved
// as-is — the widget/emulation objects may reference each other, so
// reordering is not assumed safe.
EmulationHandler::~EmulationHandler() {
    if (isRecording())
        clearScript();
    delete m_teEmu;
    delete m_teWid;
    delete m_log;
}
bool RecordingDevice::start(int samples, int sampleRate, int bitDepth, int channels)
{
	ALenum format = Audio::getFormat(bitDepth, channels);
	if (format == AL_NONE)
		throw InvalidFormatException(channels, bitDepth);

	if (samples <= 0)
		throw love::Exception("Invalid number of samples.");

	if (sampleRate <= 0)
		throw love::Exception("Invalid sample rate.");

	if (isRecording())
		stop();

	device = alcCaptureOpenDevice(name.c_str(), sampleRate, format, samples);
	if (device == nullptr)
		return false;

	alcCaptureStart(device);

	this->samples = samples;
	this->sampleRate = sampleRate;
	this->bitDepth = bitDepth;
	this->channels = channels;

	return true;
}
Exemple #10
0
void GraphicsContext::setFillGradient(Ref<Gradient>&& gradient)
{
    // A fill uses either a gradient or a pattern, never both; installing
    // the gradient drops any active fill pattern.
    m_state.fillPattern = nullptr;
    m_state.fillGradient = WTFMove(gradient);

    if (isRecording())
        m_displayListRecorder->updateState(m_state, GraphicsContextState::FillGradientChange); // FIXME: also fill pattern?
}
// Draw the ImGui menu entry for the video recorder: mode selection,
// capture options, start/stop controls, and per-mode settings.
// Immediate-mode GUI: the call order below IS the on-screen layout.
void fboRecorder::drawMenuEntry(){
	ImGui::TextWrapped("Records the video output and saves it to a video file using a separate thread.");
	
	//ImGui::Selectable("Show Gui Window...", &bShowGuiWindow);
	
	// Recording-mode list box; ListBoxFooter() must only run when
	// ListBoxHeader() returned true.
	if (ImGui::ListBoxHeader("Video Recording Mode", 3)){
		
		// manually add new modes here
		if ( ImGui::Selectable("VIDEOREC_MODE_FILE_H264", fboRecMode==VIDEOREC_MODE_FILE_H264)) {
			setRecordMode(VIDEOREC_MODE_FILE_H264);
		}
		if ( ImGui::Selectable("VIDEOREC_MODE_FILE_PNG", fboRecMode==VIDEOREC_MODE_FILE_PNG)) {
			setRecordMode(VIDEOREC_MODE_FILE_PNG);
		}
#ifdef KM_ENABLE_SYPHON
		// Syphon output is only compiled in when the feature flag is set.
		if ( ImGui::Selectable("VIDEOREC_MODE_SYPHON", fboRecMode==VIDEOREC_MODE_SYPHON)) {
			setRecordMode(VIDEOREC_MODE_SYPHON);
		}
#endif
		ImGui::ListBoxFooter();
	}
	ImGui::Checkbox("Use grab screen instead of fbo", &useGrabScreen);
	ImGui::Checkbox("Show recorded output", &videoRecShowOutput);
	ImGui::Separator();
	
	// Start/Stop button pair — only shown while the recorder is enabled.
	if( isEnabled() ){
		if(!isRecording()){
			if(ImGui::Button("Start Recording")){
				startRecording();
			}
		}
		else {
			if(ImGui::Button("Stop Recording")){
				stopRecording();
			}
			if(fbo.isAllocated()){
				ImGui::TextWrapped("Recorded resolution: %gx%gpx", fbo.getWidth(), fbo.getHeight());
			}
		}
	}
	
	// Per-mode settings section.
	switch (fboRecMode) {
		case VIDEOREC_MODE_FILE_H264 :
		case VIDEOREC_MODE_FILE_PNG :
			ImGui::TextWrapped("Video File Settings");
			ImGui::InputInt("Bitrate", &videoRecBitRate);
			ImGui::InputInt("AA quality", &videoRecAA);
			ImGui::InputInt("Target video FPS", &videoRecFPS);
			break;
#ifdef KM_ENABLE_SYPHON
		case VIDEOREC_MODE_SYPHON :
			ImGui::TextWrapped("Syphon Settings (not yet)");
			break;
#endif
		default:
			break;
	}
	
	
}
Exemple #12
0
void AutomationPattern::processMidiTime( const MidiTime & time )
{
	if( ! isRecording() )
	{
		if( time >= 0 && hasAutomation() )
		{
			const float val = valueAt( time );
			for( objectVector::iterator it = m_objects.begin();
							it != m_objects.end(); ++it )
			{
				if( *it )
				{
					( *it )->setAutomatedValue( val );
				}

			}	
		}
	}
	else
	{
		if( time >= 0 && ! m_objects.isEmpty() )
		{
			const float value = static_cast<float>( firstObject()->value<float>() );
			if( value != m_lastRecordedValue ) 
			{
				putValue( time, value, true );
				m_lastRecordedValue = value;
			}
			else if( valueAt( time ) != value )
			{
				removeValue( time, false );
			}
		}
	}
}
Exemple #13
0
void QedGroupControl::adjustLiveWorldViewStopped(QmcTime::Packet *packet)
{
    // Only the active group reacts: sync the button state with the
    // stopped packet, then refresh the time button display.
    if (!isActive(packet))
	return;
    newButtonState(packet->state, packet->mode, isRecording(packet));
    updateTimeButton();
}
Exemple #14
0
void QedGroupControl::adjustLiveWorldViewForward(QmcTime::Packet *packet)
{
    console->post("QedGroupControl::adjustLiveWorldViewForward");

    // Refresh button state only when this group is the active one.
    if (!isActive(packet))
	return;
    newButtonState(packet->state, packet->mode, isRecording(packet));
}
// Classify the current audio instance.
//
// @return the matched sound label (0..numSounds-1), or -1 when the
//         classifier is not ready, a recording is in progress, no note
//         onset was detected, or the classifier type is unrecognized.
// NOTE: the `template<...>` header for this member definition sits above
// this excerpt.
int AudioClassifier<T>::classify()
{
	auto sound = -1;

    // Atomic loads: this may be called from the audio thread while the
    // classifier state is configured elsewhere.
    auto ready = classifierReady.load();
      
    if (!ready || isRecording())
        return -1;
   
    if (noteOnsetDetected())
    {
	    switch (currentClassfierType.load())
	    {
			case AudioClassifyOptions::ClassifierType::nearestNeighbour:
				// Use the variance-reduced feature vector when dimensionality
				// reduction is active, otherwise the full instance vector.
				if (reducedVarianceSize > 0)
					sound = knn.classify(currentInstanceVectorReduced);
				else
					sound = knn.classify(currentInstanceVector);
				break;
			case AudioClassifyOptions::ClassifierType::naiveBayes:
				if (reducedVarianceSize > 0)
					sound = nbc.Classify(currentInstanceVectorReduced);
				else
					sound = nbc.Classify(currentInstanceVector);
				break;
			default: break; // Sound returned -1 (Invalid label. Valid labels are 0 to numSounds)
	    }
    }

    return sound;
}
// Smallest per-neuron firing rate. Must not be called while recording.
float SpikeMonitorCore::getMinFiringRate(){
	assert(!isRecording());

	std::vector<float> rates = getAllFiringRatesSorted();

	// Guard: front() on an empty vector is undefined behavior (e.g. a
	// monitor over zero neurons). Report 0 Hz in that case.
	if (rates.empty())
		return 0.0f;

	// Rates appear to be sorted ascending (front() taken as the minimum)
	// — consistent with getAllFiringRatesSorted()'s name.
	return rates.front();
}
Exemple #17
0
// Advance this group by one time step from the given time packet.
//
// If the packet's direction disagrees with our current archive time state,
// or the step lands off the expected position grid (sideStep), fall back
// to a full world-view adjustment instead of stepping.
void QedGroupControl::step(QmcTime::Packet *packet)
{
    double stepPosition = QedApp::timevalToSeconds(packet->position);

    console->post(QedApp::DebugProtocol,
	"GroupControl::step: stepping to time %.2f, delta=%.2f, state=%s",
	stepPosition, my.realDelta, timeState());

    // Direction mismatch (archive source only) or a side-step off the
    // delta grid: resynchronize the whole view rather than stepping.
    if ((packet->source == QmcTime::ArchiveSource &&
	((packet->state == QmcTime::ForwardState &&
		my.timeState != ForwardState) ||
	 (packet->state == QmcTime::BackwardState &&
		my.timeState != BackwardState))) ||
	 sideStep(stepPosition, my.realPosition, my.realDelta))
	return adjustWorldView(packet, false);

    // Accept the packet: record the new time position in both the raw
    // timeval form and the derived seconds form.
    my.pmtimeState = packet->state;
    my.position = packet->position;
    my.realPosition = stepPosition;

    adjustStep(packet);
    fetch();

    // Active group also refreshes its button state.
    if (isActive(packet))
	newButtonState(packet->state, packet->mode, isRecording(packet));
}
Exemple #18
0
void GraphicsContext::setStrokeGradient(Ref<Gradient>&& gradient)
{
    // A stroke uses either a gradient or a pattern, never both; installing
    // the gradient drops any active stroke pattern.
    m_state.strokePattern = nullptr;
    m_state.strokeGradient = WTFMove(gradient);

    if (isRecording())
        m_displayListRecorder->updateState(m_state, GraphicsContextState::StrokeGradientChange);
}
Exemple #19
0
void GraphicsContext::setFillPattern(Ref<Pattern>&& pattern)
{
    // Pattern fills replace gradient fills — the two are mutually exclusive.
    m_state.fillPattern = WTFMove(pattern);
    m_state.fillGradient = nullptr;

    if (isRecording())
        m_displayListRecorder->updateState(m_state, GraphicsContextState::FillPatternChange);
}
Exemple #20
0
qint64 Recorder::elapsed() const {
    // Time elapsed on the recording timer; 0 whenever no recording is
    // in progress.
    return isRecording() ? _timer.elapsed() : 0;
}
Exemple #21
0
// Slot invoked when the engine's recording state changes.
//
// @param isRecordingActive the new recording state
void RecordingManager::slotIsRecording(bool isRecordingActive)
{
    //qDebug() << "SlotIsRecording " << isRecording;

    // Cache the state, then re-emit it so the GUI controls can update.
    //Notify the GUI controls, see dlgrecording.cpp
    m_isRecording = isRecordingActive;
    emit(isRecording(isRecordingActive));
}
// Return all per-neuron firing rates in sorted order.
// Must not be called while a recording is in progress.
std::vector<float> SpikeMonitorCore::getAllFiringRatesSorted() {
	assert(!isRecording());

	// if necessary, get data structures up-to-date
	sortFiringRates();

	return firingRatesSorted_;
}
// Bridge between the network protocol and the SaveImages recorder.
//
// Incoming Start/EndRecording packets drive the recorder; outgoing
// SaveImages signals (fps, frame counts, recording state) are forwarded
// back over the dispatcher as protocol packets.
// NOTE(review): every lambda below captures `this`; the connections are
// made with save_images as context object, so they are assumed to be
// torn down before this forwarder dies — verify lifetime.
SaveFileForwarder::SaveFileForwarder(const SaveImages::ptr& save_images, const NetworkDispatcher::ptr& dispatcher) : NetworkReceiver{dispatcher}, dptr(save_images)
{
  // Inbound: protocol packets -> recorder commands.
  register_handler(SaveFileProtocol::StartRecording, [this](const NetworkPacket::ptr &) { d->save_images->startRecording(d->imager); });
  register_handler(SaveFileProtocol::EndRecording, [this](const NetworkPacket::ptr &) { d->save_images->endRecording(); });
  // Outbound: recorder statistics -> protocol packets.
  QObject::connect(save_images.get(), &SaveImages::saveFPS, save_images.get(), [this](double fps) { this->dispatcher()->queue_send(SaveFileProtocol::packetsignalSaveFPS() << QVariant{fps}); } );
  QObject::connect(save_images.get(), &SaveImages::meanFPS, save_images.get(), [this](double fps) { this->dispatcher()->queue_send(SaveFileProtocol::packetsignalMeanFPS() << QVariant{fps}); } );
  QObject::connect(save_images.get(), &SaveImages::savedFrames, save_images.get(), [this](uint64_t frames) { this->dispatcher()->queue_send(SaveFileProtocol::packetsignalSavedFrames() << QVariant{static_cast<qlonglong>(frames)}); } );
  QObject::connect(save_images.get(), &SaveImages::droppedFrames, save_images.get(), [this](uint64_t frames) { this->dispatcher()->queue_send(SaveFileProtocol::packetsignalDroppedFrames() << QVariant{static_cast<qlonglong>(frames)}); } );
  // Recording started: raise the local isRecording signal, then notify peers.
  QObject::connect(save_images.get(), &SaveImages::recording, save_images.get(), [this](const QString &file) {
    emit isRecording(true);
    this->dispatcher()->queue_send(SaveFileProtocol::packetsignalRecording() << QVariant{file});
  } );
  // Recording finished: notify peers first, then lower the local signal.
  QObject::connect(save_images.get(), &SaveImages::finished, save_images.get(), [this]{
    this->dispatcher()->queue_send(SaveFileProtocol::packetsignalFinished());
    emit isRecording(false);
  } );
}
// Receive raw bytes from the emulation layer: optionally append them to
// the recording script, then forward them via the send() signal.
//
// @param src raw byte buffer (not NUL-terminated; len gives the size)
// @param len number of bytes in src
void EmulationHandler::recvEmulation(const char* src, int len ) {
    // NOTE(review): QByteArray(int) here is the size-constructing ctor
    // (Qt3-era API, presumably) — it allocates len bytes which memcpy
    // then fills; confirm against the Qt version in use.
    QByteArray ar(len);

    memcpy(ar.data(), src, sizeof(char) * len );
    if (isRecording())
        m_script->append(ar);
    emit send(ar);
}
// Destructor: stop any client-initiated profiling session before
// releasing the private implementation.
QmlProfilerTraceClient::~QmlProfilerTraceClient()
{
    //Disable profiling if started by client
    //Profiling data will be lost!!
    if (isRecording())
        setRecording(false);
    delete d;
}
// Mean firing rate of the whole population in Hz
// (spikes * 1000 / (recorded ms * neuron count) — assuming
// getRecordingTotalTime() is in milliseconds; TODO confirm).
// Must not be called while a recording is in progress.
float SpikeMonitorCore::getPopMeanFiringRate() {
	assert(!isRecording());

	// No recorded time yet: avoid dividing by zero.
	if (totalTime_==0)
		return 0.0f;

	return getPopNumSpikes()*1000.0/(getRecordingTotalTime()*nNeurons_);
}
Exemple #27
0
void GraphicsContext::beginTransparencyLayer(float opacity)
{
    // Non-recording contexts open a platform layer and track nesting;
    // recording contexts just capture the call into the display list.
    if (!isRecording()) {
        beginPlatformTransparencyLayer(opacity);
        ++m_transparencyCount;
        return;
    }
    m_displayListRecorder->beginTransparencyLayer(opacity);
}
Exemple #28
0
void GraphicsContext::setAlpha(float alpha)
{
    // Update the cached state first, then either apply it to the platform
    // context or record the change into the display list.
    m_state.alpha = alpha;

    if (!isRecording()) {
        setPlatformAlpha(alpha);
        return;
    }
    m_displayListRecorder->updateState(m_state, GraphicsContextState::AlphaChange);
}
Exemple #29
0
void QedGroupControl::adjustArchiveWorldViewBackward(QmcTime::Packet *packet, bool setup)
{
    console->post("QedGroupControl::adjustArchiveWorldViewBackward");

    // During setup, force the packet into the stopped state first.
    if (setup)
	packet->state = QmcTime::StoppedState;

    // Only the active group refreshes its button state.
    if (!isActive(packet))
	return;
    newButtonState(packet->state, packet->mode, isRecording(packet));
}
// Request the camera to stop recording.
//
// @return true if the stop command was issued; false when the camera is
//         busy, not recording, or in photo mode (nothing to stop).
bool FlycamOne::stopRecording()
{
	// `!isRecording()` replaces the `== false` comparison — same logic,
	// idiomatic boolean test.
	if (isBusy() || !isRecording() || m_camMode == CamMode_Photo)
	{
		return false;
	}

	setCommand(Command_StartStop);
	return true;
}