status_t
EMBeOutputNode::HandleMessage(int32 message, const void* data, size_t size)
{
	if (message == EM_PORT_MESSAGE_INCOMING_BUFFER) {
		m_opBuffer = *((BBuffer**) data);
		if (RunState() != B_STARTED) {
			emerr << "ERROR! Node received buffer when not running!" << endl;
			if (m_opBuffer != NULL)
				m_opBuffer->Recycle();
			return B_OK;
		}
		if (m_opBuffer != NULL) {
			vCount++;
			EMMediaFormat* opFormat = GetConnectedEMMediaFormat();
			if ((opFormat->m_eType & EM_TYPE_ANY_VIDEO) > 0) {
				opFormat->m_vFrameRate = *(static_cast<float*>(
					EMMediaEngine::Instance()->GetSettingsRepository()
						->GetSetting(SETTING_VIDEO_FRAMERATE)));
			}
			int64 vFrames = EMBeMediaUtility::FramesInBuffer(m_opBuffer, opFormat,
				EM_TYPE_RAW_AUDIO); // + 44100 / 4;
			m_vNumberOfReceivedFrames += vFrames;
			int64 vTime = EMBeMediaUtility::FramesToTime(m_vNumberOfReceivedFrames,
				m_opSystemAudioFormat);
			int64 vNextBufferPerf = static_cast<int64>(floor(
				static_cast<float>(m_vStartingTime) + vTime));
			m_opBuffer->Header()->start_time = vNextBufferPerf;

			media_timed_event sEvent(vNextBufferPerf,
				BTimedEventQueue::B_HANDLE_BUFFER);
			sEvent.pointer = m_opBuffer;
			sEvent.cleanup = BTimedEventQueue::B_RECYCLE_BUFFER;
			status_t vResult = EventQueue()->AddEvent(sEvent);
			if (vResult != B_OK) {
				emerr << "ERROR! Couldn't add the B_HANDLE_BUFFER event to the queue: "
					<< strerror(vResult) << endl;
				m_opBuffer->Recycle();
			}
		} else
			emerr << "ERROR! Node received EM_PORT_MESSAGE_INCOMING_BUFFER with a NULL buffer!" << endl;
		return B_OK;
	} else if (message == EM_PORT_MESSAGE_FLUSH_QUEUE) {
		media_timed_event sEvent(TimeSource()->Now(), EM_TIMED_EVENT_FLUSH_CASH);
		EventQueue()->AddEvent(sEvent);
	} else if (message == EM_PORT_MESSAGE_RESET) {
		media_timed_event sEvent(TimeSource()->Now(), EM_TIMED_EVENT_RESET);
		EventQueue()->AddEvent(sEvent);
	} else {
		//emout_commented_out_4_release << "ERROR! Received unknown message!" << endl;
		BMediaNode::HandleBadMessage(message, data, size);
	}
	return B_OK;
}
bool
EMBeOutputNode::Shutdown()
{
	Quit();

	const media_timed_event* event;
	while ((event = EventQueue()->FindFirstMatch(0, BTimedEventQueue::B_ALWAYS,
			true, BTimedEventQueue::B_HANDLE_BUFFER)) != NULL)
		EventQueue()->RemoveEvent(event);

	delete m_opConnectedEMMediaFormat;
	//if(m_opBuffer != NULL)
	//	m_opBuffer->Recycle();
	return true;
}
status_t
ClientNode::_ScheduleOutputEvent(bigtime_t event)
{
	media_timed_event nextBufferEvent(event, NEW_BUFFER_EVENT);
	return EventQueue()->AddEvent(nextBufferEvent);
}
void
AudioFilterNode::SetParameterValue(
	int32 id,
	bigtime_t changeTime,
	const void* value,
	size_t size)
{
	// not running? set parameter now
	if (RunState() != B_STARTED) {
		ASSERT(m_parameterSet);
		m_parameterSet->setValue(id, changeTime, value, size);
		return;
	}

	// queue a parameter-change event
	if (size > 64) {
		// +++++ hard-coded limitation in media_timed_event
		DEBUGGER((
			"!!! AudioFilterNode::SetParameterValue(): parameter data too large\n"));
	}
	media_timed_event ev(
		changeTime,
		BTimedEventQueue::B_PARAMETER,
		0,
		BTimedEventQueue::B_NO_CLEANUP,
		size,
		id,
		(char*)value,
		size);
	EventQueue()->AddEvent(ev);
}
void
AbstractFileInterfaceNode::SetParameterValue(
	int32 id, bigtime_t when, const void* value, size_t size)
{
	PRINT("AbstractFileInterfaceNode::SetParameterValue(id=%ld,when=%lld,size=%ld)\n",
		id, when, int32(size));
	switch (id) {
		case DEFAULT_CHUNK_SIZE_PARAM:
		case DEFAULT_BIT_RATE_PARAM:
		case DEFAULT_BUFFER_PERIOD_PARAM:
		{
			media_timed_event event(when, BTimedEventQueue::B_PARAMETER,
				NULL, BTimedEventQueue::B_NO_CLEANUP,
				size, id, (char*)value, size);
			EventQueue()->AddEvent(event);
			break;
		}
		default:
			PRINT("AbstractFileInterfaceNode::SetParameterValue unknown id (%ld)\n", id);
			break;
	}
}
void
FlangerNode::SetParameterValue(
	int32 id,
	bigtime_t changeTime,
	const void* pValue,
	size_t size)
{
	switch (id) {
		case P_MIX_RATIO:
		case P_SWEEP_RATE:
		case P_DELAY:
		case P_DEPTH:
		case P_FEEDBACK:
		{
			if (size < sizeof(float))
				break;

			// this is from ToneProducer. it's fishy.
			// if(size > sizeof(float))
			//	size = sizeof(float);

			media_timed_event ev(
				changeTime,
				BTimedEventQueue::B_PARAMETER,
				0,
				BTimedEventQueue::B_NO_CLEANUP,
				size,
				id,
				(char*)pValue,
				size);
			EventQueue()->AddEvent(ev);
			break;
		}
	}
}
// -------------------------------------------------------- //
// implementation for BMediaEventLooper
// -------------------------------------------------------- //

// protected:
status_t
MediaReader::HandleBuffer(
		const media_timed_event* event,
		bigtime_t lateness,
		bool realTimeEvent)
{
	CALLED();

	if (output.destination == media_destination::null)
		return B_MEDIA_NOT_CONNECTED;

	status_t status = B_OK;
	BBuffer* buffer = fBufferGroup->RequestBuffer(
		output.format.u.multistream.max_chunk_size, fBufferPeriod);
	if (buffer != 0) {
		status = FillFileBuffer(buffer);
		if (status != B_OK) {
			PRINT("MediaReader::HandleBuffer got an error from FillFileBuffer.\n");
			buffer->Recycle();
		} else {
			if (fOutputEnabled) {
				status = SendBuffer(buffer, output.destination);
				if (status != B_OK) {
					PRINT("MediaReader::HandleBuffer got an error from SendBuffer.\n");
					buffer->Recycle();
				}
			} else {
				buffer->Recycle();
			}
		}
	}

	// schedule the next buffer event regardless, so the stream keeps running
	bigtime_t nextEventTime = event->event_time + fBufferPeriod;
	media_timed_event nextBufferEvent(nextEventTime,
		BTimedEventQueue::B_HANDLE_BUFFER);
	EventQueue()->AddEvent(nextBufferEvent);
	return status;
}
void
LoggingConsumer::SetParameterValue(int32 id, bigtime_t performance_time,
	const void* value, size_t size)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	logMsg.param.id = id;
	mLogger->Log(LOG_SET_PARAM_VALUE, logMsg);

	// if it's one of our parameters, enqueue a "set parameter" event for
	// handling at the appropriate time
	switch (id) {
		case LATENCY_PARAM:
		case CPU_SPIN_PARAM:
		case PRIORITY_PARAM:
		{
			// !!! Change from B_USER_EVENT to B_SET_PARAMETER once it's defined
			media_timed_event event(performance_time,
				BTimedEventQueue::B_USER_EVENT, (void*)value,
				BTimedEventQueue::B_NO_CLEANUP, size, id, NULL);
			EventQueue()->AddEvent(event);
			break;
		}

		default:
			// do nothing for other parameter IDs
			break;
	}
}
void
VideoConsumer::HandleEvent(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
	LOOP("VideoConsumer::HandleEvent\n");

	switch (event->type) {
		case BTimedEventQueue::B_START:
			PROGRESS("VideoConsumer::HandleEvent - START\n");
			_SetPerformanceTimeBase(event->event_time);
			break;

		case BTimedEventQueue::B_WARP:
		case BTimedEventQueue::B_SEEK:
			PROGRESS("VideoConsumer::HandleEvent - WARP or SEEK\n");
			_SetPerformanceTimeBase(event->bigdata);
			break;

		case BTimedEventQueue::B_STOP:
			PROGRESS("VideoConsumer::HandleEvent - STOP\n");
			EventQueue()->FlushEvents(event->event_time, BTimedEventQueue::B_ALWAYS,
				true, BTimedEventQueue::B_HANDLE_BUFFER);
			// unset the target's bitmap
			_UnsetTargetBuffer();
			break;

		case BTimedEventQueue::B_HANDLE_BUFFER:
			LOOP("VideoConsumer::HandleEvent - HANDLE BUFFER\n");
			_HandleBuffer(static_cast<BBuffer*>(event->pointer));
			break;

		default:
			ERROR("VideoConsumer::HandleEvent - BAD EVENT\n");
			break;
	}
}
void
AudioConsumer::BufferReceived(BBuffer* buffer)
{
	media_timed_event event(buffer->Header()->start_time,
		BTimedEventQueue::B_HANDLE_BUFFER, buffer,
		BTimedEventQueue::B_RECYCLE_BUFFER);
	EventQueue()->AddEvent(event);
}
void
ConsumerNode::BufferReceived(BBuffer* buffer)
{
	out("ConsumerNode::BufferReceived, scheduled time = %5.4f\n",
		buffer->Header()->start_time / 1E6);
	media_timed_event event(buffer->Header()->start_time,
		BTimedEventQueue::B_HANDLE_BUFFER, buffer,
		BTimedEventQueue::B_RECYCLE_BUFFER);
	EventQueue()->AddEvent(event);
}
status_t
AbstractFileInterfaceNode::HandleStop(
		const media_timed_event* event,
		bigtime_t lateness,
		bool realTimeEvent)
{
	CALLED();
	// flush the queue so downstreamers don't get any more
	EventQueue()->FlushEvents(0, BTimedEventQueue::B_ALWAYS, true,
		BTimedEventQueue::B_HANDLE_BUFFER);
	return B_OK;
}
status_t
SoundPlayNode::HandleStop(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
	CALLED();
	// flush the queue so downstreamers don't get any more
	EventQueue()->FlushEvents(0, BTimedEventQueue::B_ALWAYS, true,
		SEND_NEW_BUFFER_EVENT);
	return B_OK;
}
status_t
ESDSinkNode::TimeSourceOp(const time_source_op_info& op, void* _reserved)
{
	CALLED();
	switch (op.op) {
		case B_TIMESOURCE_START:
			PRINT(("TimeSourceOp op B_TIMESOURCE_START\n"));
			if (RunState() != BMediaEventLooper::B_STARTED) {
				fTimeSourceStarted = true;
				media_timed_event startEvent(0, BTimedEventQueue::B_START);
				EventQueue()->AddEvent(startEvent);
			}
			break;
		case B_TIMESOURCE_STOP:
			PRINT(("TimeSourceOp op B_TIMESOURCE_STOP\n"));
			if (RunState() == BMediaEventLooper::B_STARTED) {
				media_timed_event stopEvent(0, BTimedEventQueue::B_STOP);
				EventQueue()->AddEvent(stopEvent);
				fTimeSourceStarted = false;
				PublishTime(0, 0, 0);
			}
			break;
		case B_TIMESOURCE_STOP_IMMEDIATELY:
			PRINT(("TimeSourceOp op B_TIMESOURCE_STOP_IMMEDIATELY\n"));
			if (RunState() == BMediaEventLooper::B_STARTED) {
				media_timed_event stopEvent(0, BTimedEventQueue::B_STOP);
				EventQueue()->AddEvent(stopEvent);
				fTimeSourceStarted = false;
				PublishTime(0, 0, 0);
			}
			break;
		case B_TIMESOURCE_SEEK:
			PRINT(("TimeSourceOp op B_TIMESOURCE_SEEK\n"));
			BroadcastTimeWarp(op.real_time, op.performance_time);
			break;
		default:
			break;
	}
	return B_OK;
}
/* virtual */ status_t
BMediaEventLooper::AddTimer(bigtime_t at_performance_time, int32 cookie)
{
	CALLED();
	media_timed_event event(at_performance_time, BTimedEventQueue::B_TIMER, NULL,
		BTimedEventQueue::B_EXPIRE_TIMER);
	event.data = cookie;
	return EventQueue()->AddEvent(event);
}
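// For context: the B_TIMER event queued by AddTimer() above is later taken off
// the queue by the looper's dispatch path and turned into a TimerExpired()
// notification. The fragment below is a hedged sketch, not the library's actual
// implementation; the class name MyEventLooper is hypothetical. It only shows
// how the cookie stored in event.data can travel from AddTimer() to
// TimerExpired().
void
MyEventLooper::DispatchEvent(const media_timed_event* event,
	bigtime_t lateness, bool realTimeEvent)
{
	if (event->type == BTimedEventQueue::B_TIMER) {
		// event->data carries the cookie that was passed to AddTimer()
		TimerExpired(event->event_time, event->data);
		return;
	}

	// everything else goes through the normal event handling path
	HandleEvent(event, lateness, realTimeEvent);
}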
void
ConsumerNode::ProducerDataStatus(const media_destination& for_whom,
	int32 status, bigtime_t at_performance_time)
{
	out("ConsumerNode::ProducerDataStatus\n");
	if (for_whom == mInput.destination) {
		media_timed_event event(at_performance_time,
			BTimedEventQueue::B_DATA_STATUS, &mInput,
			BTimedEventQueue::B_NO_CLEANUP, status, 0, NULL);
		EventQueue()->AddEvent(event);
	}
}
void
EqualizerNode::SetParameterValue(int32 id, bigtime_t time, const void* value,
	size_t size)
{
	if (id == P_PREAMP || id == P_BYPASS || id == P_MUTE
		|| (id >= P_BANDS && id < P_BANDS + fEqualizer.BandCount())) {
		media_timed_event ev(time, BTimedEventQueue::B_PARAMETER, (void*)value,
			BTimedEventQueue::B_NO_CLEANUP, size, id, (char*)"EQ");
		// dirty hack for parameter processing (mediakit bug????)
		ParameterEventProcessing(&ev);
		EventQueue()->AddEvent(ev);
	}
}
void
LoggingConsumer::ProducerDataStatus(const media_destination& for_whom,
	int32 status, bigtime_t at_performance_time)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	logMsg.data_status.status = status;
	mLogger->Log(LOG_PRODUCER_DATA_STATUS, logMsg);

	if (for_whom == mInput.destination) {
		media_timed_event event(at_performance_time,
			BTimedEventQueue::B_DATA_STATUS, &mInput,
			BTimedEventQueue::B_NO_CLEANUP, status, 0, NULL);
		EventQueue()->AddEvent(event);
	}
}
void
VideoConsumer::BufferReceived(BBuffer* buffer)
{
	LOOP("VideoConsumer::Buffer #%ld received\n", buffer->ID());

	if (RunState() == B_STOPPED) {
		buffer->Recycle();
		return;
	}

	media_timed_event event(buffer->Header()->start_time,
		BTimedEventQueue::B_HANDLE_BUFFER, buffer,
		BTimedEventQueue::B_RECYCLE_BUFFER);
	EventQueue()->AddEvent(event);
}
void
ESDSinkNode::ProducerDataStatus(const media_destination& for_whom,
	int32 status, bigtime_t at_performance_time)
{
	CALLED();

	if (fInput.destination != for_whom) {
		fprintf(stderr, "invalid destination received in ESDSinkNode::ProducerDataStatus\n");
		return;
	}

	media_timed_event event(at_performance_time, BTimedEventQueue::B_DATA_STATUS,
		&fInput, BTimedEventQueue::B_NO_CLEANUP, status, 0, NULL);
	EventQueue()->AddEvent(event);
}
void
VideoRecorderNode::BufferReceived(BBuffer* inBuffer)
{
	INFO("VideoRecorderNode::BufferReceived():\n");

	if (RunMode() == B_OFFLINE) {
		// int32 destinationID = inBuffer->Header()->destination;
		SetOfflineTime(inBuffer->Header()->start_time);
	}

	status_t err;
	media_timed_event event(inBuffer->Header()->start_time,
		BTimedEventQueue::B_HANDLE_BUFFER, inBuffer,
		BTimedEventQueue::B_RECYCLE_BUFFER);
	err = EventQueue()->AddEvent(event);
	if (err != B_OK)
		inBuffer->Recycle();
}
void
VideoNode::BufferReceived(BBuffer* buffer)
{
	if (RunState() != B_STARTED) {
		buffer->Recycle();
		return;
	}

	if (fOverlayActive && fDirectOverlayBuffer) {
		HandleBuffer(buffer);
	} else {
		media_timed_event event(buffer->Header()->start_time,
			BTimedEventQueue::B_HANDLE_BUFFER, buffer,
			BTimedEventQueue::B_RECYCLE_BUFFER);
		EventQueue()->AddEvent(event);
	}
}
// protected:
status_t
AbstractFileInterfaceNode::HandleStart(
		const media_timed_event* event,
		bigtime_t lateness,
		bool realTimeEvent)
{
	CALLED();
	if (RunState() != B_STARTED) {
		// XXX: Either use the following line or the lines that are not commented.
		// There doesn't seem to be a practical difference that i can tell.
		// HandleBuffer(event,lateness,realTimeEvent);
		media_timed_event firstBufferEvent(event->event_time,
			BTimedEventQueue::B_HANDLE_BUFFER);
		HandleEvent(&firstBufferEvent, 0, false);
		EventQueue()->AddEvent(firstBufferEvent);
	}
	return B_OK;
}
void
LegacyAudioConsumer::BufferReceived(BBuffer* buffer)
{
	if (!mRunning) {
		buffer->Recycle();
	} else {
		media_timed_event event(buffer->Header()->start_time,
			BTimedEventQueue::B_HANDLE_BUFFER, buffer,
			BTimedEventQueue::B_RECYCLE_BUFFER);
		EventQueue()->AddEvent(event);
	}
}
void
VideoNode::HandleEvent(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
	switch (event->type) {
		case BTimedEventQueue::B_START:
			break;

		case BTimedEventQueue::B_STOP:
			EventQueue()->FlushEvents(event->event_time, BTimedEventQueue::B_ALWAYS,
				true, BTimedEventQueue::B_HANDLE_BUFFER);
			break;

		case BTimedEventQueue::B_HANDLE_BUFFER:
			HandleBuffer((BBuffer*)event->pointer);
			break;

		default:
			printf("VideoNode::HandleEvent unknown event\n");
			break;
	}
}
void
ESDSinkNode::BufferReceived(BBuffer* buffer)
{
	CALLED();
	switch (buffer->Header()->type) {
		/*case B_MEDIA_PARAMETERS:
			{
				status_t status = ApplyParameterData(buffer->Data(), buffer->SizeUsed());
				if (status != B_OK) {
					fprintf(stderr, "ApplyParameterData in ESDSinkNode::BufferReceived failed\n");
				}
				buffer->Recycle();
			}
			break;*/
		case B_MEDIA_RAW_AUDIO:
#if 0
			if (buffer->Flags() & BBuffer::B_SMALL_BUFFER) {
				fprintf(stderr, "NOT IMPLEMENTED: B_SMALL_BUFFER in ESDSinkNode::BufferReceived\n");
				// XXX: implement this part
				buffer->Recycle();
			} else {
				media_timed_event event(buffer->Header()->start_time,
					BTimedEventQueue::B_HANDLE_BUFFER, buffer,
					BTimedEventQueue::B_RECYCLE_BUFFER);
				status_t status = EventQueue()->AddEvent(event);
				if (status != B_OK) {
					fprintf(stderr, "EventQueue()->AddEvent(event) in ESDSinkNode::BufferReceived failed\n");
					buffer->Recycle();
				}
			}
#endif
			if (fDevice->CanSend()) {
				fDevice->Write(buffer->Data(), buffer->SizeUsed());
			}
			buffer->Recycle();
			break;

		default:
			fprintf(stderr, "unexpected buffer type in ESDSinkNode::BufferReceived\n");
			buffer->Recycle();
			break;
	}
}
void
VideoRecorderNode::HandleEvent(const media_timed_event* event,
	bigtime_t lateness, bool realTimeEvent)
{
	INFO("VideoRecorderNode::HandleEvent()\n");

	switch (event->type) {
		case BTimedEventQueue::B_HANDLE_BUFFER:
		{
			BBuffer* buffer = const_cast<BBuffer*>((BBuffer*)event->pointer);
			if (buffer)
				HandleBufferWrap(buffer, lateness);
			break;
		}

		case BTimedEventQueue::B_START:
			running = true;
			break;

		case BTimedEventQueue::B_STOP:
			running = false;
			EventQueue()->FlushEvents(0, BTimedEventQueue::B_ALWAYS, true,
				BTimedEventQueue::B_HANDLE_BUFFER);
			break;

		case BTimedEventQueue::B_DATA_STATUS:
		{
			media_destination* dest = (media_destination*)event->pointer;
			if (filterInput.destination == *dest)
				; // data available (or not); use it later
			break;
		}

		default:
			ERROR("Unhandled Event in FMediaNode (%i)\n", (int)event->type);
			break;
	}
}
void
LoggingConsumer::BufferReceived(BBuffer* buffer)
{
	bigtime_t bufferStart = buffer->Header()->start_time;
	bigtime_t now = TimeSource()->Now();
	bigtime_t how_early = bufferStart - EventLatency() - SchedulingLatency() - now;

	log_message logMsg;
	logMsg.now = now;
	logMsg.buffer_data.start_time = bufferStart;
	logMsg.buffer_data.offset = how_early;
	mLogger->Log(LOG_BUFFER_RECEIVED, logMsg);

	// There's a special case here with handling B_MEDIA_PARAMETERS buffers.
	// These contain sets of parameter value changes, with their own performance
	// times embedded in the buffers. So, we want to dispatch those parameter
	// changes as their own events rather than pushing this buffer on the queue
	// to be handled later.
	if (B_MEDIA_PARAMETERS == buffer->Header()->type) {
		ApplyParameterData(buffer->Data(), buffer->SizeUsed());
		buffer->Recycle();
	} else {
		// ahh, it's a regular media buffer, so push it on the event queue
		status_t err;
		media_timed_event event(buffer->Header()->start_time,
			BTimedEventQueue::B_HANDLE_BUFFER, buffer,
			BTimedEventQueue::B_RECYCLE_BUFFER);
		err = EventQueue()->AddEvent(event);

		// HandleEvent() will recycle the buffer. However, if we incurred an
		// error trying to put the event into the queue, we have to recycle it
		// ourselves, since HandleEvent() will never see the buffer in that case.
		if (err)
			buffer->Recycle();
	}
}
status_t
SoundPlayNode::HandleStart(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
	CALLED();
	// don't do anything if we're already running
	if (RunState() != B_STARTED) {
		// We want to start sending buffers now, so we set up the buffer-sending
		// bookkeeping and fire off the first "produce a buffer" event.
		fFramesSent = 0;
		fStartTime = event->event_time;
		media_timed_event firstBufferEvent(event->event_time,
			SEND_NEW_BUFFER_EVENT);

		// Alternatively, we could call HandleEvent() directly with this event,
		// to avoid a trip through the event queue, like this:
		//
		//		this->HandleEvent(&firstBufferEvent, 0, false);
		//
		EventQueue()->AddEvent(firstBufferEvent);
	}
	return B_OK;
}
void
ToneProducer::SetParameterValue(int32 id, bigtime_t performance_time,
	const void* value, size_t size)
{
	switch (id) {
		case FREQUENCY_PARAM:
		case GAIN_PARAM:
		case WAVEFORM_PARAM:
		{
			// floats and int32s are the same size, so we need only check the
			// block's size once
			if (size > sizeof(float))
				size = sizeof(float);

			// submit the parameter change as a performance event, to be handled
			// at the appropriate time
			media_timed_event event(performance_time, _PARAMETER_EVENT, NULL,
				BTimedEventQueue::B_NO_CLEANUP, size, id, (char*)value, size);
			EventQueue()->AddEvent(event);
			break;
		}

		default:
			break;
	}
}
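// The _PARAMETER_EVENT queued above is consumed later in the node's
// HandleEvent(). The fragment below is a hedged sketch of that consumer side:
// the mapping of event->data to the payload size and event->bigdata to the
// parameter id mirrors the constructor call above, but the helper name
// HandleParameterEvent and the member mGain are hypothetical, not a verbatim
// copy of ToneProducer.
void
ToneProducer::HandleParameterEvent(const media_timed_event* event)
{
	size_t dataSize = size_t(event->data);	// size was passed as "data"
	int32 id = int32(event->bigdata);		// parameter id was passed as "bigdata"
	if (dataSize < sizeof(float))
		return;

	// the payload bytes were copied into the event's user_data area
	float value = *(const float*)event->user_data;
	switch (id) {
		case GAIN_PARAM:
			mGain = value;	// apply the deferred parameter change
			break;
		default:
			break;
	}
}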