Example #1
// -------------------------------------------------------- //
// implementation for BMediaEventLooper
// -------------------------------------------------------- //
// protected:
status_t MediaReader::HandleBuffer(
				const media_timed_event *event,
				bigtime_t lateness,
				bool realTimeEvent)
{
	CALLED();

	if (output.destination == media_destination::null)
		return B_MEDIA_NOT_CONNECTED;

	status_t status = B_OK;
	BBuffer* buffer = fBufferGroup->RequestBuffer(
		output.format.u.multistream.max_chunk_size, fBufferPeriod);
	if (buffer != NULL) {
		status = FillFileBuffer(buffer);
		if (status != B_OK) {
			PRINT("MediaReader::HandleBuffer got an error from FillFileBuffer.\n");
			buffer->Recycle();
		} else {
			if (fOutputEnabled) {
				status = SendBuffer(buffer,output.destination);
				if (status != B_OK) {
					PRINT("MediaReader::HandleEvent got an error from SendBuffer.\n");
					buffer->Recycle();
				}
			} else {
				buffer->Recycle();
			}
		}
	}
	bigtime_t nextEventTime = event->event_time + fBufferPeriod;
	media_timed_event nextBufferEvent(nextEventTime, BTimedEventQueue::B_HANDLE_BUFFER);
	EventQueue()->AddEvent(nextBufferEvent);
	return status;
}
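
Example #1 shows only the B_HANDLE_BUFFER handler itself. In a BMediaEventLooper subclass, a handler like this is normally reached from the node's HandleEvent() override, which dispatches on event->type (Examples #3, #5 and #6 below do exactly that). A minimal dispatch sketch; "MyReaderNode" and its HandleBuffer() member are illustrative, not MediaReader's actual code:

// Minimal sketch only: route B_HANDLE_BUFFER events to a handler like the one above.
void
MyReaderNode::HandleEvent(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
	switch (event->type) {
		case BTimedEventQueue::B_HANDLE_BUFFER:
			// produce and send one buffer, then reschedule (see Example #1)
			HandleBuffer(event, lateness, realTimeEvent);
			break;

		case BTimedEventQueue::B_START:
		case BTimedEventQueue::B_STOP:
			// start/stop bookkeeping would go here
			break;

		default:
			break;
	}
}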
Example #2
status_t
ClientNode::_ScheduleOutputEvent(bigtime_t event)
{
    media_timed_event nextBufferEvent(event, NEW_BUFFER_EVENT);

    // Propagate the queue's status so callers can detect a failed insertion.
    return EventQueue()->AddEvent(nextBufferEvent);
}
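
A hedged usage sketch (the calling context and members are illustrative, not ClientNode's actual code): from a buffer handler, the next NEW_BUFFER_EVENT would typically be scheduled one buffer duration after the event currently being handled, mirroring Example #1:

// Illustrative only: schedule the next output event one buffer period later.
bigtime_t nextTime = event->event_time + BufferDuration();
status_t status = _ScheduleOutputEvent(nextTime);
if (status != B_OK)
	ERROR("ClientNode: scheduling the next buffer event failed\n");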
Example #3
void
AudioProducer::HandleEvent(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
	TRACE_BUFFER("%p->AudioProducer::HandleEvent()\n", this);

	switch (event->type) {
		case BTimedEventQueue::B_START:
			TRACE("AudioProducer::HandleEvent(B_START)\n");
			if (RunState() != B_STARTED) {
				fFramesSent = 0;
				fStartTime = event->event_time + fSupplier->InitialLatency();
printf("B_START: start time: %lld\n", fStartTime);
				media_timed_event firstBufferEvent(
					fStartTime - fSupplier->InitialLatency(),
					BTimedEventQueue::B_HANDLE_BUFFER);
				EventQueue()->AddEvent(firstBufferEvent);
			}
			TRACE("AudioProducer::HandleEvent(B_START) done\n");
			break;

		case BTimedEventQueue::B_STOP:
			TRACE("AudioProducer::HandleEvent(B_STOP)\n");
			EventQueue()->FlushEvents(0, BTimedEventQueue::B_ALWAYS, true,
				BTimedEventQueue::B_HANDLE_BUFFER);
			TRACE("AudioProducer::HandleEvent(B_STOP) done\n");
			break;

		case BTimedEventQueue::B_HANDLE_BUFFER:
		{
			TRACE_BUFFER("AudioProducer::HandleEvent(B_HANDLE_BUFFER)\n");
			if (RunState() == BMediaEventLooper::B_STARTED
				&& fOutput.destination != media_destination::null) {
				BBuffer* buffer = _FillNextBuffer(event->event_time);
				if (buffer) {
					status_t err = B_ERROR;
					if (fOutputEnabled) {
						err = SendBuffer(buffer, fOutput.source,
							fOutput.destination);
					}
					if (err)
						buffer->Recycle();
				}
				size_t sampleSize = fOutput.format.u.raw_audio.format
						& media_raw_audio_format::B_AUDIO_SIZE_MASK;

				size_t nFrames = fOutput.format.u.raw_audio.buffer_size
					/ (sampleSize * fOutput.format.u.raw_audio.channel_count);
				fFramesSent += nFrames;

				fNextScheduledBuffer = fStartTime
					+ bigtime_t(double(fFramesSent) * 1000000.0
						/ double(fOutput.format.u.raw_audio.frame_rate));
				media_timed_event nextBufferEvent(fNextScheduledBuffer,
					BTimedEventQueue::B_HANDLE_BUFFER);
				EventQueue()->AddEvent(nextBufferEvent);
			} else {
				ERROR("B_HANDLE_BUFFER, but not started!\n");
			}
			TRACE_BUFFER("AudioProducer::HandleEvent(B_HANDLE_BUFFER) done\n");
			break;
		}
		default:
			break;
	}
}
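
The scheduling arithmetic in Examples #3 through #6 is the same: frames per buffer come from the buffer size divided by the frame size (sample size times channel count), and the performance time of the next buffer is the start time plus the total frames sent, converted to microseconds. A small self-contained sketch of that conversion; the helper names are hypothetical and not part of AudioProducer:

// Hypothetical helpers isolating the arithmetic used above.
static size_t
frames_per_buffer(const media_raw_audio_format& format)
{
	size_t sampleSize = format.format & media_raw_audio_format::B_AUDIO_SIZE_MASK;
	return format.buffer_size / (sampleSize * format.channel_count);
}

static bigtime_t
performance_time_for_frames(bigtime_t startTime, int64 framesSent,
	float frameRate)
{
	// frames / (frames per second) = seconds; scale to microseconds
	return startTime + bigtime_t(double(framesSent) * 1000000.0
		/ double(frameRate));
}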
Example #4
// how should we handle late buffers?  drop them?
// notify the producer?
status_t
SoundPlayNode::SendNewBuffer(const media_timed_event* event,
	bigtime_t lateness, bool realTimeEvent)
{
	CALLED();
	// printf("latency = %12Ld, event = %12Ld, sched = %5Ld, arrive at %12Ld, now %12Ld, current lateness %12Ld\n", EventLatency() + SchedulingLatency(), EventLatency(), SchedulingLatency(), event->event_time, TimeSource()->Now(), lateness);

	// make sure we're both started *and* connected before delivering a buffer
	if (RunState() != BMediaEventLooper::B_STARTED
		|| fOutput.destination == media_destination::null)
		return B_OK;

	// The event->event_time is the time at which the buffer we are preparing
	// here should arrive at its destination. The MediaEventLooper should have
	// scheduled us early enough (based on EventLatency() and the
	// SchedulingLatency()) to make this possible.
	// lateness is independent of EventLatency()!

	if (lateness > (BufferDuration() / 3)) {
		printf("SoundPlayNode::SendNewBuffer, event scheduled much too late, "
			"lateness is %Ld\n", lateness);
	}

	// skip buffer creation if output not enabled
	if (fOutputEnabled) {

		// Get the next buffer of data
		BBuffer* buffer = FillNextBuffer(event->event_time);

		if (buffer) {

			// If we are ready way too early, decrease internal latency
/*
			bigtime_t how_early = event->event_time - TimeSource()->Now() - fLatency - fInternalLatency;
			if (how_early > 5000) {

				printf("SoundPlayNode::SendNewBuffer, event scheduled too early, how_early is %Ld\n", how_early);

				if (fTooEarlyCount++ == 5) {
					fInternalLatency -= how_early;
					if (fInternalLatency < 500)
						fInternalLatency = 500;
					printf("SoundPlayNode::SendNewBuffer setting internal latency to %Ld\n", fInternalLatency);
					SetEventLatency(fLatency + fInternalLatency);
					fTooEarlyCount = 0;
				}
			}
*/
			// send the buffer downstream if and only if output is enabled
			if (SendBuffer(buffer, fOutput.source, fOutput.destination)
					!= B_OK) {
				// we need to recycle the buffer
				// if the call to SendBuffer() fails
				printf("SoundPlayNode::SendNewBuffer: Buffer sending "
					"failed\n");
				buffer->Recycle();
			}
		}
	}

	// track how much media we've delivered so far
	size_t nFrames = fOutput.format.u.raw_audio.buffer_size
		/ ((fOutput.format.u.raw_audio.format
			& media_raw_audio_format::B_AUDIO_SIZE_MASK)
		* fOutput.format.u.raw_audio.channel_count);
	fFramesSent += nFrames;

	// The buffer is on its way; now schedule the next one to go
	// nextEvent is the time at which the buffer should arrive at its
	// destination
	bigtime_t nextEvent = fStartTime + bigtime_t((1000000LL * fFramesSent)
		/ (int32)fOutput.format.u.raw_audio.frame_rate);
	media_timed_event nextBufferEvent(nextEvent, SEND_NEW_BUFFER_EVENT);
	EventQueue()->AddEvent(nextBufferEvent);

	return B_OK;
}
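
The comment above SendNewBuffer() asks how late buffers should be handled. One possible policy, sketched here as an assumption (it is not what SoundPlayNode actually does; the node only logs and continues): if the event is more than a whole buffer late, skip producing a buffer for this period, but still advance the frame count and schedule the next event so the stream stays on its original time grid.

// Hypothetical late-buffer policy, reusing SoundPlayNode's member names.
if (lateness > BufferDuration()) {
	printf("SoundPlayNode::SendNewBuffer: dropping this period, lateness is %Ld\n",
		lateness);
	size_t droppedFrames = fOutput.format.u.raw_audio.buffer_size
		/ ((fOutput.format.u.raw_audio.format
			& media_raw_audio_format::B_AUDIO_SIZE_MASK)
		* fOutput.format.u.raw_audio.channel_count);
	fFramesSent += droppedFrames;
	bigtime_t nextEvent = fStartTime + bigtime_t((1000000LL * fFramesSent)
		/ (int32)fOutput.format.u.raw_audio.frame_rate);
	EventQueue()->AddEvent(media_timed_event(nextEvent, SEND_NEW_BUFFER_EVENT));
	return B_OK;
}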
Example #5
void
ToneProducer::HandleEvent(const media_timed_event* event, bigtime_t lateness, bool realTimeEvent)
{
//	FPRINTF(stderr, "ToneProducer::HandleEvent\n");
	switch (event->type)
	{
	case BTimedEventQueue::B_START:
		// don't do anything if we're already running
		if (RunState() != B_STARTED)
		{
			// We want to start sending buffers now, so we set up the buffer-sending bookkeeping
			// and fire off the first "produce a buffer" event.
			mFramesSent = 0;
			mTheta = 0;
			mStartTime = event->event_time;
			media_timed_event firstBufferEvent(mStartTime, BTimedEventQueue::B_HANDLE_BUFFER);

			// Alternatively, we could call HandleEvent() directly with this event, to avoid a trip through
			// the event queue, like this:
			//
			//		this->HandleEvent(&firstBufferEvent, 0, false);
			//
			EventQueue()->AddEvent(firstBufferEvent);
		}
		break;

	case BTimedEventQueue::B_STOP:
		FPRINTF(stderr, "Handling B_STOP event\n");

		// When we handle a stop, we must ensure that downstream consumers don't
		// get any more buffers from us.  This means we have to flush any pending
		// buffer-producing events from the queue.
		EventQueue()->FlushEvents(0, BTimedEventQueue::B_ALWAYS, true, BTimedEventQueue::B_HANDLE_BUFFER);
		break;

	case _PARAMETER_EVENT:
		{
			size_t dataSize = size_t(event->data);
			int32 param = int32(event->bigdata);
			if (dataSize >= sizeof(float)) switch (param)
			{
			case FREQUENCY_PARAM:
				{
					float newValue = *((float*) event->user_data);
					if (mFrequency != newValue)		// an actual change in the value?
					{
						mFrequency = newValue;
						mFreqLastChanged = TimeSource()->Now();
						BroadcastNewParameterValue(mFreqLastChanged, param, &mFrequency, sizeof(mFrequency));
					}
				}
				break;

			case GAIN_PARAM:
				{
					float newValue = *((float*) event->user_data);
					if (mGain != newValue)
					{
						mGain = newValue;
						mGainLastChanged = TimeSource()->Now();
						BroadcastNewParameterValue(mGainLastChanged, param, &mGain, sizeof(mGain));
					}
				}
				break;

			case WAVEFORM_PARAM:
				{
					int32 newValue = *((int32*) event->user_data);
					if (mWaveform != newValue)
					{
						mWaveform = newValue;
						mTheta = 0;			// reset the generator parameters when we change waveforms
						mWaveAscending = true;
						mWaveLastChanged = TimeSource()->Now();
						BroadcastNewParameterValue(mWaveLastChanged, param, &mWaveform, sizeof(mWaveform));
					}
				}
				break;

			default:
				FPRINTF(stderr, "Hmmm... got a B_PARAMETER event for a parameter we don't have? (%" B_PRId32 ")\n", param);
				break;
			}
		}
		break;

	case BTimedEventQueue::B_HANDLE_BUFFER:
		{
			// make sure we're both started *and* connected before delivering a buffer
			if (RunState() == BMediaEventLooper::B_STARTED
				&& mOutput.destination != media_destination::null) {
				// Get the next buffer of data
				BBuffer* buffer = FillNextBuffer(event->event_time);
				if (buffer) {
					// send the buffer downstream if and only if output is enabled
					status_t err = B_ERROR;
					if (mOutputEnabled) {
						err = SendBuffer(buffer, mOutput.source,
							mOutput.destination);
					}
					if (err) {
						// we need to recycle the buffer ourselves if output is disabled or
						// if the call to SendBuffer() fails
						buffer->Recycle();
					}
				}

				// track how much media we've delivered so far
				size_t nFrames = mOutput.format.u.raw_audio.buffer_size /
					(sizeof(float) * mOutput.format.u.raw_audio.channel_count);
				mFramesSent += nFrames;

				// The buffer is on its way; now schedule the next one to go
				bigtime_t nextEvent = mStartTime + bigtime_t(double(mFramesSent)
					/ double(mOutput.format.u.raw_audio.frame_rate) * 1000000.0);
				media_timed_event nextBufferEvent(nextEvent,
					BTimedEventQueue::B_HANDLE_BUFFER);
				EventQueue()->AddEvent(nextBufferEvent);
			}
		}
		break;

	default:
		break;
	}
}
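
For context, the _PARAMETER_EVENT handled above is typically posted from the node's SetParameterValue() override, with the value size carried in the event's data field, the parameter ID in bigdata, and the value bytes copied into user_data. A sketch under the assumption that the long media_timed_event constructor takes (time, type, pointer, cleanup flag, data, bigdata, user_data, size); this is not necessarily ToneProducer's exact code:

// Sketch of the posting side of the _PARAMETER_EVENT consumed above.
void
ToneProducer::SetParameterValue(int32 id, bigtime_t performanceTime,
	const void* value, size_t size)
{
	if (id == FREQUENCY_PARAM || id == GAIN_PARAM || id == WAVEFORM_PARAM) {
		if (size > sizeof(float))
			size = sizeof(float);
		media_timed_event event(performanceTime, _PARAMETER_EVENT,
			NULL, BTimedEventQueue::B_NO_CLEANUP,
			int32(size), id, (char*)value, size);
		EventQueue()->AddEvent(event);
	}
}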
Example #6
void
GameProducer::HandleEvent(const media_timed_event* event, bigtime_t lateness,
	bool realTimeEvent)
{
//	FPRINTF(stderr, "GameProducer::HandleEvent\n");
	switch (event->type)
	{
	case BTimedEventQueue::B_START:
		// don't do anything if we're already running
		if (RunState() != B_STARTED) {
			// Going to start sending buffers so setup the needed bookkeeping
			fFramesSent = 0;
			fStartTime = event->event_time;
			media_timed_event firstBufferEvent(fStartTime,
				BTimedEventQueue::B_HANDLE_BUFFER);

			// Alternatively, we could call HandleEvent() directly with this
			// event, to avoid a trip through the event queue like this:
			//		this->HandleEvent(&firstBufferEvent, 0, false);
			EventQueue()->AddEvent(firstBufferEvent);
		}
		break;

	case BTimedEventQueue::B_STOP:
		// When we handle a stop, we must ensure that downstream consumers don't
		// get any more buffers from us.  This means we have to flush any
		// pending buffer-producing events from the queue.
		EventQueue()->FlushEvents(0, BTimedEventQueue::B_ALWAYS, true,
			BTimedEventQueue::B_HANDLE_BUFFER);
		break;

	case BTimedEventQueue::B_HANDLE_BUFFER:
		{
			// Ensure we're both started and connected before delivering buffer
			if ((RunState() == BMediaEventLooper::B_STARTED)
				&& (fOutput.destination != media_destination::null)) {
				// Get the next buffer of data
				BBuffer* buffer = FillNextBuffer(event->event_time);
				if (buffer) {
					// Send the buffer downstream if output is enabled
					status_t err = B_ERROR;
					if (fOutputEnabled) {
						err = SendBuffer(buffer, fOutput.source,
							fOutput.destination);
					}
					if (err) {
						// we need to recycle the buffer ourselves if output is
						// disabled or if the call to SendBuffer() fails
						buffer->Recycle();
					}
				}

				// track how much media we've delivered so far
				size_t nFrames = fBufferSize / fFrameSize;
				fFramesSent += nFrames;

				// The buffer is on its way; now schedule the next one to go
				bigtime_t nextEvent = fStartTime + bigtime_t(double(fFramesSent)
					/ double(fOutput.format.u.raw_audio.frame_rate)
					* 1000000.0);
				media_timed_event nextBufferEvent(nextEvent,
					BTimedEventQueue::B_HANDLE_BUFFER);
				EventQueue()->AddEvent(nextBufferEvent);
			}
		}
		break;

	default:
		break;
	}
}
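
Unlike Examples #3 and #4, GameProducer divides a cached fBufferSize by a cached fFrameSize instead of decoding the format on every event. A sketch of how those members might be derived once from the negotiated raw-audio format, following the same arithmetic as the earlier examples (where GameProducer actually does this is not shown here):

// Illustrative derivation of the cached sizes used in Example #6.
size_t sampleSize = fOutput.format.u.raw_audio.format
	& media_raw_audio_format::B_AUDIO_SIZE_MASK;
fFrameSize = sampleSize * fOutput.format.u.raw_audio.channel_count;
fBufferSize = fOutput.format.u.raw_audio.buffer_size;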