Example #1
status_t MediaReader::FillFileBuffer(
				BBuffer * buffer)
{
	CALLED();

	if (GetCurrentFile() == 0) {
		PRINT("\t<- B_NO_INIT\n");
		return B_NO_INIT;
	}
	PRINT("\t%ld buffer bytes used, %ld buffer bytes available\n",
			buffer->SizeUsed(), buffer->SizeAvailable());
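	// remember where this read starts; it is reported below as the header's file_pos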
	off_t position = GetCurrentFile()->Position();
	ssize_t bytesRead = GetCurrentFile()->Read(buffer->Data(),buffer->SizeAvailable());
	if (bytesRead < 0) {
		PRINT("\t<- B_FILE_ERROR\n");
		return B_FILE_ERROR; // some sort of file related error
	}
	PRINT("\t%ld file bytes read at position %Ld.\n",
			bytesRead, position);

	buffer->SetSizeUsed(bytesRead);
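	// fill in the buffer header so consumers know what kind of data they are getting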
	media_header * header = buffer->Header();
	header->type = B_MEDIA_MULTISTREAM;
	header->size_used = bytesRead;
	header->file_pos = position;
	header->orig_size = bytesRead;
	header->time_source = TimeSource()->ID();
	header->start_time = TimeSource()->Now();
	// nothing more to say?
	return B_OK;
}
Example #2
status_t EMBeOutputNode::HandleMessage(int32 message, const void* data, size_t size)
{
	if(message == EM_PORT_MESSAGE_INCOMING_BUFFER)
	{
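		// a producer delivered a buffer via port message: stamp it and queue it as a B_HANDLE_BUFFER event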
		m_opBuffer = *((BBuffer**) data);
		if(RunState() != B_STARTED)
		{
			emerr << "ERROR! Node received buffer when not running!" << endl;
			if(m_opBuffer != NULL)
				m_opBuffer -> Recycle();
			return B_OK;
		}

		if(m_opBuffer != NULL)
		{
			vCount++;
			EMMediaFormat* opFormat = GetConnectedEMMediaFormat();
			if((opFormat -> m_eType & EM_TYPE_ANY_VIDEO) > 0)
			{
				opFormat -> m_vFrameRate = *(static_cast<float*>(EMMediaEngine::Instance() -> GetSettingsRepository() -> GetSetting(SETTING_VIDEO_FRAMERATE)));			
			}

			int64 vFrames = EMBeMediaUtility::FramesInBuffer(m_opBuffer, opFormat, EM_TYPE_RAW_AUDIO); // + 44100 / 4;
			m_vNumberOfReceivedFrames += vFrames;
			int64 vTime = EMBeMediaUtility::FramesToTime(m_vNumberOfReceivedFrames, m_opSystemAudioFormat);
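			// this buffer should start playing at: starting time + playback time of all frames received so far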
			int64 vNextBufferPerf = static_cast<int64>(floor(static_cast<float>(m_vStartingTime) + vTime));
			
			m_opBuffer -> Header() -> start_time = vNextBufferPerf;
			
			media_timed_event sEvent(vNextBufferPerf, BTimedEventQueue::B_HANDLE_BUFFER);
			sEvent.pointer = m_opBuffer;
			sEvent.cleanup = BTimedEventQueue::B_RECYCLE_BUFFER;
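			// B_RECYCLE_BUFFER tells the event queue to recycle the buffer if the event is flushed without being handled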
			status_t vResult = EventQueue() -> AddEvent(sEvent);
			if(vResult != B_OK)
			{
				emerr << "ERROR! Couldn't add the B_HANDLE_BUFFER event to the queue: " << strerror(vResult) << endl;
				m_opBuffer -> Recycle();
			}
		}
		else
			emerr << "ERROR! Node received invalid EM_PORT_MESSAGE_INCOMING_BUFFER message!" << endl;
		return B_OK;
	}
	else if(message == EM_PORT_MESSAGE_FLUSH_QUEUE)
	{
		media_timed_event sEvent(TimeSource() -> Now(), EM_TIMED_EVENT_FLUSH_CASH);
		EventQueue() -> AddEvent(sEvent);
	}
	else if(message == EM_PORT_MESSAGE_RESET)
	{
		media_timed_event sEvent(TimeSource() -> Now(), EM_TIMED_EVENT_RESET);
		EventQueue() -> AddEvent(sEvent);
	}
	else 
	{
		;//emout_commented_out_4_release << "ERROR! Received unknown message!" << endl;
		BMediaNode::HandleBadMessage(message, data, size);
	}
	return B_OK;
}
Example #3
void 
LoggingConsumer::Start(bigtime_t performance_time)
{
	PRINT(("LoggingConsumer::Start(%Ld): now %Ld\n", performance_time, TimeSource()->Now()));

	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_START, logMsg);

	BMediaEventLooper::Start(performance_time);
}
Example #4
void
FireWireDVNode::card_reader_thread()
{
	status_t err;
	size_t rbufsize;
	int rcount;

	fCard->GetBufInfo(&rbufsize, &rcount);
	delete fBufferGroupEncVideo;
	fBufferGroupEncVideo = new BBufferGroup(rbufsize, rcount);
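	// keep reading from the card until the node asks this thread to terminate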
	while (!fTerminateThreads) {
		void *data, *end;
		ssize_t sizeUsed = fCard->Read(&data);
		if (sizeUsed < 0) {
			TRACE("FireWireDVNode::%s: %s\n", __FUNCTION__,
				strerror(sizeUsed));
			continue;
		}

		end = (char*)data + sizeUsed;

		while (data < end) {
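			// get a free buffer from our group, waiting at most 10 ms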
			BBuffer* buf = fBufferGroupEncVideo->RequestBuffer(rbufsize, 10000);
			if (!buf) {
				TRACE("OutVideo: request buffer timeout\n");
				continue;
			}
			
			err = fCard->Extract(buf->Data(), &data, &sizeUsed);
			if (err) {
				buf->Recycle();
				printf("OutVideo Extract error %s\n", strerror(err));
				continue;
			}
	
			media_header* hdr = buf->Header();
			hdr->type = B_MEDIA_ENCODED_VIDEO;
			hdr->size_used = sizeUsed;
			hdr->time_source = TimeSource()->ID();	// set time source id
			//what should the start_time be?
			hdr->start_time = TimeSource()->PerformanceTimeFor(system_time());

			fLock.Lock();
			if (SendBuffer(buf, fOutputEncVideo.source,
					fOutputEncVideo.destination) != B_OK) {
				TRACE("OutVideo: sending buffer failed\n");
				buf->Recycle();
			} 
			fLock.Unlock();
		}
		
	}
}
Example #5
void 
LoggingConsumer::SetParameterValue(int32 id, bigtime_t performance_time, const void* value, size_t size)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	logMsg.param.id = id;
	mLogger->Log(LOG_SET_PARAM_VALUE, logMsg);

	// if it's one of our parameters, enqueue a "set parameter" event for handling at the appropriate time
	switch (id)
	{
	case LATENCY_PARAM:
	case CPU_SPIN_PARAM:
	case PRIORITY_PARAM:
		{
			// !!! Change from B_USER_EVENT to B_SET_PARAMETER once it's defined
			media_timed_event event(performance_time, BTimedEventQueue::B_USER_EVENT,
				(void*) value, BTimedEventQueue::B_NO_CLEANUP, size, id, NULL);
			EventQueue()->AddEvent(event);
		}
		break;

	default:		// do nothing for other parameter IDs
		break;
	}
	return;
}
Example #6
status_t ESDSinkNode::GetLatencyFor(
				const media_destination & for_whom,
				bigtime_t * out_latency,
				media_node_id * out_timesource)
{
	CALLED();
	if ((out_latency == 0) || (out_timesource == 0)) {
		fprintf(stderr,"<- B_BAD_VALUE\n");
		return B_BAD_VALUE;
	}
	
	if(fInput.destination != for_whom) {
		fprintf(stderr,"<- B_MEDIA_BAD_DESTINATION\n");
		return B_MEDIA_BAD_DESTINATION;
	}
	
	bigtime_t intl = EventLatency();
	bigtime_t netl = 0LL;
	if (fDevice)
		netl = fDevice->Latency();
	// I don't want to swap
	if (netl > 500000)
		netl = 500000;
	*out_latency = intl + netl;
	fprintf(stderr, "int latency %Ld, net latency %Ld, total latency %Ld\n", intl, netl, *out_latency);
	*out_timesource = TimeSource()->ID();
	return B_OK;
}
Example #7
void FlangerNode::BufferReceived(
	BBuffer* pBuffer) {
	ASSERT(pBuffer);

	// check buffer destination
	if(pBuffer->Header()->destination !=
		m_input.destination.id) {
		PRINT(("FlangerNode::BufferReceived():\n"
			"\tBad destination.\n"));
		pBuffer->Recycle();
		return;
	}

	if(pBuffer->Header()->time_source != TimeSource()->ID()) {
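		// not fatal: the buffer was stamped against a different time source, so its start_time is in another time base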
		PRINT(("* timesource mismatch\n"));
	}

	// check output
	if(m_output.destination == media_destination::null ||
		!m_outputEnabled) {
		pBuffer->Recycle();
		return;
	}

	// process and retransmit buffer
	filterBuffer(pBuffer);

	status_t err = SendBuffer(pBuffer, m_output.source, m_output.destination);
	if (err < B_OK) {
		PRINT(("FlangerNode::BufferReceived():\n"
			"\tSendBuffer() failed: %s\n", strerror(err)));
		pBuffer->Recycle();
	}
	// sent!
}
Example #8
status_t 
LoggingConsumer::Connected(
	const media_source& producer,
	const media_destination& where,
	const media_format& with_format,
	media_input* out_input)
{

	char formatStr[256];
	string_for_format(with_format, formatStr, 255);
	PRINT(("LoggingConsumer::Connected:\n\tformat %s\n", formatStr));
	string_for_format(mInput.format, formatStr, 255);
	PRINT(("\tinput format %s\n", formatStr));

	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_CONNECTED, logMsg);

	if (where != mInput.destination) return B_MEDIA_BAD_DESTINATION;

	// calculate my latency here, because it may depend on buffer sizes/durations, then
	// tell the BMediaEventLooper how early we need to get the buffers
	SetEventLatency(mLatency);

	// record useful information about the connection, and return success
	// * e.moon [14jun99]: stores format
	mInput.format = with_format;
	mInput.source = producer;
	*out_input = mInput;
	return B_OK;
}
Example #9
void
ProducerNode::BufferProducer()
{
	// this thread produces one buffer every two seconds,
	// and schedules it to be handled one second later than produced,
	// assuming a realtime timesource

	status_t rv;
	for (;;) {
		rv = acquire_sem_etc(mBufferProducerSem,1,B_RELATIVE_TIMEOUT,DELAY);
		if (rv == B_INTERRUPTED) {
			continue;
		} else if (rv == B_OK) {
			// triggered by AdditionalBufferRequested
			release_sem(mBufferProducerSem);
		} else if (rv != B_TIMED_OUT) {
			// triggered by deleting the semaphore (stop request)
			break;
		}
		if (!mOutputEnabled)
			continue;
			
		BBuffer *buffer;
//		out("ProducerNode: RequestBuffer\n");
		buffer = mBufferGroup->RequestBuffer(2048);
		if (!buffer) {
			// RequestBuffer failed; skip this round instead of dereferencing NULL
			continue;
		}
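		// schedule this buffer to be played DELAY/2 microseconds from now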
		buffer->Header()->start_time = TimeSource()->Now() + DELAY / 2;
		out("ProducerNode: SendBuffer, sheduled time = %5.4f\n",buffer->Header()->start_time / 1E6);
		rv = SendBuffer(buffer, mOutput.destination);
		if (rv != B_OK) {
			// nobody took the buffer; recycle it so the group does not run dry
			buffer->Recycle();
		}
	}
}
Example #10
BBuffer*
SoundPlayNode::FillNextBuffer(bigtime_t eventTime)
{
	CALLED();

	// get a buffer from our buffer group
	BBuffer* buffer = fBufferGroup->RequestBuffer(
		fOutput.format.u.raw_audio.buffer_size, BufferDuration() / 2);

	// If we fail to get a buffer (for example, if the request times out), we
	// skip this buffer and go on to the next, to avoid locking up the control
	// thread
	if (buffer == NULL) {
		ERROR("SoundPlayNode::FillNextBuffer: RequestBuffer failed\n");
		return NULL;
	}

	if (fPlayer->HasData()) {
		fPlayer->PlayBuffer(buffer->Data(),
			fOutput.format.u.raw_audio.buffer_size, fOutput.format.u.raw_audio);
	} else
		memset(buffer->Data(), 0, fOutput.format.u.raw_audio.buffer_size);

	// fill in the buffer header
	media_header* header = buffer->Header();
	header->type = B_MEDIA_RAW_AUDIO;
	header->size_used = fOutput.format.u.raw_audio.buffer_size;
	header->time_source = TimeSource()->ID();
	header->start_time = eventTime;

	return buffer;
}
Example #11
void 
LoggingConsumer::Preroll()
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_PREROLL, logMsg);

	BMediaEventLooper::Preroll();
}
Example #12
void 
LoggingConsumer::SetTimeSource(BTimeSource* time_source)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_SET_TIME_SOURCE, logMsg);

	BMediaNode::SetTimeSource(time_source);
}
Example #13
void 
LoggingConsumer::TimeWarp(bigtime_t at_real_time, bigtime_t to_performance_time)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_TIMEWARP, logMsg);

	BMediaEventLooper::TimeWarp(at_real_time, to_performance_time);
}
Example #14
void 
LoggingConsumer::Seek(bigtime_t media_time, bigtime_t performance_time)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_SEEK, logMsg);

	BMediaEventLooper::Seek(media_time, performance_time);
}
Example #15
void 
LoggingConsumer::Stop(bigtime_t performance_time, bool immediate)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_STOP, logMsg);

	BMediaEventLooper::Stop(performance_time, immediate);
}
Example #16
status_t 
LoggingConsumer::RequestCompleted(const media_request_info &info)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_REQUEST_COMPLETED, logMsg);

	return BMediaNode::RequestCompleted(info);
}
Example #17
void 
LoggingConsumer::SetRunMode(run_mode mode)
{
	// !!! Need to handle offline mode etc. properly!
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_SET_RUN_MODE, logMsg);

	BMediaEventLooper::SetRunMode(mode);
}
Example #18
void
TVideoPreviewView::Stop( bigtime_t performance_time, bool immediate)
{
	FUNCTION("TVideoPreviewView::Stop() @ %.4f, now: %.4f\n",
	         (double)performance_time/M1, (double)TimeSource()->Now()/M1);

	if (!mStarting || performance_time > mStartTime) {
		if (mRunning || mStarting) {
			mStopping = true;
			mStopTime = performance_time;
		}
	}

	if (immediate) {
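		// an immediate stop takes effect right away, at the current performance time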
		mRunning = false;
		mStopping = true;
		mStopTime = TimeSource()->Now();
	}
}
Example #19
status_t
TVideoPreviewView::GetLatencyFor(
	const media_destination & /* input */,
	bigtime_t* out_latency,
	media_node_id* out_timesource)
{
	FUNCTION("TVideoPreviewView::GetLatencyFor()\n");
	*out_latency = mMyLatency;
	*out_timesource = TimeSource()->ID();
	return B_OK;
}
Example #20
status_t
EqualizerNode::GetLatencyFor(const media_destination &dst, bigtime_t* latency,
                             media_node_id* outTimeSource)
{

    if (dst != fInputMedia.destination)
        return B_MEDIA_BAD_DESTINATION;

    *latency = fDownstreamLatency + fProcessLatency;
    *outTimeSource = TimeSource()->ID();
    return B_OK;
}
Example #21
void 
LoggingConsumer::Disconnected(
	const media_source& producer,
	const media_destination& where)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_DISCONNECTED, logMsg);

	// wipe out our input record
	memset(&mInput, 0, sizeof(mInput));
}
Example #22
status_t 
LoggingConsumer::GetLatencyFor(const media_destination& for_whom, bigtime_t* out_latency, media_node_id* out_timesource)
{
	// make sure this is one of my valid inputs
	if (for_whom != mInput.destination) return B_MEDIA_BAD_DESTINATION;

	// report internal latency + downstream latency here, NOT including scheduling latency.
	// we're a final consumer (no outputs), so we have no downstream latency.
	*out_latency = mLatency;
	*out_timesource = TimeSource()->ID();
	return B_OK;
}
Example #23
status_t
VideoNode::GetLatencyFor(const media_destination &dst,
						 bigtime_t *out_latency,
						 media_node_id *out_id)
{
	if (dst != fInput.destination)
		return B_MEDIA_BAD_DESTINATION;
	
	*out_latency = 10000;
	*out_id = TimeSource()->ID();
	return B_OK;
}
Example #24
void
EqualizerNode::ParameterEventProcessing(const media_timed_event* event)
{
    float value = 0.0;
    int32 value32 = 0;

    int32 id = event->bigdata;
    size_t size = event->data;
    bigtime_t now = TimeSource()->Now();

    type_code v_type = B_FLOAT_TYPE;

    BParameter* web_param;

    for (int i = 0; i < fWeb->CountParameters(); i++) {
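        // find the web parameter with this ID so we know how to interpret the event's payload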
        web_param = fWeb->ParameterAt(i);
        if (web_param->ID() == id) {
            v_type=web_param->ValueType();
            break;
        }
    }

    if (v_type == B_FLOAT_TYPE)
        value = *((float*)event->pointer);
    else if (v_type == B_INT32_TYPE) {
        value32 = *((int32*)event->pointer);
        value = (float)value32;
    }

    if (id == P_MUTE) {
        fMute = value32;
        fMuteLastChanged = now;
        BroadcastNewParameterValue(now, id, event->pointer, size);
    } else if (id == P_BYPASS) {
        fByPass = value32;
        fByPassLastChanged = now;
        BroadcastNewParameterValue(now, id, event->pointer, size);
    } else if (id == P_PREAMP) {
        if (value != fEqualizer.PreAmp()) {
            fEqualizer.SetPreAmp(value);
            fPreAmpLastChanged = now;
            BroadcastNewParameterValue(now, id, &value, size);
        }
    } else if (id >= P_BANDS && id < P_BANDS + fEqualizer.BandCount()) {
        int band = id - P_BANDS;
        if (value != fEqualizer.Band(band)) {
            fEqualizer.SetBand(band, value);
            fBandsLastChanged[band] = now;
            BroadcastNewParameterValue(now, id, &value, size);
        }
    }
}
Example #25
status_t 
LoggingConsumer::FormatChanged(
	const media_source& producer,
	const media_destination& consumer,
	int32 change_tag,
	const media_format& format)
{
	log_message logMsg;
	logMsg.now = TimeSource()->Now();
	mLogger->Log(LOG_FORMAT_CHANGED, logMsg);

	return B_OK;
}
Example #26
status_t
VideoConsumer::GetLatencyFor(const media_destination& whom,
	bigtime_t* _latency, media_node_id* _timeSource)
{
	FUNCTION("VideoConsumer::GetLatencyFor\n");
	
	if (whom != fIn.destination)
		return B_MEDIA_BAD_DESTINATION;
	
	*_latency = fMyLatency;
	*_timeSource = TimeSource()->ID();
	return B_OK;
}
Example #27
void 
ToneProducer::Start(bigtime_t performance_time)
{
	PRINT(("ToneProducer::Start(%Ld): now %Ld\n", performance_time, TimeSource()->Now()));

	// send 'data available' message
	if(mOutput.destination != media_destination::null)
		SendDataStatus(B_DATA_AVAILABLE, mOutput.destination, performance_time);

	// A bug in the current PowerPC compiler demands that we implement
	// this, even though it just calls up to the inherited implementation.
	BMediaEventLooper::Start(performance_time);
}
Example #28
void OffsetFilter::filterBuffer(BBuffer* inBuffer)
{
	if (!inBuffer) 
		return;
	/* here is where we do all of the real work */
	if (RunMode() != B_OFFLINE)
		CALL("FilterBuffer now: %Ld\n", TimeSource()->Now());
	else
		CALL("FilterBuffer now: %Ld\n", OfflineTime());

	media_header *inHeader = inBuffer->Header();

	CALL("now: %Ld start_time: %Ld\n", TimeSource()->Now(), inHeader->start_time);
	
	uint32 *inData = (uint32*) inBuffer->Data();

/* Sans BBitmap  */

	uint32	*po   = inData;
	uint32	*pi   = (uint32*)malloc(inHeader->size_used);
	uint32  *last = inData + inHeader->size_used/4;
	uint32 i=0,C,L,deltax,deltay,lin,col;
	
	memcpy(pi,inData,inHeader->size_used);
	
	C=m_format.u.raw_video.display.line_width;
	L=m_format.u.raw_video.display.line_count;
	
	deltax=C*DELTA_X/1000;
	deltay=L*DELTA_Y/1000;
	
	while (po < last) {
		*po++ = pi[(((i / C) + deltay) % L) * C + ((i % C) + deltax) % C];
		i++;	// incremented separately: i++ inside the index was unsequenced with the other reads of i
	}

	free(pi);
	
// Fin Sans Bitmap	
}
Example #29
void ESDSinkNode::NodeRegistered(void)
{
	CALLED();
	
	if (fInitCheckStatus != B_OK) {
		ReportError(B_NODE_IN_DISTRESS);
		return;
	}
	
//	media_input *input = new media_input;

	fInput.format = fPreferredFormat;
	fInput.destination.port = ControlPort();
	fInput.destination.id = 0;
	fInput.node = Node();
	sprintf(fInput.name, "output %ld", fInput.destination.id);
		
	fOutput.format = fPreferredFormat;
	fOutput.destination = media_destination::null;
	fOutput.source.port = ControlPort();
	fOutput.source.id = 0;
	fOutput.node = Node();
	sprintf(fOutput.name, "input %ld", fOutput.source.id);
		
	// Set up our parameter web
	fWeb = MakeParameterWeb();
	SetParameterWeb(fWeb);
	
	/* apply configuration */
#ifdef PRINTING
	bigtime_t start = system_time();
#endif
		
	int32 index = 0;
	int32 parameterID = 0;
	const void *data;
	ssize_t size;
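	// replay every stored parameter value from the saved configuration message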
	while(fConfig.FindInt32("parameterID", index, &parameterID) == B_OK) {
		if(fConfig.FindData("parameterData", B_RAW_TYPE, index, &data, &size) == B_OK)
			SetParameterValue(parameterID, TimeSource()->Now(), data, size);
		index++;
	}
	
#ifdef PRINTING
	PRINT(("apply configuration in : %lld\n", system_time() - start));
#endif

	SetPriority(B_REAL_TIME_PRIORITY);
	Run();
}
Example #30
status_t
AudioConsumer::GetLatencyFor(const media_destination& dst,
	bigtime_t* latency, media_node_id* time_src)
{
	// we have multiple inputs with different IDs, but
	// the port number must match our ControlPort()
	if (dst.port != ControlPort())
		return B_MEDIA_BAD_DESTINATION;

	*latency = EventLatency();
	*time_src = TimeSource()->ID();

	return B_OK;
}