SamplePlayHandle::SamplePlayHandle( SampleBuffer* sampleBuffer ) :
	PlayHandle( TypeSamplePlayHandle ),
	m_sampleBuffer( sharedObject::ref( sampleBuffer ) ),
	m_doneMayReturnTrue( true ),
	m_frame( 0 ),
	m_ownAudioPort( true ),
	m_defaultVolumeModel( DefaultVolume, MinVolume, MaxVolume, 1 ),
	m_volumeModel( &m_defaultVolumeModel ),
	m_track( NULL ),
	m_bbTrack( NULL )
{
	setAudioPort( new AudioPort( "SamplePlayHandle", false ) );
}

// construct from a SampleTCO: reuse the owning sample track's AudioPort
SamplePlayHandle::SamplePlayHandle( SampleTCO* tco ) :
	PlayHandle( TypeSamplePlayHandle ),
	m_sampleBuffer( sharedObject::ref( tco->sampleBuffer() ) ),
	m_doneMayReturnTrue( true ),
	m_frame( 0 ),
	m_ownAudioPort( false ),
	m_defaultVolumeModel( DefaultVolume, MinVolume, MaxVolume, 1 ),
	m_volumeModel( &m_defaultVolumeModel ),
	m_track( tco->getTrack() ),
	m_bbTrack( NULL )
{
	setAudioPort( ( (SampleTrack *)tco->getTrack() )->audioPort() );
}

// construct from a file name; the handle creates and owns its own SampleBuffer and AudioPort
SamplePlayHandle::SamplePlayHandle( const QString& sampleFile ) :
	PlayHandle( TypeSamplePlayHandle ),
	m_sampleBuffer( new SampleBuffer( sampleFile ) ),
	m_doneMayReturnTrue( true ),
	m_frame( 0 ),
	m_ownAudioPort( true ),
	m_defaultVolumeModel( DefaultVolume, MinVolume, MaxVolume, 1 ),
	m_volumeModel( &m_defaultVolumeModel ),
	m_track( NULL ),
	m_bbTrack( NULL )
{
	setAudioPort( new AudioPort( "SamplePlayHandle", false ) );
}
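
// Hedged usage sketch (not part of the original sources): the file-name
// constructor above is the simplest way to make a sample audible. A handle
// created this way owns its AudioPort and only needs to be registered with
// the mixer; Engine::mixer()->addPlayHandle() also appears in the
// preset-preview example further below. previewSample() is a made-up name.
void previewSample( const QString& fileName )
{
	// the constructor allocates its own AudioPort (m_ownAudioPort == true)
	SamplePlayHandle* handle = new SamplePlayHandle( fileName );
	Engine::mixer()->addPlayHandle( handle );
}
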
int SdpBuild::checkCodec( bool testReInvite, SipCallMember *member, bool forceHold, bool *srtpErr )
{
	bool holdMe = false;
	cntParam = 0;
	setRecvonly( false );
	ReInvite = testReInvite;
	int status = (int) SipCall::GarKeinCall;
	int isRtpMap = 0;
	int i = 0, j = 0;
	QString transT;
	bool evaluateV = false, evaluateA = false, evaluateAV = false;
	QString mstr = member->getSessionDescription();
	QString ipaddress;
	QString curline;
	QString portstr;
	QString codecstr;
	QString codecstr1;
	QString codecstr2;
	int receivedAudioMediaNumber = 0, receivedVideoMediaNumber = 0;
	QString dp = ":";
	QString sec;
	mstr += '\n';	// guarantee termination
	if( mstr.left( mstr.find( '\n' ) ) != "v=0" ) {
		return doReturn( status );
	}
	mstr.remove( 0, mstr.find( '\n' ) + 1 );

	while( mstr.length() > 2 ) {
		curline = mstr.left( mstr.find( '\n' ) );
		mstr.remove( 0, mstr.find( '\n' ) + 1 );
		if( curline.left( curline.find( '=' ) ) == "c" ) {
			if( curline.contains( "IP4" ) ) {
				ipaddress = curline.mid( curline.find( "IP4" ) + 4 ).stripWhiteSpace();
			} else {
				ipaddress = curline.mid( curline.find( "IP6" ) + 4 ).stripWhiteSpace();
			}
		}
		if( curline.contains("m=audio")) {
			if(ipaddress.contains("0.0.0.0") || ipaddress.contains("::0") || forceHold){
                                status = (int) SipCall::putMeOnHold;
				setExtHost(ipaddress);
				holdMe = true;
			} else {
				if(evaluateV==true) {
					evaluateAV=true;
					status = (int)SipCall::auviCall;
				} else {
					status = (int)SipCall::StandardCall;
					evaluateA=true;
				}
				if ( ( curline.contains("RTP/SAVP") && (sessionC->getSrtpMode() != 1) )|| ( curline.contains("RTP/AVP") && (sessionC->getSrtpMode() != 0) ) ) {
				    *srtpErr = true;
				}
				setAudioPort(curline.section(' ',1,1));
				setExtHost(ipaddress);
				int i = (-1);
				do {
					i++;
					acc[i] = curline.section(' ', i+3, i+3);
					
				} while (acc[i].length() != 0);
				receivedAudioMediaNumber = i;
					
			}
                }
                if( curline.contains("m=video")) {
			if(ipaddress.contains("0.0.0.0")|| ipaddress.contains("::0") || forceHold){
                        	status = (int) SipCall::putMeOnHold;
				setExtHost(ipaddress);
				holdMe = true;
			} else {
				if(evaluateA==true) {
					evaluateAV=true;
					status = (int)SipCall::auviCall;
				} else {
					status = (int)SipCall::videoCall;
					evaluateV=true;
				}
				setExtPort(curline.section(' ',1,1));
				setExtHost(ipaddress);
				int i = (-1);
				do {
					i++;
					vcc[i] = curline.section(' ', i+3, i+3);
					
				} while (vcc[i].length() != 0);
				receivedVideoMediaNumber = i;
                        }
                } 
		if( curline.contains( "m=application" ) ) {
			if( ipaddress.contains( "0.0.0.0" ) || ipaddress.contains( "::0" ) || forceHold ) {
				status = (int) SipCall::putMeOnHold;
				setExtHost( ipaddress );
				holdMe = true;
			} else {
				setExtHost( ipaddress );
				setExtPort( curline.section( ' ', 1, 1 ) );
				transT = curline.section( ' ', 2, 2 );
				sec = curline.section( ' ', 3, 3 );
				status = checkVectorId( status, sec ); // find the payload type
				call->setRemoteStart( false );
				if( status == 0 ) {
					return doReturn( status ); // error
				}
				if( !testTrans( status, transT ) ) {
					return doReturn( 0 );
				}
			}
		}
		if( curline.contains("a=")) {
			if (curline.contains("a=sendonly") || curline.contains("a=inactive") || curline.contains("a=recvonly") ) {
                    	    status = (int) SipCall::putMeOnHold;
			    if( curline.contains("a=sendonly")) {
			    setRecvonly(true);
			    }
			    if(KStatics::haveIPv6){
				setExtHost( "::0");
			    } else { 
				setExtHost("0.0.0.0");
			    }
    			    return  doReturn (status);
			}
			if (holdMe) {
			    return  doReturn (status); //hold was detected by ipaddress
			}
 
			//
			if (curline.contains("a=fmtp:" + sec) ) {// we have a=vec or a=cmd
				if(curline.contains("rstart")) {
				call->setRemoteStart(true); 
				return  doReturn (status);
				}
			}
			// we look for audio and video
			if( curline.contains( "a=rtpmap" ) ) {
				isRtpMap++;
				codecstr = curline.section( ' ', 1, 1 );
				codecstr1 = curline.section( ':', 1, 1 );
				codecstr2 = codecstr1.section( ' ', 0, 0 );
				// store the received media attributes
				bool concerningAudio = false;
				// search for audio
				i = 0;
				while( i < receivedAudioMediaNumber ) {
					if( codecstr2 == acc[i] ) {
						acstr[i] = codecstr.section( '/', 0, 0 );
						concerningAudio = true;
						break;
					}
					i++;
				}
				// search for video, if not audio
				if( concerningAudio == false ) {
					for( i = 0; i < receivedVideoMediaNumber; i++ ) {
						if( codecstr2 == vcc[i] ) {
							vcstr[i] = codecstr.section( '/', 0, 0 );
							break;
						}
					}
				}
			}
			if( curline.contains( "a=fmtp:97" ) ) {
				// looks strange, but works, as only iLBC uses responsePay
				if( curline.contains( "mode=30" ) ) tempresponsepay = 240;
				if( curline.contains( "mode=20" ) ) tempresponsepay = 160;
			}
		}
	} // END OF WHILE-LOOP

	// fill in codec names for payload types that came without an a=rtpmap line
	if( ( isRtpMap < receivedAudioMediaNumber ) && ( evaluateA || evaluateV || evaluateAV ) ) {
		for( i = 0; i <= receivedAudioMediaNumber; i++ ) {
			for( j = 0; j <= NAUDIO; j++ ) {
				if( acc[i] == sessionC->getACodecNum( j ) ) {
					if( acstr[i] == "" ) acstr[i] = sessionC->getACodecName( j );
					break;
				}
			}
		}

		for( i = 0; i <= receivedVideoMediaNumber; i++ ) {
			for( j = 0; j <= NAUDIO; j++ ) {
				if( vcc[i] == sessionC->getCodecNum( j ) ) {
					vcstr[i] = sessionC->getCodecName( j );
					break;
				}
			}
		}
	}

	if( evaluateV || evaluateAV ) {
		// *** VIDEO ***
		if( videoMediaNegotiationINVITE( receivedVideoMediaNumber ) == false ) {
			status = 0;	// no acceptable video codec for us; the audio check is no longer necessary
			return doReturn( status );
		}
	}
	if( evaluateA || evaluateAV ) {
		// *** AUDIO ***
		if( audioMediaNegotiationINVITE( receivedAudioMediaNumber ) == false ) {
			status = 0;	// no acceptable audio codec for us
			return doReturn( status );
		}
	}
	// we have a good codec
	return doReturn( status );
}
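
// For reference, a hedged sketch of the kind of SDP answer checkCodec() walks
// through (addresses and payload numbers are made up; only the field layout
// matters). The parser requires "v=0" on the first line, takes the peer address
// from c=, the port and payload type numbers from the m= lines, the codec names
// from a=rtpmap, and "a=fmtp:97 mode=..." selects the iLBC frame size at the
// end of the loop:
//
//	v=0
//	c=IN IP4 192.0.2.10
//	m=audio 8000 RTP/AVP 0 8 97
//	a=rtpmap:0 PCMU/8000
//	a=rtpmap:8 PCMA/8000
//	a=rtpmap:97 iLBC/8000
//	a=fmtp:97 mode=30
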
NotePlayHandle::NotePlayHandle( InstrumentTrack* instrumentTrack,
								const f_cnt_t _offset,
								const f_cnt_t _frames,
								const Note& n,
								NotePlayHandle *parent,
								int midiEventChannel,
								Origin origin ) :
	PlayHandle( TypeNotePlayHandle, _offset ),
	Note( n.length(), n.pos(), n.key(), n.getVolume(), n.getPanning(), n.detuning() ),
	m_pluginData( NULL ),
	m_filter( NULL ),
	m_instrumentTrack( instrumentTrack ),
	m_frames( 0 ),
	m_totalFramesPlayed( 0 ),
	m_framesBeforeRelease( 0 ),
	m_releaseFramesToDo( 0 ),
	m_releaseFramesDone( 0 ),
	m_subNotes(),
	m_released( false ),
	m_hasParent( parent != NULL  ),
	m_parent( parent ),
	m_hadChildren( false ),
	m_muted( false ),
	m_bbTrack( NULL ),
	m_origTempo( Engine::getSong()->getTempo() ),
	m_origBaseNote( instrumentTrack->baseNote() ),
	m_frequency( 0 ),
	m_unpitchedFrequency( 0 ),
	m_baseDetuning( NULL ),
	m_songGlobalParentOffset( 0 ),
	m_midiChannel( midiEventChannel >= 0 ? midiEventChannel : instrumentTrack->midiPort()->realOutputChannel() ),
	m_origin( origin ),
	m_frequencyNeedsUpdate( false )
{
	lock();
	if( hasParent() == false )
	{
		m_baseDetuning = new BaseDetuning( detuning() );
		m_instrumentTrack->m_processHandles.push_back( this );
	}
	else
	{
		m_baseDetuning = parent->m_baseDetuning;

		parent->m_subNotes.push_back( this );
		parent->m_hadChildren = true;

		m_bbTrack = parent->m_bbTrack;

		parent->setUsesBuffer( false );
	}

	updateFrequency();

	setFrames( _frames );

	// inform attached components about new MIDI note (used for recording in Piano Roll)
	if( m_origin == OriginMidiInput )
	{
		m_instrumentTrack->midiNoteOn( *this );
	}

	if( hasParent() || ! m_instrumentTrack->isArpeggioEnabled() )
	{
		const int baseVelocity = m_instrumentTrack->midiPort()->baseVelocity();

		// send MidiNoteOn event
		m_instrumentTrack->processOutEvent(
			MidiEvent( MidiNoteOn, midiChannel(), midiKey(), midiVelocity( baseVelocity ) ),
			MidiTime::fromFrames( offset(), Engine::framesPerTick() ),
			offset() );
	}

	if( m_instrumentTrack->instrument()->flags() & Instrument::IsSingleStreamed )
	{
		setUsesBuffer( false );
	}

	setAudioPort( instrumentTrack->audioPort() );

	unlock();
}
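
// Hedged usage sketch (modelled on the preset-preview example below, not a
// verbatim call site): note-play-handles are normally obtained from the
// NotePlayHandleManager pool and then registered with the mixer. The helper
// name playSingleNote() is made up; track is assumed to be a valid instrument track.
void playSingleNote( InstrumentTrack* track )
{
	// offset 0, effectively unlimited length (same trick as the preview handle),
	// default key at volume 100
	NotePlayHandle* nph = NotePlayHandleManager::acquire(
			track, 0, typeInfo<f_cnt_t>::max() / 2,
			Note( 0, 0, DefaultKey, 100 ) );
	Engine::mixer()->addPlayHandle( nph );
}
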
PresetPreviewPlayHandle::PresetPreviewPlayHandle( const QString & _preset_file, bool _load_by_plugin, DataFile *dataFile ) :
	PlayHandle( TypePresetPreviewHandle ),
	m_previewNote( NULL )
{
	s_previewTC->lockData();

	setUsesBuffer( false );

	if( s_previewTC->previewNote() != NULL )
	{
		s_previewTC->previewNote()->mute();
	}


	const bool j = Engine::projectJournal()->isJournalling();
	Engine::projectJournal()->setJournalling( false );

	if( _load_by_plugin )
	{
		Instrument * i = s_previewTC->previewInstrumentTrack()->instrument();
		const QString ext = QFileInfo( _preset_file ).
							suffix().toLower();
		if( i == NULL || !i->descriptor()->supportsFileType( ext ) )
		{
			i = s_previewTC->previewInstrumentTrack()->
				loadInstrument(pluginFactory->pluginSupportingExtension(ext).name());
		}
		if( i != NULL )
		{
			i->loadFile( _preset_file );
		}
	}
	else
	{
		bool dataFileCreated = false;
		if( dataFile == 0 )
		{
			dataFile = new DataFile( _preset_file );
			dataFileCreated = true;
		}

		// vestige previews are bug prone; fallback on 3xosc with volume of 0
		// without an instrument in preview track, it will segfault
		if(dataFile->content().elementsByTagName( "vestige" ).length() == 0 )
		{
			s_previewTC->previewInstrumentTrack()->
					loadTrackSpecificSettings(
						dataFile->content().firstChild().toElement() );
		}
		else
		{
			s_previewTC->previewInstrumentTrack()->loadInstrument("tripleoscillator");
			s_previewTC->previewInstrumentTrack()->setVolume( 0 );
		}
		if( dataFileCreated )
		{
			delete dataFile;
		}
	}
	dataFile = 0;
	// make sure, our preset-preview-track does not appear in any MIDI-
	// devices list, so just disable receiving/sending MIDI-events at all
	s_previewTC->previewInstrumentTrack()->
				midiPort()->setMode( MidiPort::Disabled );

	// create note-play-handle for it
	m_previewNote = NotePlayHandleManager::acquire(
			s_previewTC->previewInstrumentTrack(), 0,
			typeInfo<f_cnt_t>::max() / 2,
				Note( 0, 0, DefaultKey, 100 ) );

	setAudioPort( s_previewTC->previewInstrumentTrack()->audioPort() );

	s_previewTC->setPreviewNote( m_previewNote );

	Engine::mixer()->addPlayHandle( m_previewNote );

	s_previewTC->unlockData();
	Engine::projectJournal()->setJournalling( j );
}

InstrumentPlayHandle::InstrumentPlayHandle( Instrument * instrument, InstrumentTrack* instrumentTrack ) :
		PlayHandle( TypeInstrumentPlayHandle ),
		m_instrument( instrument )
{
	setAudioPort( instrumentTrack->audioPort() );
}