Example #1
static void TumbleNoteFunction(FunctionEvent& p, EventBuffer& midiOutput) {
   static NoteEvent note;           // temporary note for placing in buffer

   TumbleParameters& param = tparam[p.charValue(0)];
   int newnote = limit(param.current + param.dir * param.n[param.pos], 0, 127);

   // turn off algorithm if someone turned the algorithm off externally
   // or if the current note is too large or too small.
   if (param.active == 0 || newnote < A0 || newnote > C7) {
      param.active = 0;
      p.off(midiOutput);
      return;
   }

   // set the parameters of the output note:
   note.setOnDur(t_time, param.d[param.pos]);   // off time holds dur
   note.setVel(param.v[param.pos]);
   note.setChan(p.getChan());
   note.setKey(newnote);
   note.activate();
   note.action(midiOutput);     // start right now, avoiding any buffer delay
   midiOutput.insert(note);     // store the note for turning off later

   // update the parameters for the tumble algorithm
   p.setOnTime(p.getOnTime() + param.i[param.pos]);
   param.current = newnote;
   param.pos++;
   if (param.pos >= param.n.getSize()) {
      param.pos = 0;
   }
}
Example #2
void createTrill(int key1, int key2, int velocity, int channel, int duration) {
   static FunctionEvent tn;   // a Temporary Note for copying into eventBuffer

   // key1 should always be smaller than key2
   int temp;
   if (key1 > key2) {
      temp = key1;
      key1 = key2;
      key2 = temp;
   }

   // setting the fields of the function note
   tn.setFunction(TrillAlgorithm);
   tn.setChannel(channel);
   tn.setKeyno(key1);
   tn.setVelocity(velocity);

   // set extra parameters
   tn.charValue(15) = 0;        // 0 = play key1 next, 1 = play key2 next
   tn.charValue(14) = key2;     // secondary pitch
   tn.intValue(10) = t_time;    // initialization time

   tn.setStatus(EVENT_STATUS_ACTIVE);

   // start time of function and the duration between calling it
   tn.setOnDur(t_time + duration, duration);

   eventBuffer.insert(tn);

   cout << "Trill = " << key1 << " to " << key2 
        << "\tRate = " << duration
        << endl;
}
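A minimal call-site sketch for the function above, assuming the improv environment globals it uses (t_time, eventBuffer) are already set up; the key numbers, velocity, and rate below are illustrative values only:

   // hypothetical caller, e.g. inside a keyboard-input handler:
   createTrill(60, 62, 64, 0, 100);   // whole-tone trill C4-D4, velocity 64,
                                      // channel 0, alternating every 100 time units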
Example #3
int startAlgorithm(TumbleParameters& p) {
   static FunctionEvent tn;   // a Temporary Note for copying into eventBuffer

   int ploc = storeParameters(tparam, p);
   if (ploc < 0) {
      cout << "Warning: Parameter space is full.  Not adding new algorithm"
           << endl;
      return -1;
   }

   // setting the fields of the function note
   tn.setFunction(TumbleNoteFunction);
   tn.setChannel(channel);
   tn.setKeyno(0);
   tn.setVelocity(0);
   tn.charValue(0) = (char)ploc;         // store location of the parameters
   tn.setStatus(EVENT_STATUS_ACTIVE);
   tn.setOnTime(t_time + p.i[0] - anticipation);

   // display the basic algorithm info
   cout << "Tumble: Time: " << t_time << "\tStart = " << (int)p.current
        << "\tPattern = . ";
   for (int i=1; i<p.n.getSize(); i++) {
      cout << (int)p.n[i] << " ";
   }
   cout << "(" << (int)p.n[0] << ")";
   cout << " ioi: " << p.i[0];
   cout << endl;

   return eventBuffer.insert(tn);
}
Example #4
void playgliss(int basenote, int loudness, int channel, int duration, 
      int distancee) { 
   static FunctionEvent tn;   // a Temporary Note for copying into eventBuffer
   
   // setting the fields of the function note
   tn.shortValue(14) = distancee;

   tn.setFunction(EnhanceFunction);
   tn.setChannel(channel);
   tn.setKeyno(basenote + distancee);
   tn.setVelocity(loudness - 5);
 
   tn.setStatus(EVENT_STATUS_ACTIVE);

   // start time of function and the duration between calling it
   tn.setOnDur(t_time, duration);

   eventBuffer.insert(tn);

   cout << "StartKey =    "  << basenote
        << "\tLoudness =  "  << loudness
        << "\tRate =      "  << duration
        << "\tDirection = "  << distancee
        << endl;
}
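Likewise, a hedged sketch of how playgliss might be invoked (the argument values are assumptions; duration acts as the inter-note rate and distancee sets the step size and direction of the gliss):

   playgliss(60, 100, 0, 50, 2);    // rising whole-tone gliss starting at middle C
   playgliss(72, 100, 0, 50, -1);   // falling chromatic gliss starting at C5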
Example #5
gd::String FunctionEvent::MangleFunctionName(const gd::Layout & layout, const FunctionEvent & functionEvent)
{
    //To generate a "unique" name for the function, the name is mangled and suffixed with the
    //pointer to the (original) event of the function.
    const gd::BaseEvent * ptr = &functionEvent;
    std::shared_ptr<gd::BaseEvent> originalEvent = functionEvent.originalEvent.lock();
    if (originalEvent != std::shared_ptr<gd::BaseEvent>()) {
        ptr = originalEvent.get();
    }

    return "GDFunction"+layout.GetMangledName()
        +gd::SceneNameMangler::GetMangledSceneName(functionEvent.GetName())
        +gd::String::From(ptr);
};
Example #6
void createDecay(int channel, int key, int duration, int velocity) {
   static FunctionEvent tn;   // temporary function for copying into eventBuffer

   tn.shortValue(14) = gap;    // gap between successive notes

   tn.setFunction(EchoAlgorithm);
   tn.setChannel(channel);
   tn.setKeyno(key);
   decaystates[key] = velocity * decayrate;
   tn.setVelocity((int)decaystates[key]);
   tn.setStatus(EVENT_STATUS_ACTIVE);

   // start time of function and the duration between calling it
   tn.setOnDur(t_time, duration);

   eventBuffer.insert(tn);

   cout << "Key=    "  << key
        << "\tDuration =  "  << duration + gap
        << "\tVelocity =  "  << velocity
        << endl;
}
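createDecay depends on several globals that fall outside this excerpt (decaystates, decayrate, gap). A plausible set of declarations, with values chosen purely for illustration:

   double decaystates[128] = {0.0};   // per-key velocity state for the echo algorithm
   double decayrate = 0.87;           // velocity multiplier applied to each echo (assumed value)
   int    gap       = 10;             // silence between echoes, same time units as duration (assumed value)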
Example #7
void NetStream::execute()
{
	if(downloader->hasFailed())
	{
		sys->currentVm->addEvent(this,Class<Event>::getInstanceS("ioError"));
		sys->downloadManager->destroy(downloader);
		return;
	}

	//The downloader hasn't failed yet at this point

	//mutex access to downloader
	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	//We need to catch possible EOF and other error condition in the non reliable stream
	uint32_t decodedAudioBytes=0;
	uint32_t decodedVideoFrames=0;
	//The decoded time is computed from the decodedAudioBytes to avoid drifts
	uint32_t decodedTime=0;
	bool waitForFlush=true;
	try
	{
		ScriptDataTag tag;
		Chronometer chronometer;
		STREAM_TYPE t=classifyStream(s);
		if(t==FLV_STREAM)
		{
			FLV_HEADER h(s);
			if(!h.isValid())
				throw ParseException("FLV is not valid");

			unsigned int prevSize=0;
			bool done=false;
			do
			{
				//Check if threadAbort has been called, if so, stop this loop
				if(closed)
					done = true;
				UI32 PreviousTagSize;
				s >> PreviousTagSize;
				PreviousTagSize.bswap();
				assert_and_throw(PreviousTagSize==prevSize);

				//Check tag type and read it
				UI8 TagType;
				s >> TagType;
				switch(TagType)
				{
					case 8:
					{
						AudioDataTag tag(s);
						prevSize=tag.getTotalLen();

						if(audioDecoder==NULL)
						{
							audioCodec=tag.SoundFormat;
							switch(tag.SoundFormat)
							{
								case AAC:
									assert_and_throw(tag.isHeader());
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,
											tag.packetData, tag.packetLen);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									tag.releaseBuffer();
									break;
								case MP3:
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,NULL,0);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									decodedAudioBytes+=
										audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
									//Adjust timing
									decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
									break;
								default:
									throw RunTimeException("Unsupported SoundFormat");
							}
							if(audioDecoder->isValid() && sys->audioManager->pluginLoaded())
								audioStream=sys->audioManager->createStreamPlugin(audioDecoder);
						}
						else
						{
							assert_and_throw(audioCodec==tag.SoundFormat);
							decodedAudioBytes+=
								audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
							if(audioStream==0 && audioDecoder->isValid() && sys->audioManager->pluginLoaded())
								audioStream=sys->audioManager->createStreamPlugin(audioDecoder);
							//Adjust timing
							decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
						}
						break;
					}
					case 9:
					{
						VideoDataTag tag(s);
						prevSize=tag.getTotalLen();
						//If the framerate is known give the right timing, otherwise use decodedTime from audio
						uint32_t frameTime=(frameRate!=0.0)?(decodedVideoFrames*1000/frameRate):decodedTime;

						if(videoDecoder==NULL)
						{
							//If the isHeader flag is on then the decoder becomes the owner of the data
							if(tag.isHeader())
							{
								//The tag is the header, initialize decoding
#ifdef ENABLE_LIBAVCODEC
								videoDecoder=
									new FFMpegVideoDecoder(tag.codec,tag.packetData,tag.packetLen, frameRate);
#else
								videoDecoder=new NullVideoDecoder();
#endif
								tag.releaseBuffer();
							}
							else if(videoDecoder==NULL)
							{
								//First packet but no special handling
#ifdef ENABLE_LIBAVCODEC
								videoDecoder=new FFMpegVideoDecoder(tag.codec,NULL,0,frameRate);
#else
								videoDecoder=new NullVideoDecoder();
#endif
								videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
								decodedVideoFrames++;
							}
							Event* status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start");
							getVm()->addEvent(this, status);
							status->decRef();
							status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Full");
							getVm()->addEvent(this, status);
							status->decRef();
						}
						else
						{
							videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
							decodedVideoFrames++;
						}
						break;
					}
					case 18:
					{
						tag = ScriptDataTag(s);
						prevSize=tag.getTotalLen();

						//The frameRate of the container overrides the stream
						
						if(tag.metadataDouble.find("framerate") != tag.metadataDouble.end())
							frameRate=tag.metadataDouble["framerate"];
						break;
					}
					default:
						LOG(LOG_ERROR,_("Unexpected tag type ") << (int)TagType << _(" in FLV"));
						threadAbort();
				}
				if(!tickStarted && isReady())
				{
					{
						multiname onMetaDataName;
						onMetaDataName.name_type=multiname::NAME_STRING;
						onMetaDataName.name_s="onMetaData";
						onMetaDataName.ns.push_back(nsNameAndKind("",NAMESPACE));
						ASObject* callback = client->getVariableByMultiname(onMetaDataName);
						if(callback && callback->getObjectType() == T_FUNCTION)
						{
							ASObject* callbackArgs[1];
							ASObject* metadata = Class<ASObject>::getInstanceS();
							if(tag.metadataDouble.find("width") != tag.metadataDouble.end())
								metadata->setVariableByQName("width", "", 
										abstract_d(tag.metadataDouble["width"]));
							else
								metadata->setVariableByQName("width", "", abstract_d(getVideoWidth()));
							if(tag.metadataDouble.find("height") != tag.metadataDouble.end())
								metadata->setVariableByQName("height", "", 
										abstract_d(tag.metadataDouble["height"]));
							else
								metadata->setVariableByQName("height", "", abstract_d(getVideoHeight()));

							if(tag.metadataDouble.find("framerate") != tag.metadataDouble.end())
								metadata->setVariableByQName("framerate", "", 
										abstract_d(tag.metadataDouble["framerate"]));
							if(tag.metadataDouble.find("duration") != tag.metadataDouble.end())
								metadata->setVariableByQName("duration", "", 
										abstract_d(tag.metadataDouble["duration"]));
							if(tag.metadataInteger.find("canseekontime") != tag.metadataInteger.end())
								metadata->setVariableByQName("canSeekToEnd", "", 
										abstract_b(tag.metadataInteger["canseekontime"] == 1));

							if(tag.metadataDouble.find("audiodatarate") != tag.metadataDouble.end())
								metadata->setVariableByQName("audiodatarate", "", 
										abstract_d(tag.metadataDouble["audiodatarate"]));
							if(tag.metadataDouble.find("videodatarate") != tag.metadataDouble.end())
								metadata->setVariableByQName("videodatarate", "", 
										abstract_d(tag.metadataDouble["videodatarate"]));

							//TODO: missing: audiocodecid (Number), cuePoints (Object[]), 
							//videocodecid (Number), custommetadata's
							callbackArgs[0] = metadata;
							client->incRef();
							metadata->incRef();
							FunctionEvent* event = 
								new FunctionEvent(static_cast<IFunction*>(callback), client, callbackArgs, 1);
							getVm()->addEvent(NULL,event);
							event->decRef();
						}
					}

					tickStarted=true;
					if(frameRate==0)
					{
						assert(videoDecoder->frameRate);
						frameRate=videoDecoder->frameRate;
					}
					sys->addTick(1000/frameRate,this);
					//Also ask for a render rate equal to the video one (capped at 24)
					float localRenderRate=dmin(frameRate,24);
					sys->setRenderRate(localRenderRate);
				}
				profile->accountTime(chronometer.checkpoint());
				if(aborting)
				{
					throw JobTerminationException();
				}
			}
			while(!done);
		}
		else
			threadAbort();

	}
Example #8
static void EchoAlgorithm(FunctionEvent& p, EventBuffer& midiOutput) {
   static NoteEvent note;            // temporary note before placing in buffer

   // check if pausing
   if (decaystates[p.getKey()] < 0.0) {
      p.setOnTime(p.getOnTime() + p.getDur() + p.shortValue(14)); 
      return;
   }
      
   // set the parameters for the output note:
   note.setOnDur(t_time, p.getOffTime()); // off time holds dur
   note.setVel(p.getVel());
   note.setChan(p.getChan());
   note.setKey(p.getKey());

   // update the parameters for the function:
   decaystates[p.getKey()] *= decayrate;
   p.setVel((int)decaystates[p.getKey()]);

   // if note is too quiet, end the note
   if (p.getVel() <= 2) {
      p.off(midiOutput);
      decaystates[p.getKey()] = 0.0;
   }

   // next time includes a gap so that key can raise on keyboard
   p.setOnTime(p.getOnTime() + p.getDur() + p.shortValue(14)); 

   note.activate();
   note.action(midiOutput);       // start right now, avoiding any buffer delay
   midiOutput.insert(note);       // the note off message is being buffered

}
Example #9
static void TrillAlgorithm(FunctionEvent& p, EventBuffer& midiOutput) {
   static NoteEvent note;           // temporary note before placing in buffer

   int key1 = p.getKey();           // lower key of trill
   int key2 = p.charValue(14);      // upper key of trill
   int state = p.charValue(15);     // which note to play next
   int starttime = p.intValue(10);  // when trill was started
   int i;
  
   // turn off the trill if there is a note played inside the trill
   int range1 = key1;
   int range2 = key2;
   if (range2 - range1 == 1) {
      range1--;
      range2++;
   }
   for (i=range1; i<=range2; i++) {
      if (noteontimes[i] > starttime) {
         p.off(midiOutput);
         return;
      }
   }

   // set the next note to play
   int key = state ? key2 : key1;
   state = !state;
   p.charValue(15) = state;

   // set the parameters for the output note:
   note.setOnDur(t_time, p.getDur()); 
   note.setVel(p.getVel());
   note.setChan(p.getChan());
   note.setKey(key);

   // update the parameters for the trill function:
   p.setOnTime(p.getOnTime() + p.getDur());  
   
   int value = p.getVel() + velcorrection;
   if (value < 100 && value > 3) {
      p.setVel(value);
   }
   if (p.getDur() + trillcorrection > MINTRIGTIME) {
      p.setDur(p.getDur() + trillcorrection);
   }

   note.activate();
   note.action(midiOutput);       // start right now, avoiding any buffer delay
   midiOutput.insert(note);       // the note off message is being buffered
}
Example #10
static void EnhanceFunction(FunctionEvent& p, EventBuffer& midiOutput) {
   static NoteEvent note;            // temporary note before placing in buffer

   // set the parameters for the output note:
   note.setOnDur(t_time, p.getOffTime()); // off time holds dur
   note.setVel(p.getVel());
   note.setChan(p.getChan());
   note.setKey(p.getKey());

   // if note is too quiet
   if (p.getVel() <= 5) {
      p.off(midiOutput);
   }

   // update the parameters for the function:
   p.setKey(p.getKey()+p.shortValue(14));
   p.setVel(p.getVel()-5);
   p.setOnTime(p.getOnTime() + p.getDur());  // OffTime stores duration

   note.activate();
   note.action(midiOutput);       // start right now, avoiding any buffer delay
   midiOutput.insert(note);       // the note off message is being buffered


   // check whether to kill the algorithm or not:

   // if note is off the range of the keyboard
   if (p.getKey() > C8 || p.getKey() < A0) {
      p.off(midiOutput);
   }

}