void SpaceShip::update(float elapsedTime)
{
	// Example on how to accelerate SpaceShip
	if(thrust){
		addThrust(5);
	} 
	else 
	{
		speed = 0;
	}

	// How firing could be handled with events
	if(isFiring)
	{
		ofSpaceShipFireEventArgs e = {position, ofPoint(cos(rotation), sin(rotation))};
		ofNotifyEvent(spaceShipFires, e, this);
		invulnerable = false;
		this->createBullets();
	}

	if (isTurning) {
		float temp = PI / (speed+10);
		if (turningRight) rotation += temp;
		else rotation -= temp;
	}

	direction = ofPoint( cos(rotation), sin(rotation) );

	position.x -= speed*direction.x*elapsedTime/2;
	position.y -= speed*direction.y*elapsedTime/2;

	marginsWrap();

	// each player updates its own bullets
	for (Bullet* bullet : bullets) bullet->update(elapsedTime);
}
示例#2
0
// Drains the serial input, accumulating bytes into messageBuffer until a
// '\n' terminator arrives; the completed line is published through the
// NEW_MESSAGE event and this object detaches itself from the per-frame
// update event.
// NOTE(review): only bytesReturned[0] is inspected per readBytes() call —
// presumably NUM_BYTES is 1; confirm, otherwise extra bytes are dropped.
void ofxSimpleSerial::read()
{
	// if we've got new bytes
	if(available() > 0)
	{
		// we will keep reading until nothing is left
		while (available() > 0)
		{
			// we'll put the incoming bytes into bytesReturned
			readBytes(bytesReturned, NUM_BYTES);

			// if we find the splitter we put all the buffered messages 
			//   in the final message, stop listening for more data and 
			//   notify a possible listener
			// else we just keep filling the buffer with incoming bytes. 
			if(*bytesReturned == '\n')
			{
				message = messageBuffer;
				messageBuffer = "";
				// Stop listening until the owner starts a new read cycle.
				ofRemoveListener(ofEvents().update, this, &ofxSimpleSerial::update);
				ofNotifyEvent(NEW_MESSAGE,message,this);

				break;
			}
			else 
			{
				// Skip carriage returns so "\r\n" and "\n" line endings
				// produce identical messages.
				if(*bytesReturned != '\r')
					messageBuffer += *bytesReturned;
			}
			//cout << "  messageBuffer: " << messageBuffer << "\n";
		}

		// clear the raw byte scratch buffer (messageBuffer is kept)
		memset(bytesReturned,0,NUM_BYTES);
	}
}
// Per-tick countdown step: plays a tick sound for intermediate counts and,
// on the final count (num - 1), fires COUNTER_REACHED, starts the fade
// tween, plays the shutter sound and stops the countdown. The counter then
// advances and wraps so it always stays in [0, num).
void CountDown::update(ofEventArgs &args)
{
    const bool reachedEnd = (mCount == num - 1);

    if (!reachedEnd)
    {
        button.play();
        ofLog(OF_LOG_NOTICE, "Counter : " + ofToString(mCount));
    }
    else
    {
        // The event carries no payload; a static instance avoids
        // re-constructing the args on every completion.
        static ofEventArgs timerEventArgs;
        ofNotifyEvent(COUNTER_REACHED, timerEventArgs, this);
        ofLog(OF_LOG_NOTICE, string(typeid(*this).name()) + " " + string(__FUNCTION__) + " Counter Reached");
        tween.setParameters(1, linearEase, ofxTween::easeOut, 1, 0, 1000, 500);
        fireComplete = false;
        shutter.play();
        stop();
    }

    // Advance and wrap the counter.
    mCount = (mCount + 1) % num;
}
示例#4
0
void ImageButton::deselect()
{
    _selected = false;
    const UserInterfaceEvent args(type);
    ofNotifyEvent(_buttonDeselectEvent, args, this);
}
示例#5
0
//------------------------------------------
// Forwards a message to the global core messageEvent and reports whether
// notification completed (i.e. no listener aborted propagation).
bool ofSendMessage(ofMessage msg){
	const bool attended = ofNotifyEvent(ofEvents().messageEvent, msg);
	return attended;
}
示例#6
0
//------------------------------------------
bool ofCoreEvents::notifyWindowResized(int width, int height){
	ofResizeEventArgs resizeEventArgs(width,height);
	return ofNotifyEvent( windowResized, resizeEventArgs );
}
示例#7
0
//------------------------------------------
bool ofCoreEvents::notifyMouseExited(int x, int y){
	ofMouseEventArgs mouseEventArgs(ofMouseEventArgs::Exited,x,y);
	return ofNotifyEvent( mouseExited, mouseEventArgs );
}
示例#8
0
//------------------------------------------
// Fires the per-frame update event using the shared empty-args instance.
bool ofCoreEvents::notifyUpdate(){
	return ofNotifyEvent( update, voidEventArgs );
}
// Audio-input callback: feeds the incoming buffer to the aubio analyzers
// and derives pitch, silence, energy and onset notifications for listeners.
// NOTE(review): this runs on the sound-stream callback thread, so event
// listeners fire on that thread as well — confirm they are thread-safe.
void PMDeviceAudioAnalyzer::audioIn(float *input, int bufferSize, int nChannels)
{
    // Init of audio event params struct
    pParams.deviceID = deviceID;
    pParams.audioInputIndex = audioInputIndex;
    // NOTE(review): these locals shadow their own type names; silenceParams
    // and freqBandsParams are filled in but never used below — presumably
    // leftovers from a refactor. Confirm before removing.
    silenceParams silenceParams;
    silenceParams.deviceID = deviceID;
    silenceParams.audioInputIndex = audioInputIndex;
    eParams.deviceID = deviceID;
    eParams.audioInputIndex = audioInputIndex;
    onsetParams onsetParams;
    onsetParams.deviceID = deviceID;
    onsetParams.audioInputIndex = audioInputIndex;
    freqBandsParams freqBandsParams;
    freqBandsParams.deviceID = deviceID;
    freqBandsParams.audioInputIndex = audioInputIndex;

    aubioOnset->setThreshold(onsetsThreshold);

    // Push the raw buffer through every analyzer.
    aubioPitch->audioIn(input, bufferSize, nChannels);
    aubioOnset->audioIn(input, bufferSize, nChannels);
    aubioMelBands->audioIn(input, bufferSize, nChannels);

    /// PITCH
    ///////////
    float currentMidiNote = aubioPitch->latestPitch;
    float pitchConfidence = aubioPitch->pitchConfidence;
    pParams.confidence = pitchConfidence;
    pParams.minConfidence = 0.5;
    // Low-confidence estimates are replaced by the midpoint of the
    // configured pitch range rather than being discarded.
    if(pitchConfidence<pParams.minConfidence)
    {
        currentMidiNote = pParams.min + (pParams.max-pParams.min) / 2.0;
    }
    //cout << "Pitch Confidence " << aubioPitch->pitchConfidence << endl;
    // SILENCE
    ////////////

    float absMean = getAbsMean(input,bufferSize);

    // NOTE(review): eParams.smoothedEnergy is recomputed further below, so
    // this comparison uses the value from the PREVIOUS buffer — one-buffer
    // latency in silence detection; confirm this is intentional.
    bool nowIsSilent = (eParams.smoothedEnergy < silenceThreshold);
    
    if(nowIsSilent && !alreadyInSilence)
    {
        silenceStarted();
//        cout << "SILENCE STARTED" << endl;
    }
    // Only report silence once it has lasted longer than the configured
    // debounce interval (silenceTimeTreshold).
    else if(nowIsSilent && ( (ofGetElapsedTimeMillis() - notSilenceBeginTime) > silenceTimeTreshold ) )
    {
        notifySilence(true);
//        cout << "SILENCE BROKEN DURATION DONE  ... Sent TRUE!! Mean : " << absMean << " __ thrs." << silenceThreshold  << endl;
        
    }
    else if(!nowIsSilent)
    {
        notifySilence(false);
        silenceStopped();
//        cout << "SILENCE ENDED ... Sent FALSE!" << absMean << " __ thrs." << silenceThreshold  << endl;
    }
    else
    {
        // Silent, but not yet past the debounce interval: do nothing.
//        cout << "ELSE " << (ofGetElapsedTimeMillis() - notSilenceBeginTime) << " .... " << silenceTimeTreshold << endl;
    }

//    {
//        if (wasSilent != isSilent) // Changes in silence (ON>OFF or OFF>ON)
//        {
//            if (isSilent)
//            {
//                silenceStarted();
//            } else
//            {
//                silenceEnded();
//            }
//            wasSilent = isSilent;
//        }
//
//        if (isInSilence)
//            sendSilenceEvent();
//    }

//    if(isSilent) cout << "in silence..." << endl;
//    else cout << "not silent" << endl;
    
    // Pitch
    {
        if (currentMidiNote)
        {
            pParams.midiNote = currentMidiNote;
            // One-pole smoothing: deltaPitch blends the new note with the
            // previous smoothed value, then maps into [0,1] (clamped).
            float pitchDelted = (pParams.deltaPitch)*pParams.midiNote + (1.0 - pParams.deltaPitch)*oldPitch;
            pParams.smoothedPitch = ofMap(pitchDelted,pParams.min,pParams.max,0,1,true);
    	    oldPitch = pitchDelted;
            ofNotifyEvent(eventPitchChanged, pParams, this);

            // Keep a bounded history of recent notes for melody analysis.
            midiNoteHistory.push_front(currentMidiNote);

            if (midiNoteHistory.size() > ascDescAnalysisSize)
                midiNoteHistory.pop_back();

            // MELODY DIRECTION
            checkMelodyDirection();
        } else {
            // No pitch this buffer: let the history drain gradually.
            if (midiNoteHistory.size() > 0)
                midiNoteHistory.pop_back();
        }
    }

    // Energy
    {
        // Raw Energy
        eParams.energy = absMean;

        // Smoothed and Mapped Energy = energySmoothed
	// NaN guard without isnan(): a NaN never compares equal to itself.
	//isnan(oldEnergy);
	if(oldEnergy!=oldEnergy)
		oldEnergy=0.5;
        float energyDelted =(eParams.deltaEnergy)*eParams.energy + (1.0 - eParams.deltaEnergy)*oldEnergy;

//cout << "-" << eParams.deltaEnergy << "-"<< eParams.energy << "-" << oldEnergy << endl;
        eParams.smoothedEnergy = ofMap(energyDelted*digitalGain,eParams.min,eParams.max,0,1,false);
//	cout << "absMean. " << absMean << "/ smoth. " << eParams.smoothedEnergy << " / min. " << eParams.min << " / max. "<< eParams.max << " / energyDelted " << energyDelted << " digitalGain " << digitalGain << endl;
            
        oldEnergy = energyDelted;

	// Only notify when absMean is a real number (same self-compare trick).
	if ((absMean == absMean) /* && (eParams.smoothedEnergy == eParams.smoothedEnergy) */)
		ofNotifyEvent(eventEnergyChanged, eParams, this);
    }

    // Shhhht
    {
//        if (!isSilent) checkShtSound();
    }

    // Onsets
    {
        // Edge-triggered: notify only when the onset state flips.
        bool isOnset = aubioOnset->received();
        if (oldOnsetState != isOnset)
        {
            oldOnsetState = isOnset;
            onsetParams.isOnset = isOnset;
            ofNotifyEvent(eventOnsetStateChanged, onsetParams, this);
        }
    }

    //Call to the Recorder
    // FIXME: Això no hauria d'estar a l'analitzador d'àudio, sino fora!!!
    // (This shouldn't be in the audio analyzer, but outside!)
    // FIXME: Now records all channels, better to chose how many chanels to record
    // NOTE(review): passes the member(?) inChannels rather than the nChannels
    // parameter — confirm they always agree.
    if (PMRecorder::getInstance().isRecording()) {
        PMRecorder::getInstance().addAudioBuffer(input, bufferSize, inChannels);
    }

//    cout << "g:" << digitalGain << " st:" << silenceThreshold << " sl:" << silenceTimeTreshold << endl;
}
示例#10
0
void SoundManager::onSoundPlayerLoaded() {
    static ofEventArgs evt;
    ofNotifyEvent(soundLoaded, evt, this);
}
//--------------------------------------------------------------
// Worker-thread loop for ofxThreadedVideo.
//
// Each iteration tries to enter the shared critical section (guarded by
// ofxThreadedVideoGlobalMutex plus the global/instance flags so only one
// instance works at a time), applies at most one queued command addressed
// to this instance to the underlying player, advances any active fades,
// then releases the critical section. Load results are reported through
// threadedVideoEvent.
//
// Locking protocol: lock()/unlock() guard this instance's state;
// ofxThreadedVideoGlobalMutex guards the shared critical-section flags.
void ofxThreadedVideo::threadedFunction(){

    while (isThreadRunning()){

        lock();
        ofxThreadedVideoGlobalMutex.lock();
        if(!ofxThreadedVideoGlobalCritical && !bCriticalSection){
            ofxThreadedVideoGlobalCritical = true;
            bCriticalSection = true;
            int videoID = currentVideoID;
            ofxThreadedVideoCommand c = getCommand();
            bool bCanLoad = !bLoaded;
            bool bPopCommand = false;
            unlock();
            ofxThreadedVideoGlobalMutex.unlock();

            if(c.getInstance() == instanceID){

                if(c.getCommand() == "play"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    video[videoID].play();

                    lock();
                    bIsPlaying = true;
                    bIsPaused = false;
                    unlock();

                    bPopCommand = true;
                }

                if(c.getCommand() == "setPosition"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    lock();
                    position = c.getArgument<float>(0);
                    unlock();
                    video[videoID].setPosition(position);
                    bPopCommand = true;
                }

                if(c.getCommand() == "setVolume"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    lock();
                    volume = c.getArgument<float>(0);
                    unlock();
                    video[videoID].setVolume(volume);
                    bPopCommand = true;
                }

#ifdef USE_QUICKTIME_7
                if(c.getCommand() == "setPan"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    lock();
                    pan = c.getArgument<float>(0);
                    unlock();
                    video[videoID].setPan(pan);
                    bPopCommand = true;
                }
#endif

                if(c.getCommand() == "setLoopState"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    lock();
                    loopState = (ofLoopType)c.getArgument<int>(0);
                    unlock();
                    video[videoID].setLoopState(loopState);
                    bPopCommand = true;
                }

                if(c.getCommand() == "setPaused"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    lock();
                    bIsPaused = c.getArgument<bool>(0);
                    unlock();
                    video[videoID].setPaused(bIsPaused);
                    bPopCommand = true;
                }

                if(c.getCommand() == "setAnchorPercent"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    // FIX: the y coordinate is argument 1 (both args were index 0).
                    video[videoID].setAnchorPercent(c.getArgument<float>(0), c.getArgument<float>(1));
                    bPopCommand = true;
                }

                if(c.getCommand() == "setAnchorPoint"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    // FIX: was a copy-paste of the setAnchorPercent handler —
                    // called setAnchorPercent with argument 0 used twice.
                    video[videoID].setAnchorPoint(c.getArgument<float>(0), c.getArgument<float>(1));
                    bPopCommand = true;
                }

                if(c.getCommand() == "resetAnchor"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    video[videoID].resetAnchor();
                    bPopCommand = true;
                }

                if(c.getCommand() == "setFade"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();

                    int frameEnd;
                    int frameStart = c.getArgument<int>(0);
                    int durationMillis = c.getArgument<int>(1);
                    float fadeTarget = c.getArgument<float>(2);
                    bool fadeSound = c.getArgument<bool>(3);
                    bool fadeVideo = c.getArgument<bool>(4);
                    bool fadeOnce = c.getArgument<bool>(5);

                    // FIX: CLAMP is a value-returning macro; the result was
                    // discarded, so out-of-range targets went unclamped.
                    fadeTarget = CLAMP(fadeTarget, 0.0f, 1.0f);

                    // NOTE(review): frame math assumes 25 fps — confirm.
                    if(frameStart == -1){ // fade is durationMillis from the end
                        frameEnd = frameTotal;
                        frameStart = frameTotal - ((float)durationMillis / 1000.0) * 25.0;
                    }else{
                        frameEnd = frameStart + ((float)durationMillis / 1000.0) * 25.0;
                    }

                    if(frameStart == frameEnd){
                        // Zero-length fade: apply the target immediately.
                        _fade = fadeTarget;
                        if(fadeVideo) fade = _fade;
                        lock();
                        if(fadeSound) video[videoID].setVolume(_fade);
                        unlock();
                    }else{
                        frameEnd -= 1;
                        fades.push_back(ofxThreadedVideoFade(frameStart, frameEnd, fadeTarget, fadeSound, fadeVideo, fadeOnce));
                    }

                    bPopCommand = true;
                }

#ifdef USE_JACK_AUDIO
                if(c.getCommand() == "setAudioTrackToChannel"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    video[videoID].setAudioTrackToChannel(c.getArgument<int>(0), c.getArgument<int>(1), c.getArgument<int>(2));
                    bPopCommand = true;
                }

                if(c.getCommand() == "setAudioDevice"){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();
                    video[videoID].setAudioDevice(c.getArgument<string>(0));
                    bPopCommand = true;
                }
#endif

                if(c.getCommand() == "loadMovie" && bCanLoad){
                    if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString();

                    if(video[videoID].loadMovie(c.getArgument<string>(0))){

                        if(bVerbose) ofLogVerbose() << instanceID << " = " << c.getCommandAsString() << " executed in thread";

                        // FIX: this lock() had been commented out while the
                        // matching unlock() below remained, leaving the state
                        // writes unguarded and unlocking an un-held mutex.
                        lock();

                        fades.clear();
                        width = video[videoID].getWidth();
                        height = video[videoID].getHeight();
                        speed = video[videoID].getSpeed();
                        duration = video[videoID].getDuration();
                        position = video[videoID].getPosition();
                        frameCurrent = video[videoID].getCurrentFrame();
                        frameTotal = video[videoID].getTotalNumFrames();
#ifdef USE_QUICKTIME_7
                        volume = video[videoID].getVolume(); // we should implement for QT6
                        pan = video[videoID].getPan();
#endif
                        loopState = video[videoID].getLoopState();

                        moviePath = c.getArgument<string>(0);
#ifdef TARGET_OSX
                        vector<string> pathParts = ofSplitString(moviePath, "/");
#else
                        vector<string> pathParts = ofSplitString(moviePath, "\\");
#endif
                        movieName = pathParts[pathParts.size() - 1];

                        bIsPaused = true;
                        bIsPlaying = false;
                        bIsTextureReady = false;
                        bIsLoading = false;
                        bIsMovieDone = false;
                        bLoaded = true;

                        pixels = &video[videoID].getPixelsRef();

                        unlock();

                        bPopCommand = true;

                        ofxThreadedVideoEvent e = ofxThreadedVideoEvent(moviePath, VIDEO_EVENT_LOAD_OK, this);
                        ofNotifyEvent(threadedVideoEvent, e, this);

                        ofxThreadedVideoLoadOk++;

                    }else{

                        ofLogError() << "Could not load: " << instanceID << " + " << c.getCommandAsString();

                        ofxThreadedVideoEvent e = ofxThreadedVideoEvent(moviePath, VIDEO_EVENT_LOAD_FAIL, this);
                        ofNotifyEvent(threadedVideoEvent, e, this);

                        ofxThreadedVideoLoadFail++;
                    }

                }

            }

            if(bPopCommand){
                video[videoID].update();
            }

            lock();

            if(bIsFrameNew){
                // Advance all active fades against the newly decoded frame.
                for(unsigned int i = 0; i < fades.size(); i++){

                    ofxThreadedVideoFade& currentFade = fades.at(i);

                    if(currentFade.getIsFading(frameCurrent)){
                        _fade = currentFade.getFade(_fade, frameCurrent);

                        if(currentFade.fadeVideo){
                            if(fade != _fade) fade = _fade;
                        }
#ifdef USE_QUICKTIME_7
                        if(currentFade.fadeSound){ // we should implement for QT6
                            if(video[videoID].getVolume() != _fade) video[videoID].setVolume(_fade);
                        }
#endif
                    }

                    // One-shot fades are removed once complete.
                    if(currentFade.fadeOnce && currentFade.getFadeDone(frameCurrent)){
                        fades.erase(fades.begin() + i);
                        i--;
                    }

                }
            }

            // Release the shared critical section.
            ofxThreadedVideoGlobalMutex.lock();

            if(bPopCommand) popCommand();

            ofxThreadedVideoGlobalCritical = false;
            bCriticalSection = false;
            ofxThreadedVideoGlobalMutex.unlock();
            unlock();
        }else{
            // Another instance holds the critical section; back off.
            ofxThreadedVideoGlobalMutex.unlock();
            unlock();
        }

        ofSleepMillis(1);

    }

}
示例#12
0
// Broadcasts the raw EmoState through elEventoEmo, then appends one CSV row
// of Expressiv/Affectiv/Cognitiv readings for the given user to `os`.
//
// @param os         destination stream (file or console)
// @param userID     Emotiv user the state belongs to
// @param eState     opaque state handle, queried via the ES_* accessors
// @param withHeader when true, the column-name header row is written first
void emotiv::logEmoState(std::ostream& os, unsigned int userID, EmoStateHandle eState, bool withHeader) {

	// Notify listeners before logging so they see the state immediately.
	ofNotifyEvent(elEventoEmo, eState);

	// Create the top header
	if (withHeader) {
		os << "Time,";
		os << "UserID,";
		os << "Wireless Signal Status,";
		os << "Blink,";
		os << "Wink Left,";
		os << "Wink Right,";
		os << "Look Left,";
		os << "Look Right,";
		os << "Eyebrow,";
		os << "Furrow,";
		os << "Smile,";
		os << "Clench,";
		os << "Smirk Left,";
		os << "Smirk Right,";
		os << "Laugh,";
		os << "Short Term Excitement,";
		os << "Long Term Excitement,";
		os << "Engagement/Boredom,";
		os << "Cognitiv Action,";
		os << "Cognitiv Power,";
		os << std::endl;
	}

	// Log the time stamp and user ID
	os << ES_GetTimeFromStart(eState) << ",";
	os << userID << ",";
	os << static_cast<int>(ES_GetWirelessSignalStatus(eState)) << ",";

	// Expressiv Suite results
	os << ES_ExpressivIsBlink(eState) << ",";
	os << ES_ExpressivIsLeftWink(eState) << ",";
	os << ES_ExpressivIsRightWink(eState) << ",";

	os << ES_ExpressivIsLookingLeft(eState) << ",";
	os << ES_ExpressivIsLookingRight(eState) << ",";

	// Only the current upper- and lower-face actions get real powers below;
	// every other expression column reads as map's default-inserted 0.
	std::map<EE_ExpressivAlgo_t, float> expressivStates;

	EE_ExpressivAlgo_t upperFaceAction = ES_ExpressivGetUpperFaceAction(eState);
	float			   upperFacePower  = ES_ExpressivGetUpperFaceActionPower(eState);

	EE_ExpressivAlgo_t lowerFaceAction = ES_ExpressivGetLowerFaceAction(eState);
	float			   lowerFacePower  = ES_ExpressivGetLowerFaceActionPower(eState);

	expressivStates[ upperFaceAction ] = upperFacePower;
	expressivStates[ lowerFaceAction ] = lowerFacePower;
	
	os << expressivStates[ EXP_EYEBROW     ] << ","; // eyebrow
	os << expressivStates[ EXP_FURROW      ] << ","; // furrow
	os << expressivStates[ EXP_SMILE       ] << ","; // smile
	os << expressivStates[ EXP_CLENCH      ] << ","; // clench
	os << expressivStates[ EXP_SMIRK_LEFT  ] << ","; // smirk left
	os << expressivStates[ EXP_SMIRK_RIGHT ] << ","; // smirk right
	os << expressivStates[ EXP_LAUGH       ] << ","; // laugh

	// Affectiv Suite results
	os << ES_AffectivGetExcitementShortTermScore(eState) << ",";
	os << ES_AffectivGetExcitementLongTermScore(eState) << ",";

	os << ES_AffectivGetEngagementBoredomScore(eState) << ",";

	// Cognitiv Suite results
	os << static_cast<int>(ES_CognitivGetCurrentAction(eState)) << ",";
	os << ES_CognitivGetCurrentActionPower(eState);

	os << std::endl;
};
示例#13
0
// Fires imgButtonEvent (pressed=true, button ID) when a touch lands inside
// a clickable, non-toggle button.
void ofxImgButton::touchDown(ofTouchEventArgs &touch){
    if (!clickable || togglable) return;
    if (!button.inside(touch.x, touch.y)) return;

    pair<bool, int> payload(true, ID);
    ofNotifyEvent(imgButtonEvent, payload, this);
}
示例#14
0
// Fires imgButtonEvent (pressed=true, button ID) when the left mouse button
// (0) is pressed inside a clickable, non-toggle button.
void ofxImgButton::mousePressed(ofMouseEventArgs & args) {
    if (!clickable) return;
    if (args.button != 0) return;
    if (togglable) return;
    if (!button.inside(args.x, args.y)) return;

    pair<bool, int> payload(true, ID);
    ofNotifyEvent(imgButtonEvent, payload, this);
}
示例#15
0
// State-machine driver for the content pipeline. Entering a state kicks off
// that phase's work (JSON download & parse, asset cataloging/checking,
// expired-asset removal, downloads, object filtering, textured-object setup,
// JSON bookkeeping) and finally notifies listeners via eventStateChanged.
// @param s the state to enter; resets timeInState to 0.
void ofxAppContent::setState(ContentState s){

	state = s;
	timeInState = 0;

	switch (s) {

		case ContentState::DOWNLOADING_JSON:{

			//start the download and parse process
			jsonParser.downloadAndParse(jsonURL,
										jsonDestinationDir,	//directory where to save
										numThreads,			//num threads
										contentCfg.pointToObjects,
										contentCfg.parseOneObject,
										contentCfg.userData
										);
			}
			break;

		case ContentState::CATALOG_ASSETS:
			startThread();
			break;

		case ContentState::CHECKING_ASSET_STATUS:{
			//sadly we need to cast our objects to AssetHolder* objects to check them
			if (parsedObjects.size()) {
				vector<AssetHolder*> assetObjs;
				for (int i = 0; i < parsedObjects.size(); i++) {
					assetObjs.push_back(dynamic_cast<AssetHolder*>(parsedObjects[i]));
				}
				ofAddListener(assetChecker.eventFinishedCheckingAllAssets, this, &ofxAppContent::assetCheckFinished);
				assetChecker.checkAssets(assetObjs, numThreads);
			} else {
				//nothing to check; skip straight to ready
				ofLogWarning("ofxAppContent-" + ID) << "There are ZERO parsed objects!";
				setState(ContentState::JSON_CONTENT_READY);
			}

			}break;

		case ContentState::REMOVING_EXPIRED_ASSETS:
			ofLogNotice("ofxAppContent-" + ID) << "Start expired asset removal phase.";
			startThread();
			break;

		case ContentState::DOWNLOADING_ASSETS:
			//fill in the list
			for(int i = 0; i < parsedObjects.size(); i++){
				parsedObjects[i]->downloadMissingAssets(dlc);
			}
			totalAssetsToDownload = dlc.getNumPendingDownloads();
			dlc.setNeedsChecksumMatchToSkipDownload(true);
			dlc.startDownloading();
			break;

		case ContentState::FILTER_OBJECTS_WITH_BAD_ASSETS:{

			int numObjectB4Filter = parsedObjects.size();

			if(!shouldSkipObjectTests){
				objectsWithBadAssets.clear();

				vector<int> badObjects;
				vector<std::string> badObjectsIds;

				for(int i = 0; i < parsedObjects.size(); i++){

					//do some asset integrity tests...
					bool allAssetsOK = parsedObjects[i]->areAllAssetsOK();
					bool needsAllAssetsToBeOk = objectUsagePolicy.allObjectAssetsAreOK;
					int numImgAssets = parsedObjects[i]->getAssetDescriptorsForType(ofxAssets::IMAGE).size();
					int numVideoAssets = parsedObjects[i]->getAssetDescriptorsForType(ofxAssets::VIDEO).size();
					int numAudioAssets = parsedObjects[i]->getAssetDescriptorsForType(ofxAssets::AUDIO).size();

					bool rejectObject = false;
					std::string rejectionReason;

					//apply all policy rules to decide if object is rejected or not
					if(needsAllAssetsToBeOk){
						if(!allAssetsOK){
							rejectObject = true;
							auto brokenAssets = parsedObjects[i]->getBrokenAssets();
							if(rejectionReason.size()) rejectionReason += " | ";
							rejectionReason += ofToString(brokenAssets.size()) + " Broken Asset(s)";
						}
					}

					if(numImgAssets < objectUsagePolicy.minNumberOfImageAssets){
						rejectObject = true;
						if(rejectionReason.size()) rejectionReason += " | ";
						rejectionReason += "Not Enough Images";
						ofLogError("ofxAppContent-" + ID) << "Rejecting Object '" << parsedObjects[i]->getObjectUUID()
							<< "' because doesnt have the min # of images! (" << numImgAssets << "/"
							<< objectUsagePolicy.minNumberOfImageAssets << ")" ;
					}

					// FIX: comparison was inverted ('>'), rejecting objects
					// that DID have enough videos; mirror the image check.
					if(numVideoAssets < objectUsagePolicy.minNumberOfVideoAssets){
						rejectObject = true;
						if(rejectionReason.size()) rejectionReason += " | ";
						rejectionReason += "Not Enough Videos";
						ofLogError("ofxAppContent-" + ID) << "Rejecting Object '" << parsedObjects[i]->getObjectUUID()
						<< "' because doesnt have the min # of Videos! (" << numVideoAssets << "/"
						<< objectUsagePolicy.minNumberOfVideoAssets << ")" ;
					}

					// FIX: same inverted comparison ('>') for audio assets.
					if(numAudioAssets < objectUsagePolicy.minNumberOfAudioAssets){
						rejectObject = true;
						if(rejectionReason.size()) rejectionReason += " | ";
						rejectionReason += "Not Enough AudioFiles";
						ofLogError("ofxAppContent-" + ID) << "Rejecting Object '" << parsedObjects[i]->getObjectUUID()
						<< "' because doesnt have the min # of Audio Files! (" << numAudioAssets << "/"
						<< objectUsagePolicy.minNumberOfAudioAssets << ")" ;
					}

					if (rejectObject){
						badObjects.push_back(i);
						badObjectsIds.push_back(parsedObjects[i]->getObjectUUID());
						objectsWithBadAssets += "Object '" + badObjectsIds.back() + "' : " + rejectionReason + "\n";
					}
				}

				//delete from the end so earlier indices stay valid
				for(int i = badObjects.size() - 1; i >= 0; i--){
					// FIX: log used parsedObjects[i] (wrong index) while
					// deleting parsedObjects[badObjects[i]].
					ofLogError("ofxAppContent-" + ID) << "Dropping object \"" << badObjectsIds[i] << "\"";
					delete parsedObjects[badObjects[i]];
					parsedObjects.erase(parsedObjects.begin() + badObjects[i]);
				}

				numIgnoredObjects += badObjects.size();
				objectsWithBadAssets = "\nRemoved " + ofToString(badObjects.size()) + " \"" + ID + "\" objects:\n\n" + objectsWithBadAssets + "\n\n" ;

			}else{
				ofLogWarning("ofxAppContent-" + ID) << "skipping Object Drop Policy Tests!! \"" << ID << "\"";
			}

			}break;

			
		case ContentState::SETUP_TEXTURED_OBJECTS:{
			numSetupTexuredObjects = 0;
			}break;


		case ContentState::FILTER_REJECTED_TEXTURED_OBJECTS:{
			int numObjectB4Filter = parsedObjects.size();

			vector<int> badObjects;
			vector<std::string> badObjectsIds;
			string log;

			for(int i = 0; i < numObjectB4Filter; i++){

				bool userRejectedObject = !parsedObjects[i]->isValid;

				if (userRejectedObject){
					badObjects.push_back(i);
					badObjectsIds.push_back(parsedObjects[i]->getObjectUUID());
					log += "Object '" + badObjectsIds.back() + "' : Rejected at Setup Textured Object stage - probably cant load img\n";
				}
			}

			//delete from the end so earlier indices stay valid
			for(int i = badObjects.size() - 1; i >= 0; i--){
				// FIX: log used parsedObjects[i] (wrong index) while
				// deleting parsedObjects[badObjects[i]].
				ofLogError("ofxAppContent-" + ID) << "Dropping object at setup Textured Object Stage \"" << badObjectsIds[i] << "\"";
				delete parsedObjects[badObjects[i]];
				parsedObjects.erase(parsedObjects.begin() + badObjects[i]);
			}

			numIgnoredObjects += badObjects.size();

			objectsWithBadAssets += "Setup Textured Object Statge\n\nRemoved " + ofToString(badObjects.size()) + " \"" + ID + "\" objects:\n\n" + log;

			ofLogWarning("ofxAppContent-" + ID) << "Removed a total of " << numIgnoredObjects << " objects for content type \"" << ID << "\" due to various rasons. Check 'logs/assetStatus.log' for more info.";
			if(numIgnoredObjects > 0) ofLogWarning("ofxAppContent-" + ID) << objectsWithBadAssets;
			if(numIgnoredObjects > 0){
				auto a = ofxApp::get().analytics();
				if(a && a->isEnabled()){
					a->sendException("ofxApp - Content '" + ID + "' - rejected " + ofToString(numIgnoredObjects) + " objects.", false);
				}
			}
			float pct;
			if(numObjectB4Filter > 0){
				pct = 100.0f * numIgnoredObjects / float(numObjectB4Filter);
			}else{
				pct = 0.0f;
			}
			ofLogWarning("ofxAppContent-" + ID) << "Ignored " << ofToString(pct,2) << "% of the objects defined in the \"" << ID << "\" JSON.";

		}break;

		case ContentState::JSON_CONTENT_READY:{
			//keep the json as a good one
			ofFile jsonFile;
			jsonFile.open(jsonParser.getJsonLocalPath());
			std::string jsonPath = jsonParser.getJsonLocalPath();
			std::string dir = ofFilePath::getEnclosingDirectory(jsonPath);
			ofFilePath::createEnclosingDirectory(dir + "knownGood");
			std::string oldJsonPath = dir + "/knownGood/" + ID + ".json";

			//calc sha1 for the last konwn json, and the fresh one
			newJsonSha1 = ofxChecksum::calcSha1(jsonParser.getJsonLocalPath());
			if(ofFile::doesFileExist(oldJsonPath)){
				oldJsonSha1 = ofxChecksum::calcSha1(oldJsonPath);
			}

			//replace the old json with the fresh one
			jsonFile.moveTo(oldJsonPath, false, true);
			}break;

		default: break;
	}

	std::string info = "\"" + ID + "\" > " + getNameForState(state);
	if (shouldSkipSha1Tests) info += " - SKIPPING SHA1 TESTS!";
	ofNotifyEvent(eventStateChanged, info);
}
示例#16
0
void ofApp::update() {
    if( ofGetMousePressed(0) ) {
        string str = ofGetTimestampString();
        ofNotifyEvent(timeStampEvent, str, this);
    }
}
示例#17
0
void ofxTLZoomer::notifyZoomStarted(){
	ofxTLZoomEventArgs zoomEvent;
    zoomEvent.sender = timeline;
	zoomEvent.currentZoom = zoomEvent.oldZoom = getViewRange();
	ofNotifyEvent(events().zoomStarted, zoomEvent);		
}
示例#18
0
	//---------
	// Scans the app's data/assets folder (or data/assets/<addon> when an addon
	// name is given) and registers every recognised asset under a key produced
	// by transformName(). Four categories are handled, each from its own
	// subfolder: images (png/jpeg/jpg, loaded eagerly), shaders (vert/frag/geom
	// grouped by file stem and linked into one program), fonts (ttf, only the
	// path is recorded for lazy loading) and videos (mov, loaded and started).
	// Duplicate names and unrecognised extensions are skipped; extension
	// matching is case-sensitive. Fires evtLoad when the scan is complete.
	void Register::loadAssets(string addon) {
		ofLogNotice("ofxAssets") << "//--------------------";
		ofLogNotice("ofxAssets") << "//ofxAssets::init";
		ofLogNotice("ofxAssets") << "//--------------------";
		ofLogNotice("ofxAssets") << "//";
		
		string dataPath = "assets";
		if (!addon.empty()) {
			dataPath += "/" + addon;
		}
		
		if (!ofDirectory::doesDirectoryExist(dataPath)) {
			ofLogNotice("ofxAssets") << "Assets data path cannot be found. Be sure to have a ./assets subfolder inside your app's data/ folder if you want to use ofxAssets";
			return;
		}
		
		ofDirectory listing;
		string folder, path, extension, stem, name;
		
		//----- images: png / jpeg / jpg -----
		folder = ofToDataPath(dataPath + "/images/", true);
		if (ofDirectory::doesDirectoryExist(folder)) {
			listing.listDir(folder);
			for (int idx = 0; idx < listing.size(); idx++) {
				path = listing.getPath(idx);
				extension = ofFilePath::getFileExt(path);
				name = ofFilePath::getBaseName(path);
				transformName(name, addon);
				
				bool isImage = (extension == "png") || (extension == "jpeg") || (extension == "jpg");
				if (!isImage || this->images.count(name) > 0)
					continue;
				
				// operator[] default-constructs the map entry in place
				this->images[name].loadImage(path);
				
				ofLogNotice("ofxAssets") << "Loaded image asset '" << name << "'" << endl;
			}
		}
		
		//----- shaders: vert / frag / geom, one linked program per stem -----
		folder = ofToDataPath(dataPath + "/shaders/", true);
		if (ofDirectory::doesDirectoryExist(folder)) {
			listing.listDir(folder);
			for (int idx = 0; idx < listing.size(); idx++) {
				path = listing.getPath(idx);
				extension = ofFilePath::getFileExt(path);
				// path without its extension; used to look up sibling stages
				stem = path.substr(0, path.length() - extension.length() - 1);
				name = ofFilePath::getBaseName(path);
				transformName(name, addon);
				
				bool isShader = (extension == "vert") || (extension == "frag") || (extension == "geom");
				if (!isShader || this->shaders.count(name) > 0)
					continue;
				
				ofShader & shader = this->shaders[name];
				if (ofFile::doesFileExist(stem + ".frag"))
					shader.setupShaderFromFile(GL_FRAGMENT_SHADER, stem + ".frag");
				if (ofFile::doesFileExist(stem + ".vert"))
					shader.setupShaderFromFile(GL_VERTEX_SHADER, stem + ".vert");
				if (ofFile::doesFileExist(stem + ".geom"))
					shader.setupShaderFromFile(GL_GEOMETRY_SHADER, stem + ".geom");
				shader.linkProgram();
				
				ofLogNotice("ofxAssets") << "Loaded shader asset '" << name << "'" << endl;
			}
		}
		
		//----- fonts: ttf — path recorded only, loaded on demand elsewhere -----
		folder = ofToDataPath(dataPath + "/fonts/", true);
		if (ofDirectory::doesDirectoryExist(folder)) {
			listing.listDir(folder);
			for (int idx = 0; idx < listing.size(); idx++) {
				path = listing.getPath(idx);
				extension = ofFilePath::getFileExt(path);
				name = ofFilePath::getBaseName(path);
				transformName(name, addon);
				
				if (extension != "ttf" || this->fontFilenames.count(name) > 0)
					continue;
				
				this->fontFilenames[name] = path;
				
				ofLogNotice("ofxAssets") << "Found font asset '" << name << "'" << endl;
			}
		}
		
		//----- videos: mov — loaded and started immediately -----
		folder = ofToDataPath(dataPath + "/videos/", true);
		if (ofDirectory::doesDirectoryExist(folder)) {
			listing.listDir(folder);
			for (int idx = 0; idx < listing.size(); idx++) {
				path = listing.getPath(idx);
				extension = ofFilePath::getFileExt(path);
				name = ofFilePath::getBaseName(path);
				transformName(name, addon);
				
				if (extension != "mov" || this->videos.count(name) > 0)
					continue;
				
				ofVideoPlayer & video = this->videos[name];
				video.loadMovie(path);
				video.play();
				
				ofLogNotice("ofxAssets") << "Loaded video asset '" << name << "'" << endl;
			}
		}
		
		ofLogNotice("ofxAssets") << "//";
		ofLogNotice("ofxAssets") << "//--------------------";
		
		ofNotifyEvent(evtLoad, *this, this);
	}
示例#19
0
//------------------------------------------
// Broadcasts the core "setup" event to all registered listeners, reusing the
// shared empty voidEventArgs. Returns whatever ofNotifyEvent returns —
// presumably whether a listener attended/consumed the event; confirm against
// the openFrameworks version in use.
bool ofCoreEvents::notifySetup(){
	return ofNotifyEvent( setup, voidEventArgs );
}
示例#20
0
void ofxFenster::update(ofEventArgs& e){
	// Run this window's own per-frame update, then relay the update tick to
	// everyone subscribed to ofxFensterEvents.update. The args object is a
	// shared static because listeners only care about the notification itself.
	update();
	static ofEventArgs relayedArgs;
	ofNotifyEvent(ofxFensterEvents.update, relayedArgs, this);
}
示例#21
0
//------------------------------------------
bool ofCoreEvents::notifyMouseScrolled(int x, int y, float scrollX, float scrollY){
	ofMouseEventArgs mouseEventArgs(ofMouseEventArgs::Scrolled,x,y);
	mouseEventArgs.scrollX = scrollX;
	mouseEventArgs.scrollY = scrollY;
	return ofNotifyEvent( mouseScrolled, mouseEventArgs );
}
示例#22
0
	// Renders one full frame in three passes:
	//  1. lets listeners draw the projected texture into the offscreen FBO
	//     (render_texture event),
	//  2. clears the screen and draws HUD/screenlog in an orthographic pass
	//     (render_hud event),
	//  3. switches to the calibrated projector viewpoint and draws the mesh
	//     textured with the FBO, plus debug coordinate systems when enabled.
	// GL state changes here are order-sensitive; do not reorder.
	void CamaraLucida::render()
	{
		// bail out until init has completed
		if (!inited())
			return;
		
		// pass 1: fill the FBO with listener-supplied content
		if (mesh->is_render_enabled())
		{
			fbo.bind();
			//ofEnableAlphaBlending();  
			//glEnable(GL_BLEND);  
			//glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA,GL_ONE,GL_ONE_MINUS_SRC_ALPHA); 
		
			ofNotifyEvent( render_texture, void_event_args );
		
			fbo.unbind();
			//ofDisableAlphaBlending(); 
			//glDisable(GL_BLEND);  
		}
		
		// clear to opaque black and enable depth testing for the 3D pass
		glClearColor(0, 0, 0, 1);
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
		glEnable(GL_DEPTH_TEST);
	
		glPolygonMode(GL_FRONT, GL_FILL);
		// TODO wireframe it's not working with fbo textures.. why?
		// @see cmlMesh.enable_render();
		// glPolygonMode(GL_FRONT_AND_BACK, GL_LINE); 
		
		glColor3f(1, 1, 1);
		glViewport(0, 0, ofGetWidth(), ofGetHeight());
		
		// pass 2: 2D overlay drawn in an orthographic projection
		gl_ortho();
		
		ofNotifyEvent( render_hud, void_event_args );
		
		render_screenlog();
		
		// pass 3: switch to the calibrated projector projection/viewpoint
		gl_projection();	
		gl_viewpoint();
		
		gl_scene_control();
		
		// optional debug overlays: world / projector / rgb-camera coordinate
		// systems and the projector principal point
		if (_debug)
		{
			render_world_CS();
			render_proj_CS();
			render_rgb_CS();
			render_proj_ppal_point();
		}
		
		//	if (using_opencl)
		//		opencl.finish();
		
		// TODO alpha blending!
		
		//glEnable(GL_BLEND);  
		//glBlendFunc(GL_SRC_ALPHA, GL_ONE);
		//glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA); 
		//glBlendFuncSeparate(GL_ONE, GL_SRC_COLOR, GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); 
		//glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ONE, GL_ONE_MINUS_SRC_ALPHA); 
		//ofEnableAlphaBlending();
		
		// draw the mesh textured with the FBO content rendered in pass 1
		if (mesh->is_render_enabled())
		{
			fbo.getTextureReference(0).bind();
		
			mesh->render();
		
			fbo.getTextureReference(0).unbind();
		}
		
		//glDisable(GL_BLEND);
		//ofDisableAlphaBlending(); 
	}
示例#23
0
//------------------------------------------
// Broadcasts the core "exit" event to all registered listeners, reusing the
// shared empty voidEventArgs. Returns whatever ofNotifyEvent returns —
// presumably whether a listener attended/consumed the event; confirm against
// the openFrameworks version in use.
bool ofCoreEvents::notifyExit(){
	return ofNotifyEvent( exit, voidEventArgs );
}
// Broadcasts the classifier's predicted label id to all predictionE
// listeners, with this classifier as the event sender.
void KinectV2Classifier::predictLabel(int predictedLabel)
{
    ofNotifyEvent(predictionE, predictedLabel, this);
}
示例#25
0
//------------------------------------------
// Broadcasts a file-drag event carrying the drop info (files, position) to
// fileDragEvent listeners. Returns whatever ofNotifyEvent returns.
bool ofCoreEvents::notifyDragEvent(ofDragInfo info){
	return ofNotifyEvent(fileDragEvent, info);
}
void ofxSimpleGuiToggle::onPress(int x, int y, int button) {
	// A regular toggle flips its state on press; a momentary button instead
	// latches true for as long as it is held (released in onRelease).
	if(!momentary) {
		toggle();
	} else {
		setValue(true);
	}
	
	ofNotifyEvent(onPressed, e, this);
}
示例#27
0
//void CloudsMixer::fillBuffer(float *output, int bufferSize, int nChannels)
// Audio callback. Pulls fresh music and diagetic buffers from listeners,
// mixes them with their bus volumes, feeds/reads a one-second delay line,
// applies an envelope-follower compressor and master gain, hard-clips to
// [-1, 1], and advances the linear fade state machine (fsig/famt).
// NOTE(review): the delay-line arithmetic hard-codes 44100 samples, which
// assumes a 44.1 kHz stream — confirm against the sound-stream setup.
void CloudsMixer::audioOut(float * output, int bufferSize, int nChannels )
{
    GetCloudsAudioEvents()->dopull = GetCloudsAudioEvents()->fadeValue > 0;
    // check for buffer size mismatch
    if (bufferSize != musicArgs.bufferSize ||
            bufferSize != diageticArgs.bufferSize) {
        return;
    }

    // check for channels mismatch
    if (nChannels != musicArgs.nChannels ||
            nChannels != diageticArgs.nChannels)
    {
        return;
    }

    size_t size = nChannels*bufferSize*sizeof(float);
    // fill music buffer
    memset(musicArgs.buffer, 0, size);
    ofNotifyEvent(GetCloudsAudioEvents()->musicAudioRequested, musicArgs, this);

    // fill diagetic buffer
    memset(diageticArgs.buffer, 0, size);
    ofNotifyEvent(GetCloudsAudioEvents()->diageticAudioRequested, diageticArgs, this);

    // mix
    for (int i=0; i<bufferSize*nChannels; i++)
    {
        // music is additionally scaled by the current fade value
        output[i] = (musicArgs.buffer[i]*musicVol*GetCloudsAudioEvents()->fadeValue) + diageticArgs.buffer[i]*diageticVol;

        // read from delay
        // tap ~1 s behind the write head and decay the stored sample so the
        // tail dies out over repeated passes
        if(GetCloudsAudioEvents()->dodelay) {
            output[i]+=delayLine.buffer[(delptr-44099+44100)%44100]*dval;
            delayLine.buffer[delptr%44100] = delayLine.buffer[delptr%44100]*0.8;
        }
        // write into delay
        else {
            delayLine.buffer[delptr%44100] = musicArgs.buffer[i]*musicVol + diageticArgs.buffer[i]*diageticVol;
        }
        delptr++;


        // Luke's Compressor
        // envelope follower: fast attack toward rising levels, slow decay
        float current = abs(output[i]);
        if(current>followgain) {
            followgain = followgain + ((current-followgain)/attack);
        }
        else {
            followgain = followgain + ((current-followgain)/decay);
        }
        // reduce gain proportionally once the envelope exceeds the threshold
        if(followgain>thresh) GetCloudsAudioEvents()->gain = 1.0-((followgain-thresh)*ratio);
        else GetCloudsAudioEvents()->gain = 1.0;

        output[i] = output[i]*GetCloudsAudioEvents()->gain*MASTER_GAIN;

        // clip
        if (output[i] > 1) {
            output[i] = 1;
        }
        else if (output[i] < -1) {
            output[i] = -1;
        }
    }

    // figure out when delay turns off
    // once the total energy left in the delay line drops low enough,
    // disable the delay and zero its mix level
    if(GetCloudsAudioEvents()->dodelay)
    {
        float delsum = 0.;
        for(int i = 0; i<44100; i++)
        {
            delsum += fabs(delayLine.buffer[i]);
        }
        if(delsum<20.) {
            GetCloudsAudioEvents()->dodelay = false;
            dval = 0.;
        }
    }

    // adjust fade
    if(fsig==1) // fading up
    {
        GetCloudsAudioEvents()->fadeValue+=famt;
        if(GetCloudsAudioEvents()->fadeValue>0.999)
        {
            GetCloudsAudioEvents()->fadeValue = 1.0;
            fsig = 0;
            delptr = 0; // reset delay line
        }
    }
    else if(fsig==-1) // fading down
    {
        GetCloudsAudioEvents()->fadeValue-=famt;
        dval+=famt; // fade in delay while fading out audio
        if(dval>1.0) dval = 1.0;
        if(GetCloudsAudioEvents()->fadeValue<0.001)
        {
            GetCloudsAudioEvents()->fadeValue = 0.;
            fsig = 0;
            // optionally flush buffers once fully faded out
            if(GetCloudsAudioEvents()->setupflush) {
                GetCloudsAudioEvents()->doflush = true;
            }
        }
    }


    /*
    if(showCompressor) {
        for(float i = 0;i<0.5;i=i+0.01)
        {
            if (followgain>i) cout << "•"; else cout << " ";
        }
        cout << " : ";
        for(float i = 0.5;i<1.;i=i+0.01)
        {
            if (gain>i) cout << "•"; else cout << " ";
        }
        cout << endl;
    }
     */


    //cout << followgain << " : " << gain << endl;



}
void ofxSimpleGuiToggle::onRelease(int x, int y, int button) {
	// Only momentary buttons react to release: they spring back to false.
	// Regular toggles keep the state chosen in onPress.
	if(momentary) {
		setValue(false);
	}
	
	ofNotifyEvent(onReleased, e, this);
}
示例#29
0
void ofxButton::valueChanged(bool & v){
	// The button "fires" on release: only the true -> false transition
	// emits triggerEvent; the press (false -> true) is ignored.
	if(v) return;
	ofNotifyEvent(triggerEvent);
}
// Processes one incoming OSC message from Duration.
// Returns true when the message was recognised and handled, false otherwise.
// Routing, under the oscMutex lock:
//  - "/dt/info"            -> acknowledged, no-op for now
//  - "/dt/cmd/effect-*"    -> effect enable/alpha commands (effectCommandEvent)
//  - other "/dt/..." tracks-> generic float / flag / bang events by OSC arg
//    type, then a set of temporary hard-coded track names (see Notice below).
void durationReceiver::handle(const ofxOscMessage &_msg) {
	if( !isEnabled() ) return false;
	
	// lock mutex (released automatically when myLock goes out of scope)
	ofScopedLock myLock(oscMutex);
	
	// get incoming track info
	if( _msg.getAddress().compare("/dt/info") == 0 ){
		
		// todo: register incomming variable types // tracks and let them be synchronised automatically with inter variables + getters
		
		return true; // we're done
	}
	
	// incoming track data
	if( canHandle(_msg) ){
		// track name = address with the "/dt/" prefix stripped
		string track=_msg.getAddress().substr(DT_PREFIX_LEN,_msg.getAddress().npos);
		
		// type of the first OSC argument (NONE when the message has no args)
		ofxOscArgType type = OFXOSC_TYPE_NONE;
		if( _msg.getNumArgs() > 0 ) type = _msg.getArgType(0);
		
		if(_msg.getNumArgs()>0 && (type == OFXOSC_TYPE_INT32 || type == OFXOSC_TYPE_FLOAT) ){
			// /dt/cmd/effect-*
			// sizeof(literal)-1 is the compile-time length of the prefix string
			#define cmpEffectCmd "cmd/effect-"
			if( track.compare(0, sizeof(cmpEffectCmd)-1, cmpEffectCmd) == 0  ) {
				
				track=track.substr(sizeof(cmpEffectCmd)-1,track.npos);
				
				// enable / disable an effect
				// /dt/cmd/effect-enable-effectName
				#define cmpEffectCmdEnable "enable-"
				if( track.compare(0, sizeof(cmpEffectCmdEnable)-1, cmpEffectCmdEnable) == 0  ) {
					
					effectCmdEventArgs args;
					args.command = "enable";
					args.targetEffectName = track.substr(sizeof(cmpEffectCmdEnable)-1, track.npos);
					args.boolValue = _msg.getArgAsInt32(0);
					
					ofNotifyEvent(basicEffect::effectCommandEvent, args);
				}
				// set an effect's alpha
				// /dt/cmd/effect-alpha-effectName
				#define cmpEffectCmdAlpha "alpha-"
				else if( track.compare(0, sizeof(cmpEffectCmdAlpha)-1, cmpEffectCmdAlpha) == 0  ) {
					
					effectCmdEventArgs args;
					args.command = "alpha";
					args.targetEffectName = track.substr(sizeof(cmpEffectCmdAlpha)-1, track.npos);
					args.floatValue = _msg.getArgAsFloat(0);
					
					ofNotifyEvent(basicEffect::effectCommandEvent, args);
				}
			}
		}
        
        // generic per-type dispatch for non-command tracks
        else if( type == OFXOSC_TYPE_FLOAT){
            float value = 0;
            if(_msg.getNumArgs()>0) value=_msg.getArgAsFloat(0);
            durationFloatEventArgs args;
            args.track=track;
            //args.type="curve";
            args.value=value;
            ofNotifyEvent(durationFloatEvent, args);
        }
        else if(type == OFXOSC_TYPE_INT32){
            // int32 tracks currently ignored (only reached when the first
            // branch's prefix match failed)
        }
        else if(type == OFXOSC_TYPE_STRING){
            durationFlagEventArgs args;
            args.track=track;
            args.flag = _msg.getArgAsString(0);
            ofNotifyEvent(durationFlagEvent, args);
        }
        // handle bangs
        else if(type == OFXOSC_TYPE_NONE){
            durationBangEventArgs args;
            args.track=track;
            ofNotifyEvent(durationBangEvent, args);
        }
        
		
		//	- - - - - - - - - -
		//	Notice:
		//	The following message handling are temporary and static.
		//	A system needs to be made for synchronising any track by automatically parsing it. #ofxOscVariable
		//	Hence you can dynamically create new variables and bind them to effect variables
		//	- - - - - - - - - -
		
		// NOTE(review): an argument-less "bang" track also matched the generic
		// OFXOSC_TYPE_NONE branch above, so listeners get TWO bang events for
		// it — confirm whether that is intended.
		if( track.compare("bang") == 0 && type == OFXOSC_TYPE_NONE ){
			{ // send bangEvent
				durationBangEventArgs args;
				args.track=track;
				ofNotifyEvent(durationBangEvent, args);
			}
		}
		else if( track.compare("test") == 0 ){
			{ // send flagEvent
				// NOTE(review): `flag` is parsed from the message but then
				// ignored — args.flag is set to "" below; looks like
				// args.flag=flag was intended. Confirm before changing.
				string flag = "";
				if(_msg.getNumArgs()>0 && type == OFXOSC_TYPE_STRING){
					flag=_msg.getArgAsString(0);
				}
				durationFlagEventArgs args;
				args.track=track;
				args.flag="";
				
				ofNotifyEvent(durationFlagEvent, args);
			}
		}
		else if( track.compare("lineEffectIntensity") == 0 ){
			{ // curve
				float value = 0;
				if(_msg.getNumArgs()>0)
					value=_msg.getArgAsFloat(0);
				durationFloatEventArgs args;
				args.track=track;
				args.type="curve";
				args.value=value;
				ofNotifyEvent(durationFloatEvent, args);
			}
		}
		// tmp
		else if( track.compare("bgVendomeOpacity") == 0 ){
			{ // curve
				float value = 0;
				if(_msg.getNumArgs()>0)
					value=_msg.getArgAsFloat(0);
				durationFloatEventArgs args;
				args.track=track;
				args.type="curve";
				args.value=value;
				ofNotifyEvent(durationFloatEvent, args);
			}
		}
		
		else if( track.compare("imageBeatEffectIntensity") == 0 ){
			{ // curve
				float value = 0;
				if(_msg.getNumArgs()>0) value=_msg.getArgAsFloat(0);
				durationFloatEventArgs args;
				args.track=track;
				args.type="curve";
				args.value=value;
				ofNotifyEvent(durationFloatEvent, args);
			}
		}
		/*{
			// send bangEvent
			durationBangEventArgs args;
			args.what="aubioTempo";
			ofNotifyEvent(durationBangEvent, args);
		}
		{
			// send bangEvent
			durationTempoEventArgs args;
			args.bpm = mirCache.bpm;
			args.isTempoBis=false;
			ofNotifyEvent(durationTempoEventArgs, args);
		}
		 */
		// unknown track, could not get information
		else {
			// todo: try to add the signal here
			//ofLogNotice();
		}
		return true;
	}/*
	else if( addr.compare("/aubioTempoBis") == 0 ){
		{
			// send bangEvent
			durationBangEventArgs args;
			args.what="aubioTempoBis";
			ofNotifyEvent(durationBangEvent, args);
		}
		{
			// send bangEvent
			durationTempoEventArgs args;
			args.bpm = mirCache.bpm;
			args.isTempoBis=true;
			ofNotifyEvent(durationTempoEventArgs, args);
		}
		return true;
	}
	else if( addr.compare("/aubioBpm") == 0 ){
		if(_msg.getNumArgs()>0) mirCache.bpm = _msg.getArgTypeName(0)=="int32"?_msg.getArgAsInt32(0):_msg.getArgAsFloat(0);
		return true;
	}
	else if( addr.compare("/aubioPitch") == 0 ){
		if(_msg.getNumArgs()>0)	mirCache.pitch = _msg.getArgTypeName(0)=="int32"?_msg.getArgAsInt32(0):_msg.getArgAsFloat(0);
		return true;
	}
	else if( addr.compare("/aubioQuiet") == 0 ){
		if(_msg.getNumArgs()>0) mirCache.silence = (_msg.getArgAsInt32(0)==0)?true:false;
		return true;
	}
	else if( addr.compare("/aubioIsPlaying") == 0 ){
		if(_msg.getNumArgs()>0) mirCache.isPlaying = (_msg.getArgAsInt32(0)==0)?true:false;
		return true;
	}
	else if( addr.compare("/aubioZcr") == 0 ){
		if(_msg.getNumArgs()>0) mirCache.zcr = _msg.getArgTypeName(0)=="int32"?_msg.getArgAsInt32(0):_msg.getArgAsFloat(0);
		return true;
	}
	else if( addr.compare("/balance") == 0 ){
		if(_msg.getNumArgs()>0) mirCache.balance = _msg.getArgTypeName(0)=="int32"?_msg.getArgAsInt32(0):_msg.getArgAsFloat(0);
		return true;
	}
	// Fiddle FFT Equaliser
	else if( addr.compare(0, 16, "/fiddle_fft/band") == 0 ){ // 16 first chars
		int band = ofToInt(addr.substr(16));
		
		if(addr.compare(17, 20, "rms")==0) // rms (volume)
			mirCache.fiddleFFT[band][0]=_msg.getArgTypeName(0)=="int32"?_msg.getArgAsInt32(0):_msg.getArgAsFloat(0);
		else // pitch
			mirCache.fiddleFFT[band][1]=_msg.getArgTypeName(0)=="int32"?_msg.getArgAsInt32(0):_msg.getArgAsFloat(0);
		
		return true;
	}
	else if( addr.compare("") == 0 ){
		
		return true;
	}*/
	else {
		//cout << "Not recognized OSC = " << _msg.getAddress() << endl;
	}
	
	// unrecognized messge
	return false;
}