Esempio n. 1
0
AudioFileReaderArray AudioFileReader::regionsFromMetaData (const int metaDataOption, const int bufferSize) throw()
{
    // Build one AudioFileReader per region found in this file's meta data.
    // Returns an empty array when there is no meta data or no regions.
    AudioFileReaderArray readers;
    AudioFileMetaData meta = getMetaData();
    
    if (meta.isNotNull())
    {
        // Optionally synthesise regions from cue points before reading them back.
        if (metaDataOption & AudioFile::ConvertCuePointsToRegions)
            meta.convertCuePointsToRegions (getNumFrames(), metaDataOption & AudioFile::RemoveCuePoints);
        
        AudioFileRegionArray fileRegions = meta.getRegions();
        const Long regionCount = fileRegions.length();
        
        if (regionCount > 0)
        {
            // Pre-size the result, then populate it with one reader per region.
            readers.setSize (int (regionCount), false);
            readers.clear();
            
            for (Long index = 0; index < regionCount; ++index)
            {
                const AudioFileReader regionReader = AudioFileReader (*this, fileRegions[index], bufferSize);
                readers.add (regionReader);
            }
        }
    }
    
    return readers;
}
fl_FrameLayout * fl_ContainerLayout::getNthFrameLayout(UT_sint32 i) const
{
	// Return the i-th frame layout, or NULL when i is out of range.
	// Valid indices are 0 .. getNumFrames()-1.  The previous check used
	// `i > getNumFrames()`, which let i == getNumFrames() through and read
	// one past the end of m_vecFrames; it also ignored negative indices.
	if((i < 0) || (i >= getNumFrames()))
	{
		return NULL;
	}
	return m_vecFrames.getNthItem(i);
}
Esempio n. 3
0
ofxGifFrame * ofxGifFile::getFrameAt(int _index) {
    // Return a pointer to the frame at _index, or NULL when out of range.
    // Guard both ends of the range: a negative index would read outside
    // gifFrames just as badly as one past the end.
    if ( _index >= 0 && _index < getNumFrames() ){
        return &(gifFrames[_index]); //??
    } else {
        ofLogWarning()<<"ofxGifFile:: trying to get frame that doesn't exist, returning NULL!";
        return NULL;
    }
}
fp_FrameContainer * fl_ContainerLayout::getNthFrameContainer(UT_sint32 i) const
{
	// Return the container of the i-th frame layout, or NULL when i is out
	// of range.  Valid indices are 0 .. getNumFrames()-1; the old
	// `i > getNumFrames()` test was off by one and also let negative
	// indices through.
	if((i < 0) || (i >= getNumFrames()))
	{
		return NULL;
	}
	fl_FrameLayout * pFrame= m_vecFrames.getNthItem(i);
	// Guard against a missing entry before dereferencing it; the previous
	// code called getFirstContainer() on pFrame unconditionally.
	if(pFrame == NULL)
	{
		return NULL;
	}
	fp_FrameContainer * pFC = static_cast<fp_FrameContainer *>(pFrame->getFirstContainer());
	return pFC;
}
Esempio n. 5
0
void ofxGifFile::draw(float _x, float _y){
    // Draw the current frame at (_x, _y), advancing to the next frame once
    // the current frame's duration has elapsed.
    // (This empty-gif guard was previously duplicated verbatim; one copy suffices.)
    if ( getNumFrames() == 0 ){
        ofLogWarning()<<"ofxGifFile::No frames to draw!";
        return;
    }
    // never drawn, lets kick off
    if ( lastDrawn == -1 ){
        lastDrawn = ofGetElapsedTimef();
    
    } else if ( ofGetElapsedTimef() - lastDrawn >= gifFrames[frameIndex].getDuration() ){
        frameIndex++;
        frameIndex %= getNumFrames();
        // Restart the frame timer.  Without this reset the elapsed time keeps
        // growing, so every subsequent draw() call would advance one frame
        // regardless of the per-frame duration.
        lastDrawn = ofGetElapsedTimef();
    }
    drawFrame( frameIndex, _x, _y );
}
Esempio n. 6
0
// TODO: seeking back may go beyond edge, same with forward (it might never find a keyframe)
bool SimpleVideo::seekToFrame( int iFrameNumber, AVFrame* pOutput )
{
	// Seek to iFrameNumber, decode it into pOutput (as RGB), and return
	// whether the seek+decode succeeded.
	if( ( iFrameNumber < 0 ) || ( iFrameNumber >= getNumFrames() ) )
	{
		return false;
	}

	bool bSucceeded = ( av_seek_frame( m_pFormatContext, m_iVideoStreamIndex, iFrameNumber, AVSEEK_FLAG_ANY ) >= 0 );
	if( bSucceeded )
	{
		bSucceeded = getNextFrameRaw();
		if( bSucceeded )
		{
			if( m_pFrame->key_frame == 1 )
			{
				// Landed directly on a keyframe: decode and we are done.
				convertFrameToRGB( pOutput );
				return true;
			}
			else
			{
				// seek backwards until I see a keyframe
				// NOTE(review): neither of the loops below is bounded, so a
				// stream with no earlier keyframe (or a failing
				// getNextFrameRaw) could loop past frame 0 — see the TODO
				// above this function.
				int currentFrameNumber = iFrameNumber - 1;
				av_seek_frame( m_pFormatContext, m_iVideoStreamIndex, currentFrameNumber, AVSEEK_FLAG_ANY );
				getNextFrameRaw();
				while( m_pFrame->key_frame == 0 )
				{
					--currentFrameNumber;
					av_seek_frame( m_pFormatContext, m_iVideoStreamIndex, currentFrameNumber, AVSEEK_FLAG_ANY );
					getNextFrameRaw();
				}

				// then read forward until I get back to my frame number
				++currentFrameNumber;
				getNextFrameRaw();
				while( currentFrameNumber < iFrameNumber )
				{
					++currentFrameNumber;
					getNextFrameRaw();
				}

				convertFrameToRGB( pOutput );
				return true;
			}
		}
	}

	// Fix: this function returns bool, but the original ended with
	// `return NULL;` — a pointer constant used as a boolean.
	return false;
}
Esempio n. 7
0
char *
// Format a /stopped/-style MI description of one thread (threadindexid >= 0)
// or of all threads (threadindexid < 0) into threaddescB, returning its
// C string.  Returns an empty string when the process is invalid or not
// stopped.
formatThreadInfo (StringB &threaddescB, SBProcess process, int threadindexid)
{
	logprintf (LOG_TRACE, "formatThreadInfo (0x%x, 0x%x, %d)\n", &threaddescB, &process, threadindexid);
	threaddescB.clear();
	if (!process.IsValid())
		return threaddescB.c_str();
	int pid=process.GetProcessID();
	int state = process.GetState ();
	if (state == eStateStopped) {
		int tmin, tmax;
		bool useindexid;
		if (threadindexid < 0) {
			// Negative id means "all threads": iterate by flat thread index.
			tmin = 0;
			tmax = process.GetNumThreads();
			useindexid = false;
		}
		else{
			// A specific thread was requested by its index id, so it must be
			// looked up with GetThreadByIndexID.  Fix: this previously
			// assigned false, which made the GetThreadByIndexID branch below
			// unreachable and misinterpreted the index id as a flat index.
			tmin = threadindexid;
			tmax = threadindexid+1;
			useindexid = true;
		}
		const char *separator="";
		for (int ithread=tmin; ithread<tmax; ithread++) {
			SBThread thread;
			if (useindexid)
				thread = process.GetThreadByIndexID(ithread);
			else
				thread = process.GetThreadAtIndex(ithread);
			if (!thread.IsValid())
				continue;
			int tid=thread.GetThreadID();
			threadindexid=thread.GetIndexID();
			int frames = getNumFrames (thread);
			if (frames > 0) {
				SBFrame frame = thread.GetFrameAtIndex(0);
				if (frame.IsValid()) {
					// Describe the thread via its topmost frame.
					char * framedescstr = formatFrame (frame, WITH_LEVEL_AND_ARGS);
					threaddescB.catsprintf (
						"%s{id=\"%d\",target-id=\"Thread 0x%x of process %d\",%s,state=\"stopped\"}",
						separator, threadindexid, tid, pid, framedescstr);
				}
			}
			separator=",";
		}
	}
	return threaddescB.c_str();
}
Esempio n. 8
0
void BaseFrameListStimulus::drawFrame(shared_ptr<StimulusDisplay> display) {
    // Draw the stimulus for the current frame, if there is one left to draw.
    const int currentFrame = getFrameNumber();
    if (currentFrame >= getNumFrames()) {
        // Past the end of the list: nothing to draw this cycle.
        return;
    }
    
    auto stim = getStimulusForFrame(currentFrame);
    if (stim->isLoaded()) {
        stim->draw(display);
    } else {
        // An unloaded frame is skipped with an error rather than drawn.
        merror(M_DISPLAY_MESSAGE_DOMAIN,
               "Stimulus \"%s\" (frame number %d of stimulus \"%s\") is not loaded and will not be displayed",
               stim->getTag().c_str(),
               currentFrame,
               getTag().c_str());
    }
    lastFrameDrawn = currentFrame;
}
Esempio n. 9
0
void SpriteSequence::openFrameSequence() {
	if (!isSequenceOpen()) {
		FrameSequence::openFrameSequence();

		if (isSequenceOpen()) {
			uint32 numFrames = getNumFrames();

			for (uint32 i = 0; i < numFrames; ++i) {
				SpriteFrame *frame = new SpriteFrame();
				frame->initFromPICTResource(_resFork, i + 0x80, _transparent);
				_sprite.addFrame(frame, 0, 0);
			}

			_sprite.setBounds(_bounds);
		}
	}
}
Esempio n. 10
0
// Map the nominal (elapsed) frame number onto the frame list, accounting for
// looping and repeat counts, and fire the one-shot "ending"/"ended"
// notification variables at the appropriate moments.  Returns -1 when the
// stimulus is not playing; may return a value >= getNumFrames() once the
// final repetition has finished.
int BaseFrameListStimulus::getFrameNumber() {
    if (!isPlaying()) {
        return -1;
    }
    
    int frameNumber = getNominalFrameNumber();
    const int numFrames = getNumFrames();
    
    if (bool(loop->getValue())) {
        // We're looping, so just return the wrapped frame number, never triggering ending or ended
        return frameNumber % numFrames;
    }
    
    const int numRepeats = int(repeats->getValue());
    if ((numRepeats > 0) && (frameNumber < numFrames * numRepeats)) {
        if (frameNumber < numFrames * (numRepeats - 1)) {
            // We aren't yet in the last repetition cycle, so just return the wrapped frame number, without
            // (potentially) triggering ending or ended
            return frameNumber % numFrames;
        }
        // We're in the last repetition cycle: wrap the frame number but fall
        // through so that ending/ended can fire below.
        frameNumber %= numFrames;
    }
    
    //
    // This method can be called multiple times during the drawing cycle.  Therefore, we use
    // didSetEnding and didSetEnded to ensure that ending and ended are set only once during
    // a given play-through.
    //
    if ((frameNumber == numFrames - 1) && (ending != NULL) && (ending->getValue().getInteger() == 0)) {
        // Entering the final frame: announce "ending" exactly once.
        if (!didSetEnding) {
            ending->setValue(true);
            didSetEnding = true;
        }
    } else if ((frameNumber >= numFrames) && (ended != NULL) && (ended->getValue().getInteger() == 0)) {
        // Past the final frame: announce "ended" exactly once.
        if (!didSetEnded) {
            ended->setValue(true);
            didSetEnded = true;
        }
    }
    
    return frameNumber;
}
Esempio n. 11
0
Datum BaseFrameListStimulus::getCurrentAnnounceDrawData() {
    // Start from the base-class announcement and layer our own fields on top.
    Datum announceData = StandardDynamicStimulus::getCurrentAnnounceDrawData();
    
    if (stimulusGroup) {
        announceData.addElement(STIMULUS_GROUP, stimulusGroup->getTag());
    }
    
    announceData.addElement(LOOP, loop->getValue());
    announceData.addElement("playing", Datum(isPlaying()));
    
    const int currentFrame = getFrameNumber();
    announceData.addElement("current_frame", Datum((long)currentFrame));
    
    // Only announce a per-frame stimulus when the frame index refers to a
    // real frame; otherwise report the placeholder 0L.
    Datum frameStimulusData(0L);
    const bool frameIsValid = (currentFrame >= 0) && (currentFrame < getNumFrames());
    if (frameIsValid) {
        frameStimulusData = getStimulusForFrame(currentFrame)->getCurrentAnnounceDrawData();
    }
    announceData.addElement("current_stimulus", frameStimulusData);
    
    return announceData;
}
// Prepare the XAUDIO2_BUFFER the source voice plays from and start the voice.
// Only mono and stereo outputs are supported.
void OutputDeviceNodeXAudio::initialize()
{
	CI_ASSERT_MSG( getNumChannels() <= 2, "number of channels greater than 2 is not supported." );

	auto internalBuffer = getInternalBuffer();
	size_t numSamples = internalBuffer->getSize();

	// Zero the buffer descriptor before filling in the fields we use.
	memset( &mXAudioBuffer, 0, sizeof( mXAudioBuffer ) );
	mXAudioBuffer.AudioBytes = numSamples * sizeof( float );
	if( getNumChannels() == 2 ) {
		// setup stereo, XAudio2 requires interleaved samples so point at interleaved buffer
		mBufferInterleaved = BufferInterleaved( internalBuffer->getNumFrames(), internalBuffer->getNumChannels() );
		mXAudioBuffer.pAudioData = reinterpret_cast<BYTE *>( mBufferInterleaved.getData() );
	}
	else {
		// setup mono: the internal buffer can be played directly
		mXAudioBuffer.pAudioData = reinterpret_cast<BYTE *>( internalBuffer->getData() );
	}

	initSourceVoice();
}
Esempio n. 13
0
void BaseFrameListStimulus::unload(shared_ptr<StimulusDisplay> display) {
    // Unload each frame's stimulus in order, then clear our own loaded flag.
    int frame = 0;
    while (frame < getNumFrames()) {
        getStimulusForFrame(frame)->unload(display);
        ++frame;
    }
    loaded = false;
}
Esempio n. 14
0
void BaseFrameListStimulus::freeze(bool shouldFreeze) {
    // Propagate the freeze state to the base class first, then to every
    // frame's stimulus.
    StandardDynamicStimulus::freeze(shouldFreeze);
    int frame = 0;
    while (frame < getNumFrames()) {
        getStimulusForFrame(frame)->freeze(shouldFreeze);
        ++frame;
    }
}
Esempio n. 15
0
// Open a sound file for streamed playback, (re)allocating the double
// buffers and pre-filling them.  Returns 0 on success, 1 on failure.
int SampleStream::openFile(const char* filename, int numChannels, int bufferLength) {
    
    // Mark the stream busy while buffers are being swapped out.  Every exit
    // path must clear this again; previously the error returns left gBusy
    // stuck at 1.
    gBusy = 1;
    
    sf_close(sndfile);
    sfinfo.format = 0; 
    if (!(sndfile = sf_open (string(filename).c_str(), SFM_READ, &sfinfo))) {
		cout << "Couldn't open file " << filename << ": " << sf_strerror(sndfile) << endl;
		gBusy = 0;
		return 1;
	}
    
    gBufferLength = bufferLength;
    gNumChannels = numChannels;
    gFilename = filename;
    
    // Free any buffers left over from a previously opened file.
    // NOTE(review): this frees the old per-channel sample arrays using the
    // NEW channel count; if a previous file had fewer channels this reads
    // past the old allocation — confirm callers keep numChannels constant
    // across openFile calls.
    for(int i=0;i<2;i++) {
        if(gSampleBuf[i] != NULL) {
            cout << "I AM NOT A NULL" << endl;
            for(int ch=0;ch<numChannels;ch++) {
                if(gSampleBuf[i][ch].samples != NULL)
                    delete[] gSampleBuf[i][ch].samples;
            }
            free(gSampleBuf[i]);
        }
    }
    
    // Double-buffered sample storage: one buffer plays while the other loads.
    gSampleBuf[0] = (SampleData*)calloc(bufferLength*numChannels,sizeof(SampleData));
    gSampleBuf[1] = (SampleData*)calloc(bufferLength*numChannels,sizeof(SampleData));
    
    // Reset streaming / playback state.
    gReadPtr = bufferLength;
    gBufferReadPtr = 0;
    gActiveBuffer = 0;
    gDoneLoadingBuffer = 1;
    gBufferToBeFilled = 0;
    
    gPlaying = 0;
    gFadeAmount = 0;
    gFadeLengthInSeconds = 0.1;
    gFadeDirection = -1;
    
    gNumFramesInFile = getNumFrames(gFilename);
    
    if(gNumFramesInFile <= gBufferLength) {
        printf("Sample needs to be longer than buffer size. This example is intended to work with long samples.");
        gBusy = 0;
        return 1;
    }
    
    // Pre-fill both buffers with the first gBufferLength frames per channel.
    for(int ch=0;ch<gNumChannels;ch++) {
        for(int i=0;i<2;i++) {
            gSampleBuf[i][ch].sampleLen = gBufferLength;
        	gSampleBuf[i][ch].samples = new float[gBufferLength];
            if(getSamples(gFilename,gSampleBuf[i][ch].samples,ch,0,gBufferLength)) {
                printf("error getting samples\n");
                gBusy = 0;
                return 1;
            }
        }
    }
    
    cout << "Loaded " << filename << endl;
    
    gBusy = 0;
    
    return 0;
    
}
Esempio n. 16
0
  // Pull audio from the cinder graph, mix in any WebAudio input, and copy
  // the result onto the WebAudio output buffer (channel 0).
  void OutputDeviceNodeWebAudio::renderInputs( emscripten::val e )
  {
    auto outputBuffer = e["outputBuffer"];
    auto inputBuffer = e["inputBuffer"];

    auto ctx = getContext();
    if( ! ctx )
    {
       CI_LOG_I( "can't get context" );
       return;
    }

    // this technically doesn't do anything but leave it here for now.
    lock_guard<mutex> lock( ctx->getMutex() );

    ctx->preProcess();

    auto internalBuffer = getInternalBuffer();
    internalBuffer->zero();
    pullInputs( internalBuffer );

    // Silence the output on clipping rather than emitting garbage.
    if( checkNotClipping() )
    {
      internalBuffer->zero();
    }

    const size_t numFrames = internalBuffer->getNumFrames();

    ctx->postProcess();

    // make sure that the ScriptNodeProcessor's bufferSize prop is set properly.
    mImpl->setOutputBufferSize( numFrames );

    // output array is what gets directed towards speakers
    val output = outputBuffer.call<val>( "getChannelData", 0 );

    // input is only going to be populated when using native WebAudio nodes.
    val input = inputBuffer.call<val>( "getChannelData", 0 );

    // Check the first 100 samples to decide whether the input buffer holds
    // any data; if it is silent we skip the mix step entirely.
    // NOTE(review): assumes the input buffer has at least 100 samples — confirm.
    bool inputIsEmpty = true;
    for( int a = 0; a < 100; ++a )
    {
      if( input[a].as<float>() != 0.0 )
      {
        inputIsEmpty = false;
        break;
      }
    }

    // internal cinder data, which is what ultimately reaches the speakers
    float * data = static_cast<float*>( internalBuffer->getData() );

    if( ! inputIsEmpty )
    {
      // copy Float32Array (aka "input") into a local vector so the dsp
      // routines can be handed a float*
      std::vector<float> inputData;
      ci::em::helpers::copyToVector( input, inputData );

      // Mix the WebAudio input into the internal buffer in place.
      // Fix: the original called dsp::add with an *uninitialized* float*
      // as the destination (undefined behavior), and the source vector
      // went out of scope before the result was consumed.
      dsp::add( inputData.data(), data, data, numFrames );
    }

    // loop through and copy the mixed samples onto the output buffer.
    for( size_t i = 0; i < numFrames; ++i )
    {
       output.set<float>( i, data[i] );
    }
  }
Esempio n. 17
0
// Allocate the per-batch bookkeeping (frame paths, positions, start/skip
// indices) and then delegate to Image::allocateDataStructures(), which
// triggers the first getFrame().  Exits the process on allocation failure
// or inconsistent batchMethod parameters.
int Movie::allocateDataStructures() {


   //Allocate framePaths here before image, since allocate call will call getFrame

   if(parent->icCommunicator()->commRank()==0){
      framePath = (char**) malloc(parent->getNBatch() * sizeof(char*));
      assert(framePath);
      for(int b = 0; b < parent->getNBatch(); b++){
         framePath[b] = NULL;
      }
   }
   
   batchPos = (long*) malloc(parent->getNBatch() * sizeof(long));
   if(batchPos==NULL) {
      fprintf(stderr, "%s \"%s\" error allocating memory for batchPos (batch size %d): %s\n",
            name, getKeyword(), parent->getNBatch(), strerror(errno));
      exit(EXIT_FAILURE);
   }
   for(int b = 0; b < parent->getNBatch(); b++){
      batchPos[b] = 0L;
   }
   frameNumbers = (int*) calloc(parent->getNBatch(), sizeof(int));
   if (frameNumbers==NULL) {
      fprintf(stderr, "%s \"%s\" error allocating memory for frameNumbers (batch size %d): %s\n",
            name, getKeyword(), parent->getNBatch(), strerror(errno));
      exit(EXIT_FAILURE);
   }

   //Calculate file positions for beginning of each frame
   numFrames = getNumFrames();
   std::cout << "File " << inputPath << " contains " << numFrames << " frames\n";

   startFrameIndex = (int*)calloc(parent->getNBatch(), sizeof(int));
   assert(startFrameIndex);
   skipFrameIndex = (int*)calloc(parent->getNBatch(), sizeof(int));
   assert(skipFrameIndex);

   int nbatch = parent->getNBatch();
   assert(batchMethod);

   if(strcmp(batchMethod, "byImage") == 0){
      //No skip here allowed
      if(numSkipFrame != 0){
         std::cout << "Movie layer " << name << " batchMethod of \"byImage\" sets skip_frame_index, do not specify.\n"; 
         exit(-1);
      }

      int offset = 0;
      //Default value
      if(numStartFrame == 0){
      }
      //Uniform start array
      else if(numStartFrame == 1){
         offset = *paramsStartFrameIndex;
      }
      else{
         std::cout << "Movie layer " << name << " batchMethod of \"byImage\" requires 0 or 1 start_frame_index values\n"; 
         exit(-1);
      }
      //Allocate and default
      //Not done in allocate, as Image Allocate needs this parameter to be set
      //Each batch element walks the frames interleaved with the others.
      int kb0 = getLayerLoc()->kb0;
      int nbatchGlobal = getLayerLoc()->nbatchGlobal;
      for(int b = 0; b < nbatch; b++){ 
         startFrameIndex[b] = offset + kb0 + b;
         skipFrameIndex[b] = nbatchGlobal;
      }
   }
   else if (strcmp(batchMethod, "byMovie") == 0){
      //No skip here allowed
      //Fix: this message previously said "byImage" even though we are in
      //the "byMovie" branch.
      if(numSkipFrame != 0){
         std::cout << "Movie layer " << name << " batchMethod of \"byMovie\" sets skip_frame_index, do not specify.\n"; 
         exit(-1);
      }
      
      int offset = 0;
      //Default value
      if(numStartFrame== 0){
      }
      //Uniform start array
      else if(numStartFrame== 1){
         offset = *paramsStartFrameIndex;
      }
      else{
         std::cout << "Movie layer " << name << " batchMethod of \"byMovie\" requires 0 or 1 start_frame_index values\n"; 
         exit(-1);
      }

      int nbatchGlobal = getLayerLoc()->nbatchGlobal;
      int kb0 = getLayerLoc()->kb0;

      //Each batch element gets a contiguous slice of the movie.
      int framesPerBatch = floor(numFrames/nbatchGlobal);
      if(framesPerBatch < 1){
         framesPerBatch = 1;
      }
      for(int b = 0; b < nbatch; b++){ 
         //+1 for 1 indexed
         startFrameIndex[b] = offset + ((b+kb0)*framesPerBatch);
         skipFrameIndex[b] = 1;
      }
   }
   else if(strcmp(batchMethod, "bySpecified") == 0){
      //Start/skip indices are taken verbatim from the params arrays.
      if(numStartFrame != nbatch && numStartFrame != 0){
         std::cout << "Movie layer " << name << " batchMethod of \"bySpecified\" requires " << nbatch << " start_frame_index values\n"; 
         exit(-1);
      }
      if(numSkipFrame != nbatch && numSkipFrame != 0){
         std::cout << "Movie layer " << name << " batchMethod of \"bySpecified\" requires " << nbatch << " skip_frame_index values\n"; 
         exit(-1);
      }
      for(int b = 0; b < nbatch; b++){ 
         if(numStartFrame == 0){
            //+1 for 1 indexed
            startFrameIndex[b] = 0;
         }
         else{
            startFrameIndex[b] = paramsStartFrameIndex[b];
         }
         if(numSkipFrame == 0){
            skipFrameIndex[b] = 1;
         }
         else{
            skipFrameIndex[b] = paramsSkipFrameIndex[b];
         }
      }
   }
   else{
      //This should never execute, as this check was done in the reading of this parameter
      assert(0);
   }
   if (parent->columnId()==0) {
      for (int b=0; b<parent->getNBatch(); b++) {
         frameNumbers[b] = -1;
      }
   }

   //Call Image allocate, which will call getFrame
   int status = Image::allocateDataStructures();

   //if (!randomMovie) {
      //assert(!parent->parameters()->presentAndNotBeenRead(name, "start_frame_index"));
      //assert(!parent->parameters()->presentAndNotBeenRead(name, "skip_frame_index"));

      //assert(!parent->parameters()->presentAndNotBeenRead(name, "autoResizeFlag"));
      //if (!autoResizeFlag){
      //   constrainOffsets();  // ensure that offsets keep loc within image bounds
      //}

      // status = readImage(filename, getOffsetX(), getOffsetY()); // readImage already called by Image::allocateDataStructures(), above
      //assert(status == PV_SUCCESS);
   //}
   //else {
   //   if (randState==NULL) {
   //      initRandState();
   //   }
   //   status = randomFrame();
   //}

   return status;
}
Esempio n. 18
0
// by now we're copying everything (no pointers)
// Append a frame to the gif.  The first frame is stored as-is; later frames
// are composited against the accumulated canvas (accumPx) according to the
// GIF disposal method, so that partially transparent / partial-area frames
// render correctly.
void ofxGifFile::addFrame(ofPixels _px, int _left, int _top, bool useTexture, GifFrameDisposal disposal, float _duration){
    ofxGifFrame f;

    if(getNumFrames() == 0){
        accumPx = _px; // we assume 1st frame is fully drawn
        if ( !useTexture ){
            f.setUseTexture(false);
        }
        f.setFromPixels(_px , _left, _top, _duration);
		gifDuration = _duration;
    } else {
        // add new pixels to accumPx
        int cropOriginX = _left;
        int cropOriginY = _top;
    
        // [todo] make this loop only travel through _px, not accumPx
        for (int i = 0; i < accumPx.getWidth() * accumPx.getHeight(); i++) {
            int x = i % accumPx.getWidth();
            int y = i / accumPx.getWidth();
            
            // Inside the new frame's rectangle: composite _px over accumPx.
            if (x >= _left  && x < _left + _px.getWidth()  &&
                y >= _top   && y < _top  + _px.getHeight()){
                int cropX = x - cropOriginX;  //   (i - _left) % _px.getWidth();
                int cropY = y - cropOriginY;
                //int cropI = cropX + cropY * _px.getWidth();
                // Transparent pixel in the new frame: fill it per the
                // disposal method (background color or previous contents).
                if ( _px.getColor(cropX, cropY).a == 0 ){
                    switch ( disposal ) {
                        case GIF_DISPOSAL_BACKGROUND:
                            _px.setColor(x,y,bgColor);
                            break;
                            
                        case GIF_DISPOSAL_LEAVE:
                        case GIF_DISPOSAL_UNSPECIFIED:
                            _px.setColor(x,y,accumPx.getColor(cropX, cropY));
//                            accumPx.setColor(x,y,_px.getColor(cropX, cropY));
                            break;
                            
                        case GIF_DISPOSAL_PREVIOUS:
                            _px.setColor(x,y,accumPx.getColor(cropX, cropY));
                            break;
                    }
                } else {
                    // Opaque pixel: it becomes part of the accumulated canvas.
                    accumPx.setColor(x, y, _px.getColor(cropX, cropY) );
                }
            } else {
                // Outside the new frame's rectangle.
                // NOTE(review): _px is indexed with canvas coordinates (x, y)
                // here even though x/y can exceed _px's bounds outside the
                // rectangle — confirm ofPixels tolerates this for the sizes
                // used by callers.
                if ( _px.getColor(x, y) == bgColor ){
                    switch ( disposal ) {
                        case GIF_DISPOSAL_BACKGROUND:
                            accumPx.setColor(x,y,bgColor);
                            break;
                            
                        case GIF_DISPOSAL_UNSPECIFIED:
                        case GIF_DISPOSAL_LEAVE:
                            accumPx.setColor(x,y,_px.getColor(x, y));
                            break;
                            
                        case GIF_DISPOSAL_PREVIOUS:
                            _px.setColor(x,y,accumPx.getColor(x, y));
                            break;
                    }
                } else {
                    accumPx.setColor(x, y, _px.getColor(x, y) );
                }
            }
        }
        
        if ( !useTexture ){
            f.setUseTexture(false);
        }
        f.setFromPixels(_px,_left, _top, _duration);
    }
    // NOTE(review): this overwrites the accumulated canvas with the (already
    // composited) frame pixels — verify this is intended rather than keeping
    // the per-pixel accumPx updates made above.
    accumPx = _px;
    
    //
    gifFrames.push_back(f);
}
Esempio n. 19
0
// Length of the recording in seconds: recorded frames divided by the
// sample rate.
double BufferRecorderNode::getNumSeconds() const
{
	const double frames = static_cast<double>( getNumFrames() );
	const double rate = static_cast<double>( getSampleRate() );
	return frames / rate;
}