//--------------------------------------------------------
void ofQuickTimePlayer::play(){
	if( !isLoaded() ){
		ofLog(OF_LOG_ERROR, "ofQuickTimePlayer::play - movie not loaded!");
		return;
	}

	bPlaying = true;
	bPaused = false;
	
	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

	if (!bStarted){
	 	start();
	}else {
		// ------------ lower level "startMovie"
		// ------------ use desired speed & time (-1,1,etc) to Preroll...
		TimeValue timeNow;
	   	timeNow = GetMovieTime(moviePtr, nil);
		PrerollMovie(moviePtr, timeNow, X2Fix(speed));
		SetMovieRate(moviePtr,  X2Fix(speed));
		MoviesTask(moviePtr, 0);
	}
	
	//--------------------------------------
	#endif
	//--------------------------------------

	// in case speed was set before play: the movie rate can only be applied while the movie is playing.
	setSpeed(speed);

}
//--------------------------------------------------------
void ofQuickTimePlayer::start(){

	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

	if (bLoaded == true && bStarted == false){
		SetMovieActive(moviePtr, true);

		//------------------ set the movie rate to default
		//------------------ and preroll, so the first frames come correct

		TimeValue timeNow 	= 	GetMovieTime(moviePtr, 0);
		Fixed playRate 		=	GetMoviePreferredRate(moviePtr); 		//Not being used!

		PrerollMovie(moviePtr, timeNow, X2Fix(speed));
		SetMovieRate(moviePtr,  X2Fix(speed));
		setLoopState(currentLoopState);

		// get some pixels in there right away:
		MoviesTask(moviePtr,0);
		#if defined(TARGET_OSX) && defined(__BIG_ENDIAN__)
			convertPixels(offscreenGWorldPixels, pixels.getPixels(), width, height);
		#endif
		bHavePixelsChanged = true;

		bStarted = true;
		bPlaying = true;
	}

	//--------------------------------------
	#endif
	//--------------------------------------
}
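//--------------------------------------------------------
// A hedged sketch, not part of the original ofQuickTimePlayer source: QuickTime
// has no separate pause call, so a matching pause/resume path is usually built
// from SetMovieRate(), setting the rate to zero to pause and restoring the
// stored speed to resume. The function name and parameters here are hypothetical.
static void quicktimePauseSketch(Movie moviePtr, float speed, bool bPause){
	SetMovieRate(moviePtr, bPause ? X2Fix(0) : X2Fix(speed));
	MoviesTask(moviePtr, 0);	// give QuickTime a task slice so the new rate takes effect
}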
/*
 *  PsychQTGetMovieTimeIndex()  -- Return current playback time of movie.
 */
double PsychQTGetMovieTimeIndex(int moviehandle)
{
    Movie   theMovie;
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;    
    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle !!!");
    }

    // Retrieve timeindex:
    return((double) GetMovieTime(theMovie, NULL) / (double) GetMovieTimeScale(theMovie));
}
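/*
 * Hedged helper sketch (not part of the original file): GetMovieTime() and
 * SetMovieTimeValue() work in integer ticks of the movie's time scale, so the
 * conversion used above (and its inverse, used by the seeking code further
 * below) is simply ticks/scale and seconds*scale. The names are hypothetical.
 */
static double QTTicksToSeconds(Movie theMovie, TimeValue ticks)
{
    return (double) ticks / (double) GetMovieTimeScale(theMovie);
}

static TimeValue QTSecondsToTicks(Movie theMovie, double seconds)
{
    // round to the nearest tick, as the seek code below does with + 0.5
    return (TimeValue) (seconds * (double) GetMovieTimeScale(theMovie) + 0.5);
}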
// Example #4
OSErr QTInfo_SetPosterToFrame (Movie theMovie, MovieController theMC)
{
    TimeValue			myTime;
    ComponentResult		myErr = noErr;

    // stop the movie from playing
    myErr = MCDoAction(theMC, mcActionPlay, (void *)0L);
    if (myErr != noErr)
        goto bail;

    myTime = GetMovieTime(theMovie, NULL);
    SetMoviePosterTime(theMovie, myTime);

    myErr = MCMovieChanged(theMC, theMovie);

bail:
    return((OSErr)myErr);
}
// Example #5
ComponentResult FFAvi_MovieImportIdle(ff_global_ptr storage, long inFlags, long *outFlags) {
	ComponentResult err = noErr;
	TimeValue currentIdleTime = GetMovieTime(storage->movie, NULL);
	TimeScale movieTimeScale = GetMovieTimeScale(storage->movie);
	int addSamples = false;
	
	storage->idlesSinceLastAdd++;
	
	if ((currentIdleTime == storage->lastIdleTime && storage->idlesSinceLastAdd > 5) || 
		storage->loadedTime < currentIdleTime + 5*movieTimeScale)
	{
		storage->idlesSinceLastAdd = 0;
		addSamples = true;
	}
	
	err = import_with_idle(storage, inFlags | movieImportWithIdle, outFlags, 0, 1000, addSamples);
	
	storage->lastIdleTime = currentIdleTime;
	return err;
}
//---------------------------------------------------------------------------
float ofQuickTimePlayer::getPosition(){
	if( !isLoaded() ){
		ofLog(OF_LOG_ERROR, "ofQuickTimePlayer: movie not loaded!");
		return 0.0;
	}
	
	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

		long total 		= GetMovieDuration(moviePtr);
		long current 	= GetMovieTime(moviePtr, nil);
		float pct 		= ((float)current/(float)total);
		return pct;

	//--------------------------------------
	#endif
	//--------------------------------------


}
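//---------------------------------------------------------------------------
// Hedged sketch of the inverse operation (not quoted from ofQuickTimePlayer):
// seeking to a normalized 0..1 position scales the fraction back into movie
// time units and hands it to SetMovieTimeValue(). The function name is hypothetical.
static void quicktimeSetPositionSketch(Movie moviePtr, float pct){
	TimeValue total  = GetMovieDuration(moviePtr);
	TimeValue target = (TimeValue)((double)total * pct + 0.5);
	SetMovieTimeValue(moviePtr, target);
	MoviesTask(moviePtr, 0);	// let QuickTime update to the new position
}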
// Example #7
/**
 * Checks whether the player is active.
 *
 * This function is called at regular intervals from OpenTTD's main loop, so
 * we call @c MoviesTask() from here to let QuickTime do its work.
 */
bool MusicDriver_QtMidi::IsSongPlaying()
{
	if (!_quicktime_started) return true;

	switch (_quicktime_state) {
		case QT_STATE_IDLE:
		case QT_STATE_STOP:
			/* Do nothing. */
			break;

		case QT_STATE_PLAY:
			MoviesTask(_quicktime_movie, 0);
			/* Check whether the movie ended. */
			if (IsMovieDone(_quicktime_movie) ||
					(GetMovieTime(_quicktime_movie, NULL) >=
					 GetMovieDuration(_quicktime_movie))) {
				_quicktime_state = QT_STATE_STOP;
			}
	}

	return _quicktime_state == QT_STATE_PLAY;
}
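/*
 * Hedged sketch (not the OpenTTD source): the QT_STATE_PLAY state polled by the
 * function above is typically entered by a start routine along these lines.
 * The function name is hypothetical; _quicktime_movie and _quicktime_state are
 * the statics referenced above.
 */
static void QtMidi_StartSong_sketch()
{
	GoToBeginningOfMovie(_quicktime_movie);
	StartMovie(_quicktime_movie);        /* play from the start at the preferred rate */
	_quicktime_state = QT_STATE_PLAY;    /* IsSongPlaying() will now call MoviesTask() */
}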
//---------------------------------------------------------------------------
float ofVideoPlayer::getPosition(){

	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

		long total 		= GetMovieDuration(moviePtr);
		long current 	= GetMovieTime(moviePtr, nil);
		float pct 		= ((float)current/(float)total);
		return pct;

	//--------------------------------------
	#else
	//--------------------------------------

		return (float) fobsDecoder->getFrameTime() / getDuration();

	//--------------------------------------
	#endif
	//--------------------------------------


}
//---------------------------------------------------------------------------
float ofxAlphaVideoPlayer::getPosition(){

	//--------------------------------------
	#ifdef OF_VIDEO_PLAYER_QUICKTIME
	//--------------------------------------

		long total 		= GetMovieDuration(moviePtr);
		long current 	= GetMovieTime(moviePtr, nil);
		float pct 		= ((float)current/(float)total);
		return pct;

	//--------------------------------------
	#else
	//--------------------------------------

		return gstUtils.getPosition();

	//--------------------------------------
	#endif
	//--------------------------------------


}
// Example #10
static DWORD WINAPI QTSplitter_thread(LPVOID data)
{
    QTSplitter *This = (QTSplitter *)data;
    HRESULT hr = S_OK;
    TimeValue next_time;
    CVPixelBufferRef pixelBuffer = NULL;
    OSStatus err;
    TimeRecord tr;

    WaitForSingleObject(This->runEvent, -1);

    EnterCriticalSection(&This->csReceive);
    if (!This->pQTMovie)
    {
        LeaveCriticalSection(&This->csReceive);
        return 0;
    }

    This->state = State_Running;
    /* Prime the pump:  Needed for MPEG streams */
    GetMovieNextInterestingTime(This->pQTMovie, nextTimeEdgeOK | nextTimeStep, 0, NULL, This->movie_time, 1, &next_time, NULL);

    GetMovieTime(This->pQTMovie, &tr);

    if (This->pAudio_Pin)
        QT_Create_Extract_Session(This);

    LeaveCriticalSection(&This->csReceive);

    do
    {
        LONGLONG tStart=0, tStop=0;
        LONGLONG mStart=0, mStop=0;
        float time;

        EnterCriticalSection(&This->csReceive);
        if (!This->pQTMovie)
        {
            LeaveCriticalSection(&This->csReceive);
            return 0;
        }

        GetMovieNextInterestingTime(This->pQTMovie, nextTimeStep, 0, NULL, This->movie_time, 1, &next_time, NULL);

        if (next_time == -1)
        {
            TRACE("No next time\n");
            LeaveCriticalSection(&This->csReceive);
            break;
        }

        tr.value = SInt64ToWide(next_time);
        SetMovieTime(This->pQTMovie, &tr);
        MoviesTask(This->pQTMovie,0);
        QTVisualContextTask(This->vContext);

        TRACE("In loop at time %ld\n",This->movie_time);
        TRACE("In Next time %ld\n",next_time);

        mStart = This->movie_time;
        mStop = next_time;

        time = (float)(This->movie_time - This->movie_start) / This->movie_scale;
        tStart = time * 10000000;
        time = (float)(next_time - This->movie_start) / This->movie_scale;
        tStop = time * 10000000;

        /* Deliver Audio */
        if (This->pAudio_Pin && This->pAudio_Pin->pin.pin.pConnectedTo && This->aSession)
        {
            int data_size=0;
            BYTE* ptr;
            IMediaSample *sample = NULL;
            AudioBufferList aData;
            UInt32 flags;
            UInt32 frames;
            WAVEFORMATEX* pvi;
            float duration;

            pvi = (WAVEFORMATEX*)This->pAudio_Pin->pmt->pbFormat;

            hr = BaseOutputPinImpl_GetDeliveryBuffer(&This->pAudio_Pin->pin, &sample, NULL, NULL, 0);

            if (FAILED(hr))
            {
                ERR("Audio: Unable to get delivery buffer (%x)\n", hr);
                goto audio_error;
            }

            hr = IMediaSample_GetPointer(sample, &ptr);
            if (FAILED(hr))
            {
                ERR("Audio: Unable to get pointer to buffer (%x)\n", hr);
                goto audio_error;
            }

            duration = (float)next_time / This->movie_scale;
            time = (float)This->movie_time / This->movie_scale;
            duration -= time;
            frames = pvi->nSamplesPerSec * duration;
            TRACE("Need audio for %f seconds (%li frames)\n",duration,frames);

            data_size = IMediaSample_GetSize(sample);
            if (data_size < frames * pvi->nBlockAlign)
                FIXME("Audio buffer is too small\n");

            aData.mNumberBuffers = 1;
            aData.mBuffers[0].mNumberChannels = pvi->nChannels;
            aData.mBuffers[0].mDataByteSize = data_size;
            aData.mBuffers[0].mData = ptr;

            err = MovieAudioExtractionFillBuffer(This->aSession, &frames, &aData, &flags);
            if (frames == 0)
            {
                TimeRecord etr;

                /* Ran out of frames, Restart the extraction session */
                TRACE("Restarting extraction session\n");
                MovieAudioExtractionEnd(This->aSession);
                This->aSession = NULL;
                QT_Create_Extract_Session(This);

                etr = tr;
                etr.value = SInt64ToWide(This->movie_time);
                MovieAudioExtractionSetProperty(This->aSession,
                    kQTPropertyClass_MovieAudioExtraction_Movie,
                    kQTMovieAudioExtractionMoviePropertyID_CurrentTime,
                    sizeof(TimeRecord), &etr );

                frames = pvi->nSamplesPerSec * duration;
                aData.mNumberBuffers = 1;
                aData.mBuffers[0].mNumberChannels = pvi->nChannels;
                aData.mBuffers[0].mDataByteSize = data_size;
                aData.mBuffers[0].mData = ptr;

                MovieAudioExtractionFillBuffer(This->aSession, &frames, &aData, &flags);
            }

            TRACE("Got %i frames\n",(int)frames);

            IMediaSample_SetActualDataLength(sample, frames * pvi->nBlockAlign);

            IMediaSample_SetMediaTime(sample, &mStart, &mStop);
            IMediaSample_SetTime(sample, &tStart, &tStop);

            hr = OutputQueue_Receive(This->pAudio_Pin->queue, sample);
            TRACE("Audio Delivered (%x)\n",hr);

audio_error:
            if (sample)
                IMediaSample_Release(sample);
        }
        else
            TRACE("Audio Pin not connected or no Audio\n");

        /* Deliver Video */
        if (This->pVideo_Pin && QTVisualContextIsNewImageAvailable(This->vContext,0))
        {
            err = QTVisualContextCopyImageForTime(This->vContext, NULL, NULL, &pixelBuffer);
            if (err == noErr)
            {
                int data_size=0;
                BYTE* ptr;
                IMediaSample *sample = NULL;

                hr = BaseOutputPinImpl_GetDeliveryBuffer(&This->pVideo_Pin->pin, &sample, NULL, NULL, 0);
                if (FAILED(hr))
                {
                    ERR("Video: Unable to get delivery buffer (%x)\n", hr);
                    goto video_error;
                }

                data_size = IMediaSample_GetSize(sample);
                if (data_size < This->outputSize)
                {
                    ERR("Sample size is too small %d < %d\n", data_size, This->outputSize)
    ;
                    hr = E_FAIL;
                    goto video_error;
                }

                hr = IMediaSample_GetPointer(sample, &ptr);
                if (FAILED(hr))
                {
                    ERR("Video: Unable to get pointer to buffer (%x)\n", hr);
                    goto video_error;
                }

                hr = AccessPixelBufferPixels( pixelBuffer, ptr);
                if (FAILED(hr))
                {
                    ERR("Failed to access Pixels\n");
                    goto video_error;
                }

                IMediaSample_SetActualDataLength(sample, This->outputSize);

                IMediaSample_SetMediaTime(sample, &mStart, &mStop);
                IMediaSample_SetTime(sample, &tStart, &tStop);

                hr = OutputQueue_Receive(This->pVideo_Pin->queue, sample);
                TRACE("Video Delivered (%x)\n",hr);

    video_error:
                if (sample)
                    IMediaSample_Release(sample);
                if (pixelBuffer)
                    CVPixelBufferRelease(pixelBuffer);
            }
        }
        else
            TRACE("No video to deliver\n");

        This->movie_time = next_time;
        LeaveCriticalSection(&This->csReceive);
    } while (hr == S_OK);

    This->state = State_Stopped;
    if (This->pAudio_Pin)
        OutputQueue_EOS(This->pAudio_Pin->queue);
    if (This->pVideo_Pin)
        OutputQueue_EOS(This->pVideo_Pin->queue);

    return hr;
}
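/*
 * Hedged helper sketch (not part of the Wine source above): the conversion the
 * splitter thread performs inline, from QuickTime movie time (ticks of
 * movie_scale, relative to movie_start) to DirectShow REFERENCE_TIME in
 * 100-nanosecond units. The function name is hypothetical.
 */
static LONGLONG movietime_to_reftime(TimeValue t, TimeValue movie_start, TimeScale movie_scale)
{
    double seconds = (double)(t - movie_start) / (double)movie_scale;
    return (LONGLONG)(seconds * 10000000.0);
}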
/*
 *  PsychQTSetMovieTimeIndex()  -- Set current playback time of movie.
 */
double PsychQTSetMovieTimeIndex(int moviehandle, double timeindex, psych_bool indexIsFrames)
{
    Movie		theMovie;
    double		oldtime;
	long		targetIndex, myIndex;
    short		myFlags;
    TimeValue	myTime;
    OSType		myTypes[1];
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;    
    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle !!!");
    }
    
    // Retrieve current timeindex:
    oldtime = (double) GetMovieTime(theMovie, NULL) / (double) GetMovieTimeScale(theMovie);
    
	// Index based or target time based seeking?
	if (indexIsFrames) {
		// Index based seeking:
		
		// Seek to given targetIndex:
		targetIndex = (long) (timeindex + 0.5);

		// We want video samples.
		myTypes[0] = VisualMediaCharacteristic;
		
		// We want to begin with the first frame in the movie:
		myFlags = nextTimeStep + nextTimeEdgeOK;
		
		// Start with iteration at beginning:
		myTime = 0;
		myIndex = -1;
		
		// We iterate until end of movie (myTime < 0) or targetIndex reached:
		while ((myTime >= 0) && (myIndex < targetIndex)) {
			// Increment our index position:
			myIndex++;
			
			// Look for the next frame in the track; when there are no more frames,
			// myTime is set to -1, so we'll exit the while loop
			GetMovieNextInterestingTime(theMovie, myFlags, 1, myTypes, myTime, FloatToFixed(1), &myTime, NULL);

			// after the first interesting time, don't include the time we're currently at
			myFlags = nextTimeStep;
		}    
		
		// Valid time for existing target frame?
		if (myTime >= 0) {
			// Yes. Seek to it:
			SetMovieTimeValue(theMovie, myTime);
		}

		// Done with seek.
	}
	else {
		// Time based seeking:

		// Set new timeindex as time in seconds:
		SetMovieTimeValue(theMovie, (TimeValue) (((timeindex * (double) GetMovieTimeScale(theMovie))) + 0.5f));

		// Done with seek.
	}

    // Check if end of movie is reached. Rewind, if so...
    if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag > 0) {
        if (GetMovieRate(theMovie) > 0) {
            GoToBeginningOfMovie(theMovie);
        } else {
            GoToEndOfMovie(theMovie);
        }
    }

	// Yield some processing time to Quicktime to update properly:
    MoviesTask(theMovie, 0);
    
    // Return old time value of previous position:
    return(oldtime);
}
/*
 *  PsychQTGetTextureFromMovie() -- Create an OpenGL texture map from a specific videoframe from given movie object.
 *
 *  win = Window pointer of onscreen window for which a OpenGL texture should be created.
 *  moviehandle = Handle to the movie object.
 *  checkForImage = true == Just check if new image available, false == really retrieve the image, blocking if necessary.
 *  timeindex = When not in playback mode, this allows specification of a requested frame by presentation time.
 *              If set to -1, or if in realtime playback mode, this parameter is ignored and the next video frame is returned.
 *  out_texture = Pointer to the Psychtoolbox texture-record where the new texture should be stored.
 *  presentation_timestamp = A ptr to a double variable, where the presentation timestamp of the returned frame should be stored.
 *
 *  Returns true (1) on success, false (0) if no new image available, -1 if no new image available and there won't be any in future.
 */
int PsychQTGetTextureFromMovie(PsychWindowRecordType *win, int moviehandle, int checkForImage, double timeindex, PsychWindowRecordType *out_texture, double *presentation_timestamp)
{
	static TimeValue myNextTimeCached = -2;
	static TimeValue nextFramesTimeCached = -2;
    TimeValue		myCurrTime;
    TimeValue		myNextTime;
    TimeValue       nextFramesTime=0;
    short		myFlags;
    OSType		myTypes[1];
    OSErr		error = noErr;
    Movie               theMovie;
    CVOpenGLTextureRef newImage = NULL;
    QTVisualContextRef  theMoviecontext;
    unsigned int failcount=0;
    float lowerLeft[2];
    float lowerRight[2];    
    float upperRight[2];    
    float upperLeft[2];
    GLuint texid;
    Rect rect;
    float rate;
    double targetdelta, realdelta, frames;
	PsychRectType outRect;

    if (!PsychIsOnscreenWindow(win)) {
        PsychErrorExitMsg(PsychError_user, "Need onscreen window ptr!!!");
    }
    
    // Activate OpenGL context of target window:
    PsychSetGLContext(win);

    // Explicitly disable Apple's client storage extensions. For now they are not really useful to us.
    glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
    
    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided.");
    }
    
    if ((timeindex!=-1) && (timeindex < 0 || timeindex >= 10000.0)) {
        PsychErrorExitMsg(PsychError_user, "Invalid timeindex provided.");
    }
    
    if (NULL == out_texture && !checkForImage) {
        PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of out_texture ptr passed!!!");
    }
    
    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;
    theMoviecontext = movieRecordBANK[moviehandle].QTMovieContext;

    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle.");
    }

    // Check if end of movie is reached. Rewind, if so...
    if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag > 0) {
        if (GetMovieRate(theMovie)>0) {
            GoToBeginningOfMovie(theMovie);
        } else {
            GoToEndOfMovie(theMovie);
        }
    }
    
    // Is movie actively playing (automatic async playback, possibly with synced sound)?
    // If so, then we ignore the 'timeindex' parameter, because the automatic playback
    // process determines which frames should be delivered to PTB when. This function will
    // simply wait or poll for arrival/presence of a new frame that hasn't been fetched
    // in previous calls.
    if (0 == GetMovieRate(theMovie)) {
        // Movie playback inactive. We are in "manual" mode: No automatic async playback,
        // no synced audio output. The user just wants to manually fetch movie frames into
        // textures for manual playback in a standard Matlab-loop.

		// First pass - checking for new image?
		if (checkForImage) {
			// Image for specific point in time requested?
			if (timeindex >= 0) {
				// Yes. We try to retrieve the next possible image for requested timeindex.
				myCurrTime = (TimeValue) ((timeindex * (double) GetMovieTimeScale(theMovie)) + 0.5f);
			}
			else {
				// No. We just retrieve the next frame, given the current movie time.
				myCurrTime = GetMovieTime(theMovie, NULL);
			}
            
			// Retrieve timeindex of the closest image sample after myCurrTime:
			myFlags = nextTimeStep + nextTimeEdgeOK;	// We want the next frame in the movie's media.
			myTypes[0] = VisualMediaCharacteristic;		// We want video samples.
			GetMovieNextInterestingTime(theMovie, myFlags, 1, myTypes, myCurrTime, FloatToFixed(1), &myNextTime, &nextFramesTime);
			error = GetMoviesError();
			if (error != noErr) {
				PsychErrorExitMsg(PsychError_internal, "Failed to fetch texture from movie for given timeindex!");
			}
			
			// Found useful event?
			if (myNextTime == -1) {
				if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-WARNING: Bogus timevalue in movie track for movie %i. Trying to keep going.\n", moviehandle);
				
				// No. Just push timestamp to current time plus a little bit in the hope
				// this will get us unstuck:
				myNextTime = myCurrTime + (TimeValue) 1;
				nextFramesTime = (TimeValue) 0;
			}
			
			if (myNextTime != myNextTimeCached) {
				// Set movies current time to myNextTime, so the next frame will be fetched from there:
				SetMovieTimeValue(theMovie, myNextTime);
				
				// nextFramesTime is the timeindex to which we need to advance for retrieval of next frame: (see code below)
				nextFramesTime=myNextTime + nextFramesTime;
				
				if (PsychPrefStateGet_Verbosity() > 5) printf("PTB-DEBUG: Current timevalue in movie track for movie %i is %lf secs.\n", moviehandle, (double) myNextTime / (double) GetMovieTimeScale(theMovie));
				if (PsychPrefStateGet_Verbosity() > 5) printf("PTB-DEBUG: Next timevalue in movie track for movie %i is %lf secs.\n", moviehandle, (double) nextFramesTime / (double) GetMovieTimeScale(theMovie));
				
				// Cache values for 2nd pass:
				myNextTimeCached = myNextTime;
				nextFramesTimeCached = nextFramesTime;
			}
			else {
				// Somehow got stuck? Do nothing...
				if (PsychPrefStateGet_Verbosity() > 5) printf("PTB-DEBUG: Seem to be a bit stuck at timevalue [for movie %i] of %lf secs. Nudging a bit forward...\n", moviehandle, (double) myNextTime / (double) GetMovieTimeScale(theMovie));
				// Nudge the timeindex a bit forward in the hope that this helps:
				SetMovieTimeValue(theMovie, GetMovieTime(theMovie, NULL) + 1);
			}
		}
		else {
			// This is the 2nd pass: Image fetching. Use cached values from first pass:
			// Caching in a static works because we're always called immediately for 2nd
			// pass after successful return from 1st pass, and we're not multi-threaded,
			// i.e., don't need to be reentrant or thread-safe here:
			myNextTime = myNextTimeCached;
			nextFramesTime = nextFramesTimeCached;
			myNextTimeCached = -2;
		}
	}
    else {
        // myNextTime unavailable if in autoplayback-mode:
        myNextTime=-1;
    }
    
    // Presentation timestamp requested?
    if (presentation_timestamp) {
        // Already available?
        if (myNextTime==-1) {
            // Retrieve the exact presentation timestamp of the retrieved frame (in movietime):
            myFlags = nextTimeStep + nextTimeEdgeOK;            // We want the next frame in the movie's media.
            myTypes[0] = VisualMediaCharacteristic;		// We want video samples.
                                                                // We search backward for the closest available image for the current time. Either we get the current time
                                                                // if we happen to fetch a frame exactly when it becomes ready, or we get a bit earlier timestamp, which is
                                                                // the optimal presentation timestamp for this frame:
            GetMovieNextInterestingTime(theMovie, myFlags, 1, myTypes, GetMovieTime(theMovie, NULL), FloatToFixed(-1), &myNextTime, NULL);
        }
        // Convert pts (in Quicktime ticks) to pts in seconds since start of movie and return it:
        *presentation_timestamp = (double) myNextTime / (double) GetMovieTimeScale(theMovie);
    }

    // Allow quicktime visual context task to do its internal bookkeeping and cleanup work:
    if (theMoviecontext) QTVisualContextTask(theMoviecontext);

    // Perform decompress-operation:
    if (checkForImage) MoviesTask(theMovie, 0);
    
    // Should we just check for new image? If so, just return availability status:
    if (checkForImage) {
        if (PSYCH_USE_QT_GWORLDS) {
            // We use GWorlds. In this case we either succeed immediately due to the
            // synchronous nature of GWorld rendering, or we fail completely at end
            // of non-looping movie:
            if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag == 0) {
                // No new frame available and there won't be any in the future, because this is a non-looping
                // movie that has reached its end.
                return(-1);
            }
            
            // Is this the special case of a movie without video, but only sound? In that case,
			// we always return 'false' because there is no image to return.
			if (movieRecordBANK[moviehandle].QTMovieGWorld == NULL) return(false);
			
			// Success!
            return(true);
        }
        
        // Code which uses QTVisualContextTasks...
        if (QTVisualContextIsNewImageAvailable(theMoviecontext, NULL)) {
            // New frame ready!
            return(true);
        }
        else if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag == 0) {
            // No new frame available and there won't be any in the future, because this is a non-looping
            // movie that has reached its end.
            return(-1);
        }
        else {
            // No new frame available yet:
            return(false);
        }
    }
    
    if (!PSYCH_USE_QT_GWORLDS) {
        // Blocking wait-code for non-GWorld mode:
        // Try up to 1000 iterations for arrival of requested image data in wait-mode:
        failcount=0;
        while ((failcount < 1000) && !QTVisualContextIsNewImageAvailable(theMoviecontext, NULL)) {
            PsychWaitIntervalSeconds(0.005);
            MoviesTask(theMovie, 0);
            failcount++;
        }
        
        // No new frame available and there won't be any in the future, because this is a non-looping
        // movie that has reached its end.
        if ((failcount>=1000) && IsMovieDone(theMovie) && (movieRecordBANK[moviehandle].loopflag == 0)) {
            return(-1);
        }
        
        // Fetch new OpenGL texture with the new movie image frame:
        error = QTVisualContextCopyImageForTime(theMoviecontext, kCFAllocatorDefault, NULL, &newImage);
        if ((error!=noErr) || newImage == NULL) {
            PsychErrorExitMsg(PsychError_internal, "OpenGL<->Quicktime texture fetch failed!!!");
        }
    
        // Disable client storage, if it was enabled:
        glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
        
        // Build a standard PTB texture record:    
        CVOpenGLTextureGetCleanTexCoords (newImage, lowerLeft, lowerRight, upperRight, upperLeft);
        texid = CVOpenGLTextureGetName(newImage);
        
        // Assign texture rectangle:
        PsychMakeRect(outRect, upperLeft[0], upperLeft[1], lowerRight[0], lowerRight[1]);    
        
        // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
        out_texture->textureOrientation = (CVOpenGLTextureIsFlipped(newImage)) ? 3 : 4;

        // Assign OpenGL texture id:
        out_texture->textureNumber = texid;
        
        // Store special texture object as part of the PTB texture record:
        out_texture->targetSpecific.QuickTimeGLTexture = newImage;
    }
    else {
        // Synchronous texture fetch code for GWorld rendering mode:
        // At this point, the GWorld should contain the source image for creating a
        // standard OpenGL texture:
        
        // Disable client storage, if it was enabled:
        glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);
        
        // Build a standard PTB texture record:    

        // Assign texture rectangle:
        GetMovieBox(theMovie, &rect);

        // Hack: Need to extend rect by 4 pixels, because GWorlds are 4 pixels-aligned via
        // image row padding:
        rect.right = rect.right + 4;
        PsychMakeRect(out_texture->rect, rect.left, rect.top, rect.right, rect.bottom);    
        
        // Set NULL - special texture object as part of the PTB texture record:
        out_texture->targetSpecific.QuickTimeGLTexture = NULL;

        // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
        out_texture->textureOrientation = 3;
        
        // Setup a pointer to our GWorld as texture data pointer:
        out_texture->textureMemorySizeBytes = 0;

		// Quicktime textures are aligned on 4 Byte boundaries:
		out_texture->textureByteAligned = 4;

        // Lock GWorld:
        if(!LockPixels(GetGWorldPixMap(movieRecordBANK[moviehandle].QTMovieGWorld))) {
            // Locking surface failed! We abort.
            PsychErrorExitMsg(PsychError_internal, "PsychQTGetTextureFromMovie(): Locking GWorld pixmap surface failed!!!");
        }
        
        // This will retrieve an OpenGL compatible pointer to the GWorlds pixel data and assign it to our texmemptr:
        out_texture->textureMemory = (GLuint*) GetPixBaseAddr(GetGWorldPixMap(movieRecordBANK[moviehandle].QTMovieGWorld));
            
        // Let PsychCreateTexture() do the rest of the job of creating, setting up and
        // filling an OpenGL texture with GWorlds content:
        PsychCreateTexture(out_texture);
        
        // Undo hack from above after texture creation: Now we need the real width of the
        // texture for proper texture coordinate assignments in drawing code et al.
        rect.right = rect.right - 4;
        PsychMakeRect(outRect, rect.left, rect.top, rect.right, rect.bottom);    

        // Unlock GWorld surface. We do a glFinish() before, for safety reasons...
        //glFinish();
        UnlockPixels(GetGWorldPixMap(movieRecordBANK[moviehandle].QTMovieGWorld));

        // Ready to use the texture... We're done.
    }
    
	// Normalize texture rectangle and assign it:
	PsychNormalizeRect(outRect, out_texture->rect);
	
    rate = FixedToFloat(GetMovieRate(theMovie));
    
    // Detection of dropped frames: This is a heuristic. We'll see how well it works out...
    if (rate && presentation_timestamp) {
        // Try to check for dropped frames in playback mode:

        // Expected delta between successive presentation timestamps:
        targetdelta = 1.0f / (movieRecordBANK[moviehandle].fps * rate);

        // Compute real delta, given rate and playback direction:
        if (rate>0) {
            realdelta = *presentation_timestamp - movieRecordBANK[moviehandle].last_pts;
            if (realdelta<0) realdelta = 0;
        }
        else {
            realdelta = -1.0 * (*presentation_timestamp - movieRecordBANK[moviehandle].last_pts);
            if (realdelta<0) realdelta = 0;
        }
        
        frames = realdelta / targetdelta;
        // Dropped frames?
        if (frames > 1 && movieRecordBANK[moviehandle].last_pts>=0) {
            movieRecordBANK[moviehandle].nr_droppedframes += (int) (frames - 1 + 0.5);
        }

        movieRecordBANK[moviehandle].last_pts = *presentation_timestamp;
    }
    
    // Manually advance movie time, if in fetch mode:
    if (0 == GetMovieRate(theMovie)) {
        // We are in manual fetch mode: Need to manually advance movie time to next
        // media sample:
		if (nextFramesTime == myNextTime) {
			// Invalid value? Try to hack something that gets us unstuck:
			myNextTime = GetMovieTime(theMovie, NULL);
			nextFramesTime = myNextTime + (TimeValue) 1;
		}

        SetMovieTimeValue(theMovie, nextFramesTime);        
    }
    
    // Check if end of movie is reached. Rewind, if so...
    if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag > 0) {
        if (GetMovieRate(theMovie)>0) {
            GoToBeginningOfMovie(theMovie);
        } else {
            GoToEndOfMovie(theMovie);
        }
    }

    return(TRUE);
}
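/*
 * Hedged usage sketch (not part of Psychtoolbox): the two-pass protocol the
 * function above expects from its caller in manual fetch mode. Pass 1 with
 * checkForImage != 0 polls for a new frame (1 = available, 0 = not yet,
 * -1 = end of a non-looping movie); pass 2 with checkForImage == 0 fetches it
 * into the texture record. The wrapper name is hypothetical.
 */
static int FetchNextMovieFrameSketch(PsychWindowRecordType *win, int moviehandle,
                                     PsychWindowRecordType *tex, double *pts)
{
    int rc = PsychQTGetTextureFromMovie(win, moviehandle, 1, -1, NULL, pts);
    if (rc <= 0) return rc;   /* 0 = no new frame yet, -1 = movie finished */
    return PsychQTGetTextureFromMovie(win, moviehandle, 0, -1, tex, pts);
}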
// Example #13
void mexFunction(int nlhs, mxArray* plhs[], int nrhs, const mxArray* prhs[]) {

  if (nlhs < 0) { mexErrMsgTxt("Too few output arguments."); return; }
  if (nlhs > 1) { mexErrMsgTxt("Too many output arguments."); return; }
  if (nrhs < 1) { mexErrMsgTxt("Too few input arguments."); return; }
  if (nrhs > 3) { mexErrMsgTxt("Too many input arguments."); return; }

  TimeValue duration;
  TimeRecord myTimeRecord;
  Rect bounds;
  OSErr result = 0;
  short resRefNum = -1;
  short actualResId = DoTheRightThing;
  FSSpec theFSSpec;
  GWorldPtr offWorld;
  Movie theMovie = nil;
  MovieController thePlayer = nil;
  MovieDrawingCompleteUPP myDrawCompleteProc;
  long frame_end;
  long myStep = 1;
  char location[PATH_BUFFER_SIZE];
  long frame_count;
  mwSize cdims[2];
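  /* Note: frame_start, frame_num and framedata are used below but not declared
     here; in the original source they are presumably file-scope globals shared
     with the DrawCompleteProc drawing-complete callback. */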

  mxGetString(prhs[0], location, PATH_BUFFER_SIZE);

  if (nrhs > 2) {
    frame_start = rint(mxGetScalar(prhs[1]));
    frame_end = rint(mxGetScalar(prhs[2]));
  } else if (nrhs > 1) {
    frame_start = 1;
    frame_end = rint(mxGetScalar(prhs[1]));
  } else {
    frame_start = 1;
    frame_end = 0;
  }

  if (frame_start < 1) {
    mexErrMsgTxt("Error: the starting frame must be positive\n"); 
    return; 
  }

  if (frame_end < 0) {
    mexErrMsgTxt("Error: the ending frame must be positive\n"); 
    return; 
  }

  if (frame_end != 0 && frame_end < frame_start) {
    mexErrMsgTxt("Error: the ending frame must not be less than the starting frame\n"); 
    return; 
  }

  myDrawCompleteProc = NewMovieDrawingCompleteUPP(DrawCompleteProc);

  EnterMovies();            

  if (NativePathNameToFSSpec(location, &theFSSpec, 0) ||
      OpenMovieFile(&theFSSpec, &resRefNum, 0) ||
      NewMovieFromFile(&theMovie, resRefNum, &actualResId, 0, 0, 0)) {
    mexErrMsgTxt("Error: failed to open movie\n"); 
    return; 
  }

  if (resRefNum != -1) CloseMovieFile(resRefNum);

  GetMovieBox(theMovie, &bounds);
  QTNewGWorld(&offWorld, k32ARGBPixelFormat, &bounds, NULL, NULL, 0);
  LockPixels(GetGWorldPixMap(offWorld));
  SetGWorld(offWorld, NULL);

  thePlayer = NewMovieController(theMovie, &bounds, mcTopLeftMovie | mcNotVisible);
  SetMovieGWorld(theMovie, offWorld, NULL);
  SetMovieActive(theMovie, true);
  SetMovieDrawingCompleteProc(theMovie, movieDrawingCallWhenChanged, myDrawCompleteProc, (long) offWorld);
  GetMovieTime(theMovie, &myTimeRecord);
  duration = GetMovieDuration(theMovie);

  // Compute the number of frames for allocation of output structure
  frame_count = 0;
  while ((frame_end == 0 || frame_count < frame_end) && GetMovieTime(theMovie, NULL) < duration) {
    frame_count++;
    MCDoAction(thePlayer, mcActionStep, (Ptr) myStep);
  }
  SetMovieTime(theMovie, &myTimeRecord);
  
  // Ignore frames greater than those in the file
  if (frame_end == 0 || frame_count < frame_end) frame_end = frame_count;
  
  cdims[0] = frame_end - frame_start + 1; // Indices are one-based
  cdims[1] = 1; 

  plhs[0] = mxCreateCellArray(2, cdims);

  // Step through the movie and save the frame when in the chosen interval
  // Note: the step size seems to be handled as a short internally. 
  //       Using anything greater than 32758 will seek to an incorrect frame
  frame_num = 1;
  while (frame_num <= frame_end) {
    MCDoAction(thePlayer, mcActionStep, (Ptr) myStep);
    if (frame_num >= frame_start) {
      MCIdle(thePlayer);
      mxSetCell(plhs[0], frame_num - frame_start, framedata);
    }
    frame_num++;
  }

  UnlockPixels(GetGWorldPixMap (offWorld));
  DisposeGWorld(offWorld);
  DisposeMovieController (thePlayer);
  DisposeMovie(theMovie);
  DisposeMovieDrawingCompleteUPP(myDrawCompleteProc);
  ExitMovies();

  return;
}
// Example #14
int quicktime_player::get_time() {
    return GetMovieTime(m->movie, NULL);
}
void QTCmpr_CompressSequence (WindowObject theWindowObject)
{
	ComponentInstance			myComponent = NULL;
	GWorldPtr					myImageWorld = NULL;		// the graphics world we draw the images in
	PixMapHandle				myPixMap = NULL;
	Movie						mySrcMovie = NULL;
	Track						mySrcTrack = NULL;
	Movie						myDstMovie = NULL;
	Track						myDstTrack = NULL;
	Media						myDstMedia = NULL;
	Rect						myRect;
	PicHandle					myPicture = NULL;
	CGrafPtr					mySavedPort = NULL;
	GDHandle					mySavedDevice = NULL;
	SCTemporalSettings			myTimeSettings;
	SCDataRateSettings			myRateSettings;
	FSSpec						myFile;
	Boolean						myIsSelected = false;
	Boolean						myIsReplacing = false;	
	short						myRefNum = -1;
	StringPtr 					myMoviePrompt = QTUtils_ConvertCToPascalString(kQTCSaveMoviePrompt);
	StringPtr 					myMovieFileName = QTUtils_ConvertCToPascalString(kQTCSaveMovieFileName);
	MatrixRecord				myMatrix;
	ImageDescriptionHandle		myImageDesc = NULL;
	TimeValue					myCurMovieTime = 0L;
	TimeValue					myOrigMovieTime = 0L;		// current movie time, when compression is begun
	short						myFrameNum;		
	long						myFlags = 0L;
	long						myNumFrames = 0L;
	long						mySrcMovieDuration = 0L;	// duration of source movie
	OSErr						myErr = noErr;
#if USE_ASYNC_COMPRESSION
	ICMCompletionProcRecord		myICMComplProcRec;
	ICMCompletionProcRecordPtr	myICMComplProcPtr = NULL;
	OSErr						myICMComplProcErr = noErr;

	myICMComplProcRec.completionProc = NULL;
	myICMComplProcRec.completionRefCon = 0L;
#endif

	if (theWindowObject == NULL)
		goto bail;

	//////////
	//
	// get the movie and the first video track in the movie
	//
	//////////
	
	mySrcMovie = (**theWindowObject).fMovie;
	if (mySrcMovie == NULL)
		goto bail;

	mySrcTrack = GetMovieIndTrackType(mySrcMovie, 1, VideoMediaType, movieTrackMediaType);
	if (mySrcTrack == NULL)
		goto bail;
	
	// stop the movie; we don't want it to be playing while we're (re)compressing it
	SetMovieRate(mySrcMovie, (Fixed)0L);

	// get the current movie time, when compression is begun; we'll restore this later
	myOrigMovieTime = GetMovieTime(mySrcMovie, NULL);

	//////////
	//
	// configure and display the Standard Image Compression dialog box
	//
	//////////
	
	// open an instance of the Standard Image Compression dialog component
	myComponent = OpenDefaultComponent(StandardCompressionType, StandardCompressionSubType);
	if (myComponent == NULL)
		goto bail;

	// turn off "best depth" option in the compression dialog, because all of our
	// buffering is done at 32-bits (regardless of the depth of the source data)
	//
	// a more ambitious approach would be to loop through each of the video sample
	// descriptions in each of the video tracks looking for the deepest depth, and
	// using that for the best depth; better yet, we could find out which compressors
	// were used and set one of those as the default in the compression dialog
	SCGetInfo(myComponent, scPreferenceFlagsType, &myFlags);
	myFlags &= ~scShowBestDepth;
	SCSetInfo(myComponent, scPreferenceFlagsType, &myFlags);

	// because we are recompressing a movie that may have a variable frame rate,
	// we want to allow the user to leave the frame rate text field blank (in which
	// case we can preserve the frame durations of the source movie); if the user
	// enters a number, we will resample the movie at a new frame rate; if we don't
	// clear this flag, the compression dialog will not allow zero in the frame rate field
	//
	// NOTE: we could have set this flag above when we cleared the scShowBestDepth flag;
	// it is done here for clarity.	
	SCGetInfo(myComponent, scPreferenceFlagsType, &myFlags);
	myFlags |= scAllowZeroFrameRate;
	SCSetInfo(myComponent, scPreferenceFlagsType, &myFlags);

	// get the number of video frames in the movie
	myNumFrames = QTUtils_GetFrameCount(mySrcTrack);

	// get the bounding rectangle of the movie, create a 32-bit GWorld with those
	// dimensions, and draw the movie poster picture into it; this GWorld will be
	// used for the test image in the compression dialog box and for rendering movie
	// frames
	myPicture = GetMoviePosterPict(mySrcMovie);
	if (myPicture == NULL)
		goto bail;
		
	GetMovieBox(mySrcMovie, &myRect);

	myErr = NewGWorld(&myImageWorld, 32, &myRect, NULL, NULL, 0L);
	if (myErr != noErr)
		goto bail;
		
	// get the pixmap of the GWorld; we'll lock the pixmap, just to be safe
	myPixMap = GetGWorldPixMap(myImageWorld);
	if (!LockPixels(myPixMap))
		goto bail;

	// draw the movie poster image into the GWorld
	GetGWorld(&mySavedPort, &mySavedDevice);
	SetGWorld(myImageWorld, NULL);
	EraseRect(&myRect);
	DrawPicture(myPicture, &myRect);
	KillPicture(myPicture);
	SetGWorld(mySavedPort, mySavedDevice);

	// set the picture to be displayed in the dialog box; passing NULL for the rect
	// means use the entire image; passing 0 for the flags means to use the default
	// system method of displaying the test image, which is currently a combination
	// of cropping and scaling; personally, I prefer scaling (your mileage may vary)
	SCSetTestImagePixMap(myComponent, myPixMap, NULL, scPreferScaling);

	// install the custom procs, if requested
	// we can install two kinds of custom procedures for use in connection with
	// the standard dialog box: (1) a modal-dialog filter function, and (2) a hook
	// function to handle the custom button in the dialog box
	if (gUseExtendedProcs)
		QTCmpr_InstallExtendedProcs(myComponent, (long)myPixMap);
	
	// set up some default settings for the compression dialog
	SCDefaultPixMapSettings(myComponent, myPixMap, true);
	
	// clear out the default frame rate chosen by Standard Compression (a frame rate
	// of 0 means to use the rate of the source movie)
	myErr = SCGetInfo(myComponent, scTemporalSettingsType, &myTimeSettings);
	if (myErr != noErr)
		goto bail;

	myTimeSettings.frameRate = 0;
	SCSetInfo(myComponent, scTemporalSettingsType, &myTimeSettings);

	// request image compression settings from the user; in other words, put up the dialog box
	myErr = SCRequestSequenceSettings(myComponent);
	if (myErr == scUserCancelled)
		goto bail;

	// get a copy of the temporal settings the user entered; we'll need them for some
	// of our calculations (in a simpler application, we'd never have to look at them)	
	SCGetInfo(myComponent, scTemporalSettingsType, &myTimeSettings);

	//////////
	//
	// adjust the data rate [to be supplied][relevant only for movies that have sound tracks]
	//
	//////////

	
	//////////
	//
	// adjust the sample count
	//
	// if the user wants to resample the frame rate of the movie (as indicated a non-zero
	// value in the frame rate field) calculate the number of frames and duration for the new movie
	//
	//////////
	
	if (myTimeSettings.frameRate != 0) {
		long	myDuration = GetMovieDuration(mySrcMovie);
		long	myTimeScale = GetMovieTimeScale(mySrcMovie);
		float	myFloat = (float)myDuration * myTimeSettings.frameRate;
		
		// frameRate is a Fixed (16.16) value, hence the extra division by 65536
		myNumFrames = myFloat / myTimeScale / 65536;
		if (myNumFrames == 0)
			myNumFrames = 1;
	}

	//////////
	//
	// get the name and location of the new movie file
	//
	//////////

	// prompt the user for a file to put the compressed image into; in theory, the name
	// should have a file extension appropriate to the type of compressed data selected by the user;
	// this is left as an exercise for the reader
	QTFrame_PutFile(myMoviePrompt, myMovieFileName, &myFile, &myIsSelected, &myIsReplacing);
	if (!myIsSelected)
		goto bail;

	// delete any existing file of that name
	if (myIsReplacing) {
		myErr = DeleteMovieFile(&myFile);
		if (myErr != noErr)
			goto bail;
	}
		
	//////////
	//
	// create the target movie
	//
	//////////
	
	myErr = CreateMovieFile(&myFile, sigMoviePlayer, smSystemScript, 
								createMovieFileDeleteCurFile | createMovieFileDontCreateResFile, &myRefNum, &myDstMovie);
	if (myErr != noErr)
		goto bail;
	
	// create a new video movie track with the same dimensions as the entire source movie
	myDstTrack = NewMovieTrack(myDstMovie,
								(long)(myRect.right - myRect.left) << 16,
								(long)(myRect.bottom - myRect.top) << 16, kNoVolume);
	if (myDstTrack == NULL)
		goto bail;
	
	// create a media for the new track with the same time scale as the source movie;
	// because the time scales are the same, we don't have to do any time scale conversions.
	myDstMedia = NewTrackMedia(myDstTrack, VIDEO_TYPE, GetMovieTimeScale(mySrcMovie), 0, 0);
	if (myDstMedia == NULL)
		goto bail;
	
	// copy the user data and settings from the source to the dest movie
	CopyMovieSettings(mySrcMovie, myDstMovie);
	
	// set movie matrix to identity and clear the movie clip region (because the conversion
	// process transforms and composites all video tracks into one untransformed video track)
	SetIdentityMatrix(&myMatrix);
	SetMovieMatrix(myDstMovie, &myMatrix);
	SetMovieClipRgn(myDstMovie, NULL);
	
	// set the movie to highest quality imaging
	SetMoviePlayHints(mySrcMovie, hintsHighQuality, hintsHighQuality);

	myImageDesc = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
	if (myImageDesc == NULL)
		goto bail;

	// prepare for adding frames to the movie
	myErr = BeginMediaEdits(myDstMedia);
	if (myErr != noErr)
		goto bail;

	//////////
	//
	// compress the image sequence
	//
	// we are going to step through the source movie, compress each frame, and then add
	// the compressed frame to the destination movie
	//
	//////////
	
	myErr = SCCompressSequenceBegin(myComponent, myPixMap, NULL, &myImageDesc);
	if (myErr != noErr)
		goto bail;
	
#if USE_ASYNC_COMPRESSION
	myFlags = codecFlagUpdatePrevious + codecFlagUpdatePreviousComp + codecFlagLiveGrab;
	SCSetInfo(myComponent, scCodecFlagsType, &myFlags);
#endif

	// clear out our image GWorld and set movie to draw into it
	SetGWorld(myImageWorld, NULL);
	EraseRect(&myRect);
	SetMovieGWorld(mySrcMovie, myImageWorld, GetGWorldDevice(myImageWorld));

	// set current time value to beginning of the source movie
	myCurMovieTime = 0;

	// get a value we'll need inside the loop
	mySrcMovieDuration = GetMovieDuration(mySrcMovie);

	// loop through all of the interesting times we counted above
	for (myFrameNum = 0; myFrameNum < myNumFrames; myFrameNum++) {
		short			mySyncFlag;
		TimeValue		myDuration;
		long			myDataSize;
		Handle			myCompressedData;

		//////////
		//
		// get the next frame of the source movie
		//
		//////////
		
		// if we are resampling the movie, step to the next frame
		if (myTimeSettings.frameRate) {
			myCurMovieTime = myFrameNum * mySrcMovieDuration / (myNumFrames - 1);
			myDuration = mySrcMovieDuration / myNumFrames;
		} else {
			OSType		myMediaType = VIDEO_TYPE;
			
			myFlags = nextTimeMediaSample;

			// if this is the first frame, include the frame we are currently on		
			if (myFrameNum == 0)
				myFlags |= nextTimeEdgeOK;
			
			// if we are maintaining the frame durations of the source movie,
			// skip to the next interesting time and get the duration for that frame
			GetMovieNextInterestingTime(mySrcMovie, myFlags, 1, &myMediaType, myCurMovieTime, 0, &myCurMovieTime, &myDuration);
		}
		
		SetMovieTimeValue(mySrcMovie, myCurMovieTime);
		MoviesTask(mySrcMovie, 0);
		MoviesTask(mySrcMovie, 0);
		MoviesTask(mySrcMovie, 0);

		// if data rate constraining is being done, tell Standard Compression the
		// duration of the current frame in milliseconds; we only need to do this
		// if the frames have variable durations
		if (!SCGetInfo(myComponent, scDataRateSettingsType, &myRateSettings)) {
			myRateSettings.frameDuration = myDuration * 1000 / GetMovieTimeScale(mySrcMovie);
			SCSetInfo(myComponent, scDataRateSettingsType, &myRateSettings);
		}

		//////////
		//
		// compress the current frame of the source movie and add it to the destination movie
		//
		//////////
		
		// if SCCompressSequenceFrame completes successfully, myCompressedData will hold
		// a handle to the newly-compressed image data and myDataSize will be the size of
		// the compressed data (which will usually be different from the size of the handle);
		// also mySyncFlag will be a value that indicates whether or not the frame is a
		// key frame (and which we pass directly to AddMediaSample); note that we do not need
		// to dispose of myCompressedData, since SCCompressSequenceEnd will do that for us
#if !USE_ASYNC_COMPRESSION
		myErr = SCCompressSequenceFrame(myComponent, myPixMap, &myRect, &myCompressedData, &myDataSize, &mySyncFlag);
		if (myErr != noErr)
			goto bail;
#else
		if (myICMComplProcPtr == NULL) {
			myICMComplProcRec.completionProc = NewICMCompletionProc(QTCmpr_CompletionProc);
			myICMComplProcRec.completionRefCon = (long)&myICMComplProcErr;
			myICMComplProcPtr = &myICMComplProcRec;
		}
		
		myICMComplProcErr = kAsyncDefaultValue;
		
		myErr = SCCompressSequenceFrameAsync(myComponent, myPixMap, &myRect, &myCompressedData, &myDataSize, &mySyncFlag, myICMComplProcPtr);
		if (myErr != noErr)
			goto bail;

		// spin our wheels while we're waiting for the compress call to complete
		while (myICMComplProcErr == kAsyncDefaultValue) {
			EventRecord			myEvent;
			
			WaitNextEvent(0, &myEvent, 60, NULL);
			SCAsyncIdle(myComponent);
		}
		myErr = myICMComplProcErr;
#endif

		myErr = AddMediaSample(myDstMedia, myCompressedData, 0, myDataSize, myDuration, (SampleDescriptionHandle)myImageDesc, 1, mySyncFlag, NULL);
		if (myErr != noErr)
			goto bail;
	}
	
	// close the compression sequence; this will dispose of the image description
	// and compressed data handles allocated by SCCompressSequenceBegin
	SCCompressSequenceEnd(myComponent);

	//////////
	//
	// add the media data to the destination movie
	//
	//////////
	
	myErr = EndMediaEdits(myDstMedia);
	if (myErr != noErr)
		goto bail;
	
	InsertMediaIntoTrack(myDstTrack, 0, 0, GetMediaDuration(myDstMedia), fixed1);

	// add the movie resource to the dst movie file.
	myErr = AddMovieResource(myDstMovie, myRefNum, NULL, NULL);
	if (myErr != noErr)
		goto bail;

	// flatten the movie data [to be supplied]
	
	// close the movie file
	CloseMovieFile(myRefNum);
	
bail:
	// close the Standard Compression component
	if (myComponent != NULL)
		CloseComponent(myComponent);

	if (mySrcMovie != NULL) {
		// restore the source movie's original graphics port and device
		SetMovieGWorld(mySrcMovie, mySavedPort, mySavedDevice);

		// restore the source movie's original movie time
		SetMovieTimeValue(mySrcMovie, myOrigMovieTime);
	}
	
	// restore the original graphics port and device
	SetGWorld(mySavedPort, mySavedDevice);

	// delete the GWorld we were drawing frames into
	if (myImageWorld != NULL)
		DisposeGWorld(myImageWorld);
	
#if USE_ASYNC_COMPRESSION
	if (myICMComplProcRec.completionProc != NULL)
		DisposeICMCompletionUPP(myICMComplProcRec.completionProc);
#endif

	free(myMoviePrompt);
	free(myMovieFileName);
}