//-----------------------------------------------------------------------------
void ofxQtVideoSaver::finishMovie(){

	if (!bSetupForRecordingMovie) return;
	
	bSetupForRecordingMovie = false;
	
    
    EndMediaEdits (media);             /* Inform the Movie Toolbox that it   */
                                       /*   can close the media container.   */


	/*  Step 5:  Insert a reference into the track that specifies which of the
    media samples to play and when to start playing them. 
    ======================================================================  */
    
    InsertMediaIntoTrack 
      (
      track,                           /* the track to update.               */
      0,                               /* time in track where the specified  */
                                       /*   media samples should start       */
                                       /*   playing, in movie time scale.    */
      0,                               /* time in media samples of the first */
                                       /*   sample to play using media time  */
                                       /*   scale.                           */
      GetMediaDuration (media),        /* duration of media samples to play  */
                                       /*   using media time scale.          */
      1L<<16                           /* playback rate: fixed1, i.e. 1.0    */
      );


/*  Step 6:  Append the movie atom to the movie file (AddMovieResource).
    ====================================================================  */
    
    sResId = movieInDataForkResID;

    /*  AddMovieResource expects a Pascal string for the resource name, so
    build one from the C string instead of casting the pointer.          */
    Str255 resName;
    resName[0] = (unsigned char) (fileName.size() > 255 ? 255 : fileName.size());
    memcpy (resName + 1, fileName.c_str(), resName[0]);

    osErr = AddMovieResource
      (
      movie,                           /* movie to create moov atom from     */
      sResRefNum,                      /* file to receive the moov atom      */
      &sResId,                         /* id num of movie resource (res fork)*/
      resName                          /* name of movie resource (res fork)  */
      );
    if (osErr) 
      { 
      printf ("AddMovieResource failed %d\n", osErr); 
      goto bail; 
      }

    if (sResRefNum != 0) 
      {
      CloseMovieFile (sResRefNum);     /* close file CreateMovieFile opened  */
      sResRefNum = 0;
      }


	/*  Step 7 (optional):  Place the movie atom as the first atom in a new 
    movie file, and interleave the media data (FlattenMovieData).        
    ===================================================================  */
    
    // flattening is optional here: the movie's samples already live in the
    // file's data fork, so we skip it.
	// if (bFlatten) flatten_my_movie (movie, pszFlatFilename);
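	// A hedged sketch of what that optional step could look like, left as a
	// comment because this class defines no FSSpec for a flat file
	// (flatFsSpec below is a stand-in):
	//
	//   Movie flatMovie = FlattenMovieData (movie,
	//       flattenAddMovieToDataFork | flattenForceMovieResourceBeforeMovieData,
	//       &flatFsSpec, FOUR_CHAR_CODE('TVOD'), smCurrentScript,
	//       createMovieFileDeleteCurFile);
	//   if (flatMovie != NULL) DisposeMovie (flatMovie);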


	/*  Step 8:  Close the movie file that CreateMovieFile opened (if necessary) 
    and dispose of the movie memory structures (DisposeMovie). 
    ========================================================================  */
    


	SetGWorld (pSavedPort, hSavedDevice);
	if (hImageDescription != NULL) { DisposeHandle ((Handle) hImageDescription); hImageDescription = NULL; }
	if (hCompressedData   != NULL) { DisposeHandle (hCompressedData);            hCompressedData   = NULL; }
	if (pMovieGWorld      != NULL) { DisposeGWorld (pMovieGWorld);               pMovieGWorld      = NULL; }

  bail:    

    /*  Close the file CreateMovieFile opened and dispose of the movie exactly
    once, on both the normal path and the error path.                       */
    if (sResRefNum != 0)   { CloseMovieFile (sResRefNum); sResRefNum = 0;    }
    if (movie     != NULL) { DisposeMovie (movie);        movie      = NULL; }

}
void QTEffects_RespondToDialogSelection (OSErr theErr)
{
	Boolean					myDialogWasCancelled = false;
	short					myResID = movieInDataForkResID;
	UInt16					myMovieIter;
	short					mySrcMovieRefNum = 0;
	Movie					myPrevSrcMovie = NULL;
	Track					myPrevSrcTrack = NULL;
	Movie					myNextSrcMovie = NULL;
	Track					myNextSrcTrack = NULL;
	short					myDestMovieRefNum = 0;
	FSSpec					myFile;
	Boolean					myIsSelected = false;
	Boolean					myIsReplacing = false;	
	StringPtr 				myPrompt = QTUtils_ConvertCToPascalString(kEffectsSaveMoviePrompt);
	StringPtr 				myFileName = QTUtils_ConvertCToPascalString(kEffectsSaveMovieFileName);
	Movie					myDestMovie = NULL;
	Fixed					myDestMovieWidth, myDestMovieHeight;
	ImageDescriptionHandle	myDesc = NULL;
	Track					videoTrackFX, videoTrackA, videoTrackB;
	Media					videoMediaFX, videoMediaA, videoMediaB;
	TimeValue				myCurrentDuration = 0;
	TimeValue				myReturnedDuration;
	Boolean					isFirstTransition = true;
	TimeValue				myMediaTransitionDuration;
	TimeValue				myMediaFXStartTime, myMediaFXDuration;
	OSType					myEffectCode;
	long					myFlags = createMovieFileDeleteCurFile | createMovieFileDontCreateResFile;
	long					myLong;
	OSErr					myErr = noErr;

	// standard parameter box has been dismissed, so remember that fact
	gEffectsDialog = 0L;
	
	myDialogWasCancelled = (theErr == userCanceledErr);
	
	// we're finished with the effect list and movie posters	
	QTDisposeAtomContainer(gEffectList);
	
	if (gPosterA != NULL)
		KillPicture(gPosterA);
		
	if (gPosterB != NULL)
		KillPicture(gPosterB);
	
	// when the sign says stop, then stop
	if (myDialogWasCancelled)
		goto bail;

	// add atoms naming the sources to gEffectSample
	myLong = EndianU32_NtoB(kSourceOneName);
	QTInsertChild(gEffectSample, kParentAtomIsContainer, kEffectSourceName, 1, 0, sizeof(myLong), &myLong, NULL);

	myLong = EndianU32_NtoB(kSourceTwoName);
	QTInsertChild(gEffectSample, kParentAtomIsContainer, kEffectSourceName, 2, 0, sizeof(myLong), &myLong, NULL);
	
	// extract the 'what' atom to find out what kind of effect it is
	{
		QTAtom			myEffectAtom;
		QTAtomID		myEffectAtomID;
		long			myEffectCodeSize;
		Ptr				myEffectCodePtr;

		myEffectAtom = QTFindChildByIndex(gEffectSample, kParentAtomIsContainer, kParameterWhatName, kParameterWhatID, &myEffectAtomID);
		
		myErr = QTLockContainer(gEffectSample);
		BailError(myErr);

		myErr = QTGetAtomDataPtr(gEffectSample, myEffectAtom, &myEffectCodeSize, &myEffectCodePtr);
		BailError(myErr);

		if (myEffectCodeSize != sizeof(OSType)) {
			myErr = paramErr;
			goto bail;
		}
		
		myEffectCode = *(OSType *)myEffectCodePtr;		// "tsk"
		myEffectCode = EndianU32_BtoN(myEffectCode);	// because the data is read from an atom container
		
		myErr = QTUnlockContainer(gEffectSample);
		BailError(myErr);
	}

	// ask the user for the name of the new movie file
	QTFrame_PutFile(myPrompt, myFileName, &myFile, &myIsSelected, &myIsReplacing);
	if (!myIsSelected)
		goto bail;				// deal with user cancelling

	// create a movie file for the destination movie
	myErr = CreateMovieFile(&myFile, FOUR_CHAR_CODE('TVOD'), 0, myFlags, &myDestMovieRefNum, &myDestMovie);
	BailError(myErr);
	
	// open the first file as a movie; call the first movie myPrevSrcMovie
	myErr = OpenMovieFile(&gSpecList[0], &mySrcMovieRefNum, fsRdPerm);
	BailError(myErr);
	
	myErr = NewMovieFromFile(&myPrevSrcMovie, mySrcMovieRefNum, NULL, NULL, 0, NULL);
	BailError(myErr);
	
	myErr = CloseMovieFile(mySrcMovieRefNum);
	BailError(myErr);
	
	// if the movie is shorter than kMinimumDuration, scale it to that length
	SetMovieTimeScale(myPrevSrcMovie, kTimeScale);
	myErr = QTEffects_GetFirstVideoTrackInMovie(myPrevSrcMovie, &myPrevSrcTrack);
	BailNil(myPrevSrcTrack);
	
	if (GetTrackDuration(myPrevSrcTrack) < kMinimumDuration) {
		myErr = ScaleTrackSegment(myPrevSrcTrack, 0, GetTrackDuration(myPrevSrcTrack), kMinimumDuration);
		BailError(myErr);
	}
	
	// find out how big the first movie is; we'll use it as the size of all our tracks
	GetTrackDimensions(myPrevSrcTrack, &myDestMovieWidth, &myDestMovieHeight);
	
#if USES_MAKE_IMAGE_DESC_FOR_EFFECT
	// create a new sample description for the effect,
	// which is just an image description specifying the effect and its dimensions
	myErr = MakeImageDescriptionForEffect(myEffectCode, &myDesc);
	if (myErr != noErr)
		BailError(myErr);
#else
	// create a new sample description for the effect,
	// which is just an image description specifying the effect and its dimensions
	myDesc = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
	BailNil(myDesc);
	
	(**myDesc).idSize = sizeof(ImageDescription);
	(**myDesc).cType = myEffectCode;
	(**myDesc).hRes = 72L << 16;
	(**myDesc).vRes = 72L << 16;
	(**myDesc).dataSize = 0L;
	(**myDesc).frameCount = 1;
	(**myDesc).depth = 0;
	(**myDesc).clutID = -1;
#endif
	
	// fill in the fields of the sample description
	(**myDesc).vendor = kAppleManufacturer;
	(**myDesc).temporalQuality = codecNormalQuality;
	(**myDesc).spatialQuality = codecNormalQuality;
	(**myDesc).width = FixRound(myDestMovieWidth);
	(**myDesc).height = FixRound(myDestMovieHeight);

	// add three video tracks to the destination movie:
	// 	- videoTrackFX is where the effects and stills live; it's user-visible.
	//	- videoTrackA is where the "source A"s for effects live; it's hidden by the input map
	//	- videoTrackB is where the "source B"s for effects live; it's hidden by the input map
	videoTrackFX = NewMovieTrack(myDestMovie, myDestMovieWidth, myDestMovieHeight, 0);
	BailNil(videoTrackFX);
	videoMediaFX = NewTrackMedia(videoTrackFX, VideoMediaType, kTimeScale, NULL, 0);
	BailNil(videoMediaFX);
	myErr = BeginMediaEdits(videoMediaFX);
	BailError(myErr);
	
	videoTrackA = NewMovieTrack(myDestMovie, myDestMovieWidth, myDestMovieHeight, 0);
	BailNil(videoTrackA);
	videoMediaA = NewTrackMedia(videoTrackA, VideoMediaType, kTimeScale, NULL, 0);
	BailNil(videoMediaA);

	videoTrackB = NewMovieTrack(myDestMovie, myDestMovieWidth, myDestMovieHeight, 0);
	BailNil(videoTrackB);
	videoMediaB = NewTrackMedia(videoTrackB, VideoMediaType, kTimeScale, NULL, 0);
	BailNil(videoMediaB);

	// create the input map
	{
		long				myRefIndex1, myRefIndex2;
		QTAtomContainer		myInputMap;
		QTAtom				myInputAtom;
		OSType				myInputType;

		QTNewAtomContainer(&myInputMap);

		// first input
		if (videoTrackA) {
		
			AddTrackReference(videoTrackFX, videoTrackA, kTrackModifierReference, &myRefIndex1);
			QTInsertChild(myInputMap, kParentAtomIsContainer, kTrackModifierInput, myRefIndex1, 0, 0, NULL, &myInputAtom);
	
			myInputType = EndianU32_NtoB(kTrackModifierTypeImage);
			QTInsertChild(myInputMap, myInputAtom, kTrackModifierType, 1, 0, sizeof(myInputType), &myInputType, NULL);
	
			myLong = EndianU32_NtoB(kSourceOneName);
			QTInsertChild(myInputMap, myInputAtom, kEffectDataSourceType, 1, 0, sizeof(myLong), &myLong, NULL);
		}

		// second input
		if (videoTrackB) {
		
			AddTrackReference(videoTrackFX, videoTrackB, kTrackModifierReference, &myRefIndex2);
			QTInsertChild(myInputMap, kParentAtomIsContainer, kTrackModifierInput, myRefIndex2, 0, 0, NULL, &myInputAtom);
	
			myInputType = EndianU32_NtoB(kTrackModifierTypeImage);
			QTInsertChild(myInputMap, myInputAtom, kTrackModifierType, 1, 0, sizeof(myInputType), &myInputType, NULL);
	
			myLong = EndianU32_NtoB(kSourceTwoName);
			QTInsertChild(myInputMap, myInputAtom, kEffectDataSourceType, 1, 0, sizeof(myLong), &myLong, NULL);
		}

		// set that map
		SetMediaInputMap(GetTrackMedia(videoTrackFX), myInputMap);
		
		QTDisposeAtomContainer(myInputMap);
	}

	myCurrentDuration = 0;

#if MAKE_STILL_SECTIONS
	// copy the first sample of the first video track of the first movie to videoTrackFX, with duration kStillDuration.
	myErr = QTEffects_CopyPortionOfTrackToTrack(myPrevSrcTrack, eStartPortion + eMiddlePortion, videoTrackFX, myCurrentDuration, &myReturnedDuration);
	BailError(myErr);
	
	myCurrentDuration += myReturnedDuration;
#endif 

	// now process any remaining files
	myMovieIter = 1;
	while (myMovieIter < gSpecCount) {
		
		// open the next file as a movie; call it nextSourceMovie
		myErr = OpenMovieFile(&gSpecList[myMovieIter], &mySrcMovieRefNum, fsRdPerm);
		BailError(myErr);
		
		myErr = NewMovieFromFile(&myNextSrcMovie, mySrcMovieRefNum, NULL, NULL, 0, NULL);
		BailError(myErr);
		
		// we're done with the movie file, so close it
		myErr = CloseMovieFile(mySrcMovieRefNum);
		BailError(myErr);
		
		// if the movie is shorter than kMinimumDuration, scale it to that length
		SetMovieTimeScale(myNextSrcMovie, kTimeScale);
		myErr = QTEffects_GetFirstVideoTrackInMovie(myNextSrcMovie, &myNextSrcTrack);
		BailNil(myNextSrcTrack);
		
		if (GetTrackDuration(myNextSrcTrack) < kMinimumDuration) {
			myErr = ScaleTrackSegment(myNextSrcTrack, 0, GetTrackDuration(myNextSrcTrack), kMinimumDuration);
			BailError(myErr);
		}

		// create a transition effect from the previous source movie's first video sample to the next source movie's first video sample
		// (the effect should have duration kEffectDuration);
		// this involves adding one sample to each of the three video tracks:
		
		//    sample from previous source movie	 -> videoTrackA
		myErr = QTEffects_CopyPortionOfTrackToTrack(myPrevSrcTrack, eFinishPortion, videoTrackA, myCurrentDuration, &myReturnedDuration);
		BailError(myErr);
		
		//    sample from next source movie    	 -> videoTrackB
		myErr = QTEffects_CopyPortionOfTrackToTrack(myNextSrcTrack, eStartPortion, videoTrackB, myCurrentDuration, &myReturnedDuration);
		BailError(myErr);
		
		//    effect sample                 	  -> videoTrackFX
		if (isFirstTransition) {
			myMediaTransitionDuration = myReturnedDuration;
			myMediaFXStartTime = GetMediaDuration(videoMediaFX);
			myErr = AddMediaSample(videoMediaFX, gEffectSample, 0, GetHandleSize(gEffectSample), myMediaTransitionDuration, (SampleDescriptionHandle)myDesc, 1, 0, NULL);
			BailError(myErr);
			
			myMediaFXDuration = GetMediaDuration(videoMediaFX) - myMediaFXStartTime;
			isFirstTransition = false;
		}
		
		myErr = InsertMediaIntoTrack(videoTrackFX, myCurrentDuration, myMediaFXStartTime, myMediaFXDuration, FixRatio(myReturnedDuration, myMediaTransitionDuration));
		BailError(myErr);
		
		myCurrentDuration += myReturnedDuration;
		
#if MAKE_STILL_SECTIONS
		// copy the first video sample of myNextSrcMovie to videoTrackFX, with duration kStillDuration.
		myErr = QTEffects_CopyPortionOfTrackToTrack(myNextSrcTrack, eMiddlePortion + ((myMovieIter + 1 == gSpecCount) ? eFinishPortion : 0), videoTrackFX, myCurrentDuration, &myReturnedDuration);
		BailError(myErr);
		
		myCurrentDuration += myReturnedDuration;
#endif // MAKE_STILL_SECTIONS
		
		// dispose of previous source movie.  
		DisposeMovie(myPrevSrcMovie);
		
		myPrevSrcMovie = myNextSrcMovie;
		myPrevSrcTrack = myNextSrcTrack;
		myNextSrcMovie = NULL;
		myNextSrcTrack = NULL;
		
		myMovieIter++;
	} // while
	
	myErr = EndMediaEdits(videoMediaFX);
	BailError(myErr);

	myErr = AddMovieResource(myDestMovie, myDestMovieRefNum, &myResID, "\pMovie 1");
	BailError(myErr);
	
	CloseMovieFile(myDestMovieRefNum);
	
	if (myPrevSrcMovie != NULL)
		DisposeMovie(myPrevSrcMovie);
		
	DisposeMovie(myDestMovie);
	
bail:
	free(myPrompt);
	free(myFileName);

	QTDisposeAtomContainer(gEffectSample);

	if (myDesc != NULL)
		DisposeHandle((Handle)myDesc);

	return;
}
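/*  Hedged sketch (an assumption, not the original helper, which is defined
	elsewhere in the sample): the QTEffects_GetFirstVideoTrackInMovie calls
	above can be satisfied by asking the Movie Toolbox for the first enabled
	track whose media is of type video.  */
OSErr QTEffects_GetFirstVideoTrackInMovie (Movie theMovie, Track *theTrack)
{
	// GetMovieIndTrackType returns the index'th track matching the given media type
	*theTrack = GetMovieIndTrackType(theMovie, 1, VideoMediaType, movieTrackMediaType | movieTrackEnabledOnly);

	return (*theTrack == NULL) ? invalidTrack : noErr;
}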
void ofxQtVideoSaver::setup( int width , int height, string movieName){

	w = width;
	h = height;
    
   
    fileName = (ofToDataPath(movieName));
    //pszFlatFilename = flatFileName;
    
    initializeQuicktime();
	/*  Load the FSSpec structure to describe the receiving file.  For a 
    description of this and related calls see 
    http://developer.apple.com/quicktime/icefloe/dispatch004.html.
    ================================================================  */


	#ifdef TARGET_WIN32
		// create (or truncate) the file so it exists before we build an FSSpec for it
		FILE * pFile = fopen (fileName.c_str(), "w");
		if (pFile != NULL) fclose (pFile);

		// NativePathNameToFSSpec wants a writable char buffer
		char fileNameStr[1024];
		strncpy (fileNameStr, fileName.c_str(), sizeof(fileNameStr) - 1);
		fileNameStr[sizeof(fileNameStr) - 1] = '\0';
		osErr = NativePathNameToFSSpec (fileNameStr, &fsSpec, 0);

	#endif
	#ifdef TARGET_OSX

		// create (or truncate) the file so it exists before we build an FSSpec for it
		FILE * pFile = fopen (fileName.c_str(), "w");
		if (pFile != NULL) fclose (pFile);

		Boolean isdir;
		osErr = FSPathMakeRef ((const UInt8*)fileName.c_str(), &fsref, &isdir);
		if (osErr == noErr)
			osErr = FSGetCatalogInfo (&fsref, kFSCatInfoNone, NULL, NULL, &fsSpec, NULL);
	#endif

    if (osErr && (osErr != fnfErr))    /* File-not-found error is ok         */
      { 
      printf ("getting FSSpec failed %d\n", osErr); 
      goto bail; 
      }
	 

	/*  Step 1:  Create a new, empty movie file and a movie that references that 
    file (CreateMovieFile).  
    ======================================================================== */
            
    osErr = CreateMovieFile 
      (
      &fsSpec,                         /* FSSpec specifier                   */
      FOUR_CHAR_CODE('TVOD'),          /* file creator type, TVOD = QT player*/
      smCurrentScript,                 /* movie script system to use         */
      createMovieFileDeleteCurFile     /* movie file creation flags          */
        | createMovieFileDontCreateResFile,
      &sResRefNum,                     /* returned file ref num to data fork */
      &movie                           /* returned handle to open empty movie*/
                                       /*   that references the created file */
      );
    if (osErr) 
      { 
      printf ("CreateMovieFile failed %d\n", osErr); 
      goto bail; 
      }


	/*  Step 2:  Add a new track to that movie (NewMovieTrack).
    =======================================================  */

    track = NewMovieTrack 
      (
      movie,                           /* the movie to add track to          */
      FixRatio (w, 1),                 /* width of track in pixels (Fixed)   */
      FixRatio (h, 1),                 /* height of track in pixels (Fixed)  */ 
      kNoVolume                        /* default volume level               */
      );
    osErr = GetMoviesError ();
    if (osErr) 
      { 
      printf ("NewMovieTrack failed %d\n", osErr); 
      goto bail; 
      }
    

	/*  Step 3:  Add a new media to that track (NewTrackMedia).
    =======================================================  */
    
    media = NewTrackMedia 
      (
      track,                           /* the track to add the media to      */
      VideoMediaType,                  /* media type, e.g. SoundMediaType    */
      600,                             /* num media time units that elapse/sec*/
      NULL,                            /* ptr to file that holds media sampls*/
      0                                /* type of ptr to media samples       */
      );
    osErr = GetMoviesError ();
    if (osErr) 
      { 
      printf ("NewTrackMedia failed %d\n", osErr); 
      goto bail; 
      }

	/*  Step 4:  Add media samples to the media. 
    ========================================  */
    
    BeginMediaEdits (media);           /* Inform the Movie Toolbox that we   */
                                       /*   want to change the media samples */
                                       /*   referenced by a track's media.   */
                                       /*   This opens the media container   */
                                       /*   and makes it ready to receive    */
                                       /*   and/or remove sample data.       */
    
    
    

    // Step 5: setup graphics port for qt movie and compression type ---
    
    /*  Create a new offscreen graphics world that will hold the movie's
    drawing surface.  Each frame to be recorded is drawn or copied into
    this surface before being compressed and added to the media.
    =================================================================  */
    
    MacSetRect (&rect, 0, 0, w, h);

    osErr = NewGWorld 
      (
      &pMovieGWorld,                   /* receives the new GWorld.           */
      24,                              /* pixel depth in bits/pixel          */
      &rect,                           /* desired size of the GWorld.        */
      NULL, 
      NULL, 
      (GWorldFlags) 0
      );
    if (osErr != noErr) 
      { 
      printf ("NewGWorld 1 failed %d\n", osErr); 
      goto bail; 
      }


/*  Retrieve the pixel map associated with that graphics world and lock 
    the pixel map in memory.  GetMaxCompressionSize() and CompressImage()
    only operate on pixel maps, not graphics worlds.
    =====================================================================  */
    
    pixMapHandle = GetGWorldPixMap (pMovieGWorld);
    if (pixMapHandle == NULL) 
      { 
      printf ("GetGWorldPixMap failed\n"); 
      goto bail; 
      }
    LockPixels (pixMapHandle);


/*  Get the maximum number of bytes required to hold an image having the 
    specified characteristics compressed using the specified compressor.
    ====================================================================  */

     
    osErr = GetMaxCompressionSize 
      (
      pixMapHandle,							/* the pixel map to compress from.    */
      &rect,								/* the image rectangle.               */
      0,									/* let ICM choose image bit depth.    */
      codecHighQuality,						/* compression quality specifier.     */
      kRawCodecType,						/* desired compression type; raw      */
											/*   yields the largest estimate, so  */
											/*   the buffer fits any later codec  */
      (CompressorComponent) anyCodec,		/* codec specifier.                   */
      &lMaxCompressionSize					/* receives max bytes needed for cmp. */
      );
    if (osErr != noErr) 
      { 
      printf ("GetMaxCompressionSize failed %d\n", osErr); 
      goto bail; 
      }


/*  Allocate a buffer to hold the compressed image data by creating a new
    handle.
    =====================================================================  */
    hCompressedData = NewHandle (lMaxCompressionSize);
    if (hCompressedData == NULL) 
      { 
      printf ("NewHandle(%ld) failed\n", lMaxCompressionSize); 
      goto bail; 
      }

/*  Lock the handle and then dereference it to obtain a pointer to the data 
    buffer because CompressImage() wants us to pass it a pointer, not a 
    handle. 
    =======================================================================  */

    HLockHi (hCompressedData);
    pCompressedData = *hCompressedData;

/*  Create an image description object in memory of minimum size to pass 
    to CompressImage().  CompressImage() will resize the memory as 
    necessary so create it small here.
    ====================================================================  */
    
    hImageDescription = (ImageDescriptionHandle) NewHandle (4);
    if (hImageDescription == NULL) 
      { 
      printf ("NewHandle(4) failed\n"); 
      goto bail; 
      }
	
	
	
	bSetupForRecordingMovie = true;
    return;
    
    
    
    
  bail:    
	printf("got to bail somehows \n");
    if (sResRefNum != 0) CloseMovieFile (sResRefNum);
    if (movie     != NULL) DisposeMovie (movie);

    //ExitMovies ();                     /* Finalize Quicktime                 */
    
    return;
}
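//-----------------------------------------------------------------------------
/*  Hedged sketch (not part of the original listing): setup() above only
    prepares the offscreen GWorld, the compression buffer and the image
    description.  The per-frame step that feeds the media would compress
    whatever has been drawn or copied into pMovieGWorld and append it as one
    sample, roughly as below.  The method name "addFrameFromGWorld" and the
    fixed 30 fps duration are assumptions, not the addon's actual API.  */
void ofxQtVideoSaver::addFrameFromGWorld(){

	if (!bSetupForRecordingMovie) return;

    /*  Compress the pixel map into the preallocated buffer.  CompressImage
    resizes hImageDescription and fills in its dataSize field.
    =====================================================================  */
    osErr = CompressImage
      (
      pixMapHandle,                    /* pixels to compress                 */
      &rect,                           /* area of the pixel map to compress  */
      codecHighQuality,                /* compression quality specifier      */
      kRawCodecType,                   /* same codec chosen in setup()       */
      hImageDescription,               /* receives the image description     */
      pCompressedData                  /* receives the compressed data       */
      );
    if (osErr != noErr)
      {
      printf ("CompressImage failed %d\n", osErr);
      return;
      }

    /*  Append the compressed frame to the media; 20 units at the media time
    scale of 600 corresponds to 30 frames per second.
    =====================================================================  */
    osErr = AddMediaSample
      (
      media,                           /* media to receive the sample        */
      hCompressedData,                 /* handle holding the sample data     */
      0,                               /* offset into that handle            */
      (**hImageDescription).dataSize,  /* number of bytes actually used      */
      600 / 30,                        /* sample duration in media units     */
      (SampleDescriptionHandle) hImageDescription,
      1,                               /* one sample                         */
      0,                               /* 0 = sync (key) sample flags        */
      NULL                             /* returned media time (unused)       */
      );
    if (osErr != noErr)
      printf ("AddMediaSample failed %d\n", osErr);
}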
Example #4
void QTVectors_CreateVectorMovie (UInt32 theBuildAtomMethod)
{
	Handle						myHandle = NULL;
	ImageDescriptionHandle		mySampleDesc = NULL;
	short						myResRefNum = 0;
	short						myResID = movieInDataForkResID;
	Movie						myMovie = NULL;
	Track						myTrack;
	Media						myMedia;
	FSSpec						myFile;
	Boolean						myIsSelected = false;
	Boolean						myIsReplacing = false;	
	StringPtr 					myPrompt = QTUtils_ConvertCToPascalString(kVectorSavePrompt);
	StringPtr 					myFileName = QTUtils_ConvertCToPascalString(kVectorSaveMovieFileName);
	ComponentInstance			myComponent = NULL;		// stays NULL when the raw data stream method is used
	ComponentResult				myResult;
	long						myFlags = createMovieFileDeleteCurFile | createMovieFileDontCreateResFile;
	OSErr						myErr = noErr;
	
	// METHOD ONE: use a raw data stream
	
	if (theBuildAtomMethod == kUseRawDataStream) {
	
		// kUseRawDataStream: build the vector data using a stream of hard-coded raw data
		// NOTE: the data in the stream *must* be big-endian, since it's stored in a QuickTime atom container.

		long					myPath[] = {	
			
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)), EndianU32_NtoB(kCurveAntialiasControlAtom),
			EndianU32_NtoB(kCurveAntialiasOn),

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)), EndianU32_NtoB(kCurveFillTypeAtom),
			EndianU32_NtoB(gxEvenOddFill),

		// a big white enclosing rectangle (600 x 600)
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(ARGBColor)), EndianU32_NtoB(kCurveARGBColorAtom),
			EndianU32_NtoB(0xffffffff),	// alpha, red
			EndianU32_NtoB(0xffffffff),	// green, blue
										// it's white!

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)*11), EndianU32_NtoB(kCurvePathAtom),
			EndianU32_NtoB(1),			// one contour in path
			EndianU32_NtoB(4),			// four points in path
			EndianU32_NtoB(0x00000000),	// all points are on the curve: it's a rectangle! 
			EndianU32_NtoB(0x00000000), EndianU32_NtoB(0x00000000), 	// top left
			EndianU32_NtoB(0x02580000), EndianU32_NtoB(0x00000000),		// top right
			EndianU32_NtoB(0x02580000), EndianU32_NtoB(0x02580000),		// bottom right 
			EndianU32_NtoB(0x00000000), EndianU32_NtoB(0x02580000),		// bottom left

		// a black rounded square, centered at 150,150
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(ARGBColor)), EndianU32_NtoB(kCurveARGBColorAtom),
			EndianU32_NtoB(0x00000000),	// alpha, red
			EndianU32_NtoB(0x00000000),	// green, blue
										// it's black!

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)*11), EndianU32_NtoB(kCurvePathAtom),
			EndianU32_NtoB(1),			// one contour in path
			EndianU32_NtoB(4),			// four points in path
			EndianU32_NtoB(0xffffffff), // all points are off the curve: it's a rounded square! 
			EndianU32_NtoB(0x00640000), EndianU32_NtoB(0x00640000),
			EndianU32_NtoB(0x00C80000), EndianU32_NtoB(0x00640000),
			EndianU32_NtoB(0x00C80000), EndianU32_NtoB(0x00C80000), 
			EndianU32_NtoB(0x00640000), EndianU32_NtoB(0x00C80000),

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)), EndianU32_NtoB(kCurveFillTypeAtom),
			EndianU32_NtoB(gxEvenOddFill),

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)), EndianU32_NtoB(kCurvePenThicknessAtom),
			EndianU32_NtoB(0x100000),
											
		// enable linear gradient for all following atoms
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)), EndianU32_NtoB(kCurveGradientTypeAtom),
			EndianU32_NtoB(kLinearGradient),
		
		// define the gradient: red -> green -> red -> blue									
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(GradientColorRecord)*4), EndianU32_NtoB(kCurveGradientRecordAtom),
										
			EndianU32_NtoB(0xffffffff),	// gradient color record 1:
			EndianU32_NtoB(0x00000000),	// red
			EndianU32_NtoB(0x00000000),	// beginning of gradient
										
			EndianU32_NtoB(0x77770000),	// gradient color record 2:
			EndianU32_NtoB(0xffff0000),	// green
			EndianU32_NtoB(0x00004000),
										
			EndianU32_NtoB(0x3333ffff),	// gradient color record 3:
			EndianU32_NtoB(0x00000000),	// red
			EndianU32_NtoB(0x0000C000),
										
			EndianU32_NtoB(0xffff0000),	// gradient color record 4:
			EndianU32_NtoB(0x0000ffff),	// blue
			EndianU32_NtoB(0x00010000),	// end of gradient

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)), EndianU32_NtoB(kCurveGradientAngleAtom),
			EndianU32_NtoB(0x00450000),	// gradient at a 45° angle
		
		// a green rectangle, centered at 40,40, painted with a linear gradient									
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(ARGBColor)), EndianU32_NtoB(kCurveARGBColorAtom),
			EndianU32_NtoB(0x00000000),	// alpha, red
			EndianU32_NtoB(0xffff0000),	// green, blue
										// it's green!

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)*11), EndianU32_NtoB(kCurvePathAtom),
			EndianU32_NtoB(1),			// one contour in path
			EndianU32_NtoB(4),			// four points in path
			EndianU32_NtoB(0x00000000),	// all points are on the curve: it's a rectangle! 
			EndianU32_NtoB(0x00100000), EndianU32_NtoB(0x00100000),
			EndianU32_NtoB(0x00400000), EndianU32_NtoB(0x00100000),
			EndianU32_NtoB(0x00400000), EndianU32_NtoB(0x00400000),
			EndianU32_NtoB(0x00100000), EndianU32_NtoB(0x00400000),

		// disable gradient for all following atoms (since no atom data)
		EndianU32_NtoB(kSizeOfSizeAndTagFields), EndianU32_NtoB(kCurveGradientRecordAtom),
									
		// a red rounded square, centered at 50,50
		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(ARGBColor)), EndianU32_NtoB(kCurveARGBColorAtom),
			EndianU32_NtoB(0x3333ffff),	// alpha, red
			EndianU32_NtoB(0x00000000),	// green, blue
										// it's red!

		EndianU32_NtoB(kSizeOfSizeAndTagFields + sizeof(long)*11), EndianU32_NtoB(kCurvePathAtom),
			EndianU32_NtoB(1L),			// one contour in path
			EndianU32_NtoB(4L),			// four points in path
			EndianU32_NtoB(0xffffffff), // all points are off the curve: it's a rounded square! 
			EndianU32_NtoB(0x001e0000), EndianU32_NtoB(0x001e0000),
			EndianU32_NtoB(0x00460000), EndianU32_NtoB(0x001e0000),
			EndianU32_NtoB(0x00460000), EndianU32_NtoB(0x00460000),
			EndianU32_NtoB(0x001e0000), EndianU32_NtoB(0x00460000),

		EndianU32_NtoB(kSizeOfZeroAtomHeader), EndianU32_NtoB(kCurveEndAtom),
	};
			
		myHandle = NewHandle(sizeof(myPath));
		if (myHandle == NULL)
			goto bail;
			
		BlockMove(myPath, *myHandle, sizeof(myPath));
	
	}	// end of kUseRawDataStream

	
	// METHOD TWO: use the Curve Utilities API
	
	if (theBuildAtomMethod == kUseCurveUtilities) {
	
		// kUseCurveUtilities: build the vector data using the Curve Utilities API		
		Handle						myPath;
		gxPoint						myPoint;
		long						myAtomData[14];
		ARGBColor					myColor;
		GradientColorRecord			myGradients[4];
	
		// open the vector codec; we'll need it for some subsequent calls
		myComponent = OpenDefaultComponent(decompressorComponentType, kVectorCodecType);
		if (myComponent == NULL)
			goto bail;

		// create a new, empty vector data stream
		myResult = CurveCreateVectorStream(myComponent, &myHandle);
		if (myResult != noErr)
			goto bail;
		
		// now start adding atoms holding the vector data
		
		// set antialiasing on
		myAtomData[0] = EndianU32_NtoB(kCurveAntialiasOn);
		CurveAddAtomToVectorStream(myComponent, kCurveAntialiasControlAtom, sizeof(long), myAtomData, myHandle);

		// set fill type
		myAtomData[0] = EndianU32_NtoB(gxEvenOddFill);
		CurveAddAtomToVectorStream(myComponent, kCurveFillTypeAtom, sizeof(long), myAtomData, myHandle);

		// a big white enclosing rectangle (600 x 600)
		myColor.alpha = EndianU16_NtoB(0xffff);
		myColor.red = EndianU16_NtoB(0xffff);
		myColor.green = EndianU16_NtoB(0xffff);
		myColor.blue = EndianU16_NtoB(0xffff);
		CurveAddAtomToVectorStream(myComponent, kCurveARGBColorAtom, sizeof(ARGBColor), &myColor, myHandle);

#if USE_CURVE_INSERT_POINT_INTO_PATH
		// create a new, empty path
		CurveNewPath(myComponent, &myPath);

		myPoint.x = 0x00000000;
		myPoint.y = 0x00000000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 0, true);
		
		myPoint.x = 0x02580000;
		myPoint.y = 0x00000000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 1, true);
		
		myPoint.x = 0x02580000;
		myPoint.y = 0x02580000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 2, true);
		
		myPoint.x = 0x00000000;
		myPoint.y = 0x02580000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 3, true);

		// add the 'path' atom to the vector data stream
		CurveAddPathAtomToVectorStream(myComponent, myPath, myHandle);
		DisposeHandle(myPath);
#else
		myAtomData[0] = EndianU32_NtoB(1L);
		myAtomData[1] = EndianU32_NtoB(4L);
		myAtomData[2] = EndianU32_NtoB(0x00000000);
		myAtomData[3] = EndianU32_NtoB(0x00000000);
		myAtomData[4] = EndianU32_NtoB(0x00000000);
		myAtomData[5] = EndianU32_NtoB(0x02580000);
		myAtomData[6] = EndianU32_NtoB(0x00000000);
		myAtomData[7] = EndianU32_NtoB(0x02580000);
		myAtomData[8] = EndianU32_NtoB(0x02580000);
		myAtomData[9] = EndianU32_NtoB(0x00000000);
		myAtomData[10] = EndianU32_NtoB(0x02580000);
		CurveAddAtomToVectorStream(myComponent, kCurvePathAtom, sizeof(long)*11, myAtomData, myHandle);
#endif
		
		// a black rounded square, centered at 150,150
		myColor.alpha = EndianU16_NtoB(0x0000);
		myColor.red = EndianU16_NtoB(0x0000);
		myColor.green = EndianU16_NtoB(0x0000);
		myColor.blue = EndianU16_NtoB(0x0000);
		CurveAddAtomToVectorStream(myComponent, kCurveARGBColorAtom, sizeof(ARGBColor), &myColor, myHandle);

#if USE_CURVE_INSERT_POINT_INTO_PATH
		// create a new, empty path
		CurveNewPath(myComponent, &myPath);

		myPoint.x = 0x00640000;
		myPoint.y = 0x00640000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 0, false);
		
		myPoint.x = 0x00C80000;
		myPoint.y = 0x00640000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 1, false);
		
		myPoint.x = 0x00C80000;
		myPoint.y = 0x00C80000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 2, false);
		
		myPoint.x = 0x00640000;
		myPoint.y = 0x00C80000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 3, false);

		// add the 'path' atom to the vector data stream
		CurveAddPathAtomToVectorStream(myComponent, myPath, myHandle);
		DisposeHandle(myPath);
#else
		myAtomData[0] = EndianU32_NtoB(1L);
		myAtomData[1] = EndianU32_NtoB(4L);
		myAtomData[2] = EndianU32_NtoB(0xffffffff);
		myAtomData[3] = EndianU32_NtoB(0x00640000);
		myAtomData[4] = EndianU32_NtoB(0x00640000);
		myAtomData[5] = EndianU32_NtoB(0x00C80000);
		myAtomData[6] = EndianU32_NtoB(0x00640000);
		myAtomData[7] = EndianU32_NtoB(0x00C80000);
		myAtomData[8] = EndianU32_NtoB(0x00C80000);
		myAtomData[9] = EndianU32_NtoB(0x00640000);
		myAtomData[10] = EndianU32_NtoB(0x00C80000);
		CurveAddAtomToVectorStream(myComponent, kCurvePathAtom, sizeof(long)*11, myAtomData, myHandle);
#endif

		// set fill type
		myAtomData[0] = EndianU32_NtoB(gxEvenOddFill);
		CurveAddAtomToVectorStream(myComponent, kCurveFillTypeAtom, sizeof(long), myAtomData, myHandle);

		// set pen thickness
		myAtomData[0] = EndianU32_NtoB(0x100000);
		CurveAddAtomToVectorStream(myComponent, kCurvePenThicknessAtom, sizeof(long), myAtomData, myHandle);

		// enable linear gradient for all following atoms
		myAtomData[0] = EndianU32_NtoB(kLinearGradient);
		CurveAddAtomToVectorStream(myComponent, kCurveGradientTypeAtom, sizeof(long), myAtomData, myHandle);

		// define the gradient: red -> green -> red -> blue									
		myGradients[0].thisColor.alpha = EndianU16_NtoB(0xffff);
		myGradients[0].thisColor.red = EndianU16_NtoB(0xffff);
		myGradients[0].thisColor.green = EndianU16_NtoB(0x0000);
		myGradients[0].thisColor.blue = EndianU16_NtoB(0x0000);
		myGradients[0].endingPercentage = EndianU32_NtoB(0x00000000);
		myGradients[1].thisColor.alpha = EndianU16_NtoB(0x7777);
		myGradients[1].thisColor.red = EndianU16_NtoB(0x0000);
		myGradients[1].thisColor.green = EndianU16_NtoB(0xffff);
		myGradients[1].thisColor.blue = EndianU16_NtoB(0x0000);
		myGradients[1].endingPercentage = EndianU32_NtoB(0x00004000);
		myGradients[2].thisColor.alpha = EndianU16_NtoB(0x3333);
		myGradients[2].thisColor.red = EndianU16_NtoB(0xffff);
		myGradients[2].thisColor.green = EndianU16_NtoB(0x0000);
		myGradients[2].thisColor.blue = EndianU16_NtoB(0x0000);
		myGradients[2].endingPercentage = EndianU32_NtoB(0x0000C000);
		myGradients[3].thisColor.alpha = EndianU16_NtoB(0xffff);
		myGradients[3].thisColor.red = EndianU16_NtoB(0x0000);
		myGradients[3].thisColor.green = EndianU16_NtoB(0x0000);
		myGradients[3].thisColor.blue = EndianU16_NtoB(0xffff);
		myGradients[3].endingPercentage = EndianU32_NtoB(0x00010000);
		CurveAddAtomToVectorStream(myComponent, kCurveGradientRecordAtom, sizeof(GradientColorRecord)*4, myGradients, myHandle);

		// set gradient angle
		myAtomData[0] = EndianU32_NtoB(0x00450000);
		CurveAddAtomToVectorStream(myComponent, kCurveGradientAngleAtom, sizeof(long), myAtomData, myHandle);

		// a green rectangle, centered at 40,40, painted with a linear gradient									
		myColor.alpha = EndianU16_NtoB(0x0000);
		myColor.red = EndianU16_NtoB(0x0000);
		myColor.green = EndianU16_NtoB(0xffff);
		myColor.blue = EndianU16_NtoB(0x0000);
		CurveAddAtomToVectorStream(myComponent, kCurveARGBColorAtom, sizeof(ARGBColor), &myColor, myHandle);

#if USE_CURVE_INSERT_POINT_INTO_PATH
		// create a new, empty path
		CurveNewPath(myComponent, &myPath);

		myPoint.x = 0x00100000;
		myPoint.y = 0x00100000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 0, true);
		
		myPoint.x = 0x00400000;
		myPoint.y = 0x00100000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 1, true);
		
		myPoint.x = 0x00400000;
		myPoint.y = 0x00400000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 2, true);
		
		myPoint.x = 0x00100000;
		myPoint.y = 0x00400000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 3, true);

		// add the 'path' atom to the vector data stream
		CurveAddPathAtomToVectorStream(myComponent, myPath, myHandle);
		DisposeHandle(myPath);
#else
		myAtomData[0] = EndianU32_NtoB(1L);
		myAtomData[1] = EndianU32_NtoB(4L);
		myAtomData[2] = EndianU32_NtoB(0x00000000);
		myAtomData[3] = EndianU32_NtoB(0x00100000);
		myAtomData[4] = EndianU32_NtoB(0x00100000);
		myAtomData[5] = EndianU32_NtoB(0x00400000);
		myAtomData[6] = EndianU32_NtoB(0x00100000);
		myAtomData[7] = EndianU32_NtoB(0x00400000);
		myAtomData[8] = EndianU32_NtoB(0x00400000);
		myAtomData[9] = EndianU32_NtoB(0x00100000);
		myAtomData[10] = EndianU32_NtoB(0x00400000);
		CurveAddAtomToVectorStream(myComponent, kCurvePathAtom, sizeof(long)*11, myAtomData, myHandle);
#endif

		// disable gradient for all following atoms (since no atom data)
		CurveAddAtomToVectorStream(myComponent, kCurveGradientTypeAtom, 0, NULL, myHandle);
		
		// a red rounded square, centered at 50,50
		myColor.alpha = EndianU16_NtoB(0x3333);
		myColor.red = EndianU16_NtoB(0xffff);
		myColor.green = EndianU16_NtoB(0x0000);
		myColor.blue = EndianU16_NtoB(0x0000);
		CurveAddAtomToVectorStream(myComponent, kCurveARGBColorAtom, sizeof(ARGBColor), &myColor, myHandle);

#if USE_CURVE_INSERT_POINT_INTO_PATH
		// create a new, empty path
		CurveNewPath(myComponent, &myPath);

		myPoint.x = 0x001e0000;
		myPoint.y = 0x001e0000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 0, false);
		
		myPoint.x = 0x00460000;
		myPoint.y = 0x001e0000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 1, false);
		
		myPoint.x = 0x00460000;
		myPoint.y = 0x00460000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 2, false);
		
		myPoint.x = 0x001e0000;
		myPoint.y = 0x00460000;
		CurveInsertPointIntoPath(myComponent, &myPoint, myPath, 0, 3, false);

		// add the 'path' atom to the vector data stream
		CurveAddPathAtomToVectorStream(myComponent, myPath, myHandle);
		DisposeHandle(myPath);
#else
		myAtomData[0] = EndianU32_NtoB(1L);
		myAtomData[1] = EndianU32_NtoB(4L);
		myAtomData[2] = EndianU32_NtoB(0xffffffff);
		myAtomData[3] = EndianU32_NtoB(0x001e0000);
		myAtomData[4] = EndianU32_NtoB(0x001e0000);
		myAtomData[5] = EndianU32_NtoB(0x00460000);
		myAtomData[6] = EndianU32_NtoB(0x001e0000);
		myAtomData[7] = EndianU32_NtoB(0x00460000);
		myAtomData[8] = EndianU32_NtoB(0x00460000);
		myAtomData[9] = EndianU32_NtoB(0x001e0000);
		myAtomData[10] = EndianU32_NtoB(0x00460000);
		CurveAddAtomToVectorStream(myComponent, kCurvePathAtom, sizeof(long)*11, myAtomData, myHandle);
#endif

		// add the 'zero' atom to the vector data stream
		CurveAddZeroAtomToVectorStream(myComponent, myHandle);
		
	}	// end of kUseCurveUtilities
	
	// create the image description
	mySampleDesc = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
	if (mySampleDesc == NULL)
		goto bail;
	
	// fill in the fields of the image description
	(**mySampleDesc).idSize = sizeof(ImageDescription);
	(**mySampleDesc).cType = kVectorCodecType;
	(**mySampleDesc).vendor = kAppleManufacturer;
	(**mySampleDesc).temporalQuality = codecNormalQuality;
	(**mySampleDesc).spatialQuality = codecNormalQuality;
	(**mySampleDesc).width = 300;
	(**mySampleDesc).height = 300;
	(**mySampleDesc).hRes = 72L << 16;
	(**mySampleDesc).vRes = 72L << 16;
	(**mySampleDesc).dataSize = 0L;
	(**mySampleDesc).frameCount = 1;
	(**mySampleDesc).depth = 0;
	(**mySampleDesc).clutID = -1;
		
	// prompt user for new file name
	QTFrame_PutFile(myPrompt, myFileName, &myFile, &myIsSelected, &myIsReplacing);
	if (!myIsSelected)
		goto bail;
	
	// create a movie file for the destination movie
	myErr = CreateMovieFile(&myFile, FOUR_CHAR_CODE('TVOD'), smCurrentScript, myFlags, &myResRefNum, &myMovie);
	if (myErr != noErr)
		goto bail;
	
	// create the vector track and media
	myTrack = NewMovieTrack(myMovie, FixDiv(300, 1), FixDiv(300, 1), kNoVolume);
	myMedia = NewTrackMedia(myTrack, VideoMediaType, 600, NULL, 0);
	
	// create the vector media sample
	BeginMediaEdits(myMedia);
		
	myErr = AddMediaSample(myMedia, myHandle, 0, GetHandleSize(myHandle), 600, (SampleDescriptionHandle)mySampleDesc, 1, 0, NULL);
	if (myErr != noErr)
		goto bail;
		
	EndMediaEdits(myMedia);
	
	// add the media to the track
	InsertMediaIntoTrack(myTrack, 0, 0, GetMediaDuration(myMedia), fixed1);
	AddMovieResource(myMovie, myResRefNum, &myResID, NULL);

bail:
	free(myPrompt);
	free(myFileName);

	if (mySampleDesc != NULL)
		DisposeHandle((Handle)mySampleDesc);
	
	if (myResRefNum != 0)
		CloseMovieFile(myResRefNum);

	if (myHandle != NULL)
		DisposeHandle(myHandle);

	if (myMovie != NULL)
		DisposeMovie(myMovie);

	if (myComponent != NULL)
		CloseComponent(myComponent);
}
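/*  Hedged sketch (an assumption, not Apple's original utility): the
	QTUtils_ConvertCToPascalString calls in these listings return a
	heap-allocated Pascal string (hence the matching free() calls), holding at
	most the first 255 characters of the C string.  Requires <stdlib.h> and
	<string.h>.  */
StringPtr QTUtils_ConvertCToPascalString (char *theString)
{
	StringPtr	myString = (StringPtr) malloc(strlen(theString) + 1);
	short		myIndex = 0;

	if (myString == NULL)
		return NULL;

	// copy the characters, then store the count in the Pascal length byte
	while ((theString[myIndex] != '\0') && (myIndex < 255)) {
		myString[myIndex + 1] = theString[myIndex];
		myIndex++;
	}

	myString[0] = (unsigned char) myIndex;

	return myString;
}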
Example #5
void mexFunction(int nlhs, mxArray* plhs[], int nrhs, const mxArray* prhs[]) {

  if (nlhs < 0) { mexErrMsgTxt("Too few output arguments."); return; }
  if (nlhs > 1) { mexErrMsgTxt("Too many output arguments."); return; }
  if (nrhs < 1) { mexErrMsgTxt("Too few input arguments."); return; }
  if (nrhs > 3) { mexErrMsgTxt("Too many input arguments."); return; }

  TimeValue duration;
  TimeRecord myTimeRecord;
  Rect bounds;
  OSErr result = 0;
  short resRefNum = -1;
  short actualResId = DoTheRightThing;
  FSSpec theFSSpec;
  GWorldPtr offWorld;
  Movie theMovie = nil;
  MovieController thePlayer = nil;
  MovieDrawingCompleteUPP myDrawCompleteProc;
  long frame_end;
  long myStep = 1;
  char location[PATH_BUFFER_SIZE];
  long frame_count;
  mwSize cdims[2];

  mxGetString(prhs[0], location, PATH_BUFFER_SIZE);

  if (nrhs > 2) {
    frame_start = rint(mxGetScalar(prhs[1]));
    frame_end = rint(mxGetScalar(prhs[2]));
  } else if (nrhs > 1) {
    frame_start = 1;
    frame_end = rint(mxGetScalar(prhs[1]));
  } else {
    frame_start = 1;
    frame_end = 0;
  }

  if (frame_start < 1) {
    mexErrMsgTxt("Error: the starting frame must be positive\n"); 
    return; 
  }

  if (frame_end < 0) {
    mexErrMsgTxt("Error: the ending frame must be positive\n"); 
    return; 
  }

  if (frame_end != 0 && frame_end < frame_start) {
    mexErrMsgTxt("Error: the ending frame must not be less than the starting frame\n"); 
    return; 
  }

  myDrawCompleteProc = NewMovieDrawingCompleteUPP(DrawCompleteProc);

  EnterMovies();            

  if (NativePathNameToFSSpec(location, &theFSSpec, 0) ||
      OpenMovieFile(&theFSSpec, &resRefNum, 0) ||
      NewMovieFromFile(&theMovie, resRefNum, &actualResId, 0, 0, 0)) {
    mexErrMsgTxt("Error: failed to open movie\n"); 
    return; 
  }

  if (resRefNum != -1) CloseMovieFile(resRefNum);

  GetMovieBox(theMovie, &bounds);
  QTNewGWorld(&offWorld, k32ARGBPixelFormat, &bounds, NULL, NULL, 0);
  LockPixels(GetGWorldPixMap(offWorld));
  SetGWorld(offWorld, NULL);

  thePlayer = NewMovieController(theMovie, &bounds, mcTopLeftMovie | mcNotVisible);
  SetMovieGWorld(theMovie, offWorld, NULL);
  SetMovieActive(theMovie, true);
  SetMovieDrawingCompleteProc(theMovie, movieDrawingCallWhenChanged, myDrawCompleteProc, (long) offWorld);
  GetMovieTime(theMovie, &myTimeRecord);
  duration = GetMovieDuration(theMovie);

  // Compute the number of frames for allocation of output structure
  frame_count = 0;
  while ((frame_end == 0 || frame_count < frame_end) && GetMovieTime(theMovie, NULL) < duration) {
    frame_count++;
    MCDoAction(thePlayer, mcActionStep, (Ptr) myStep);
  }
  SetMovieTime(theMovie, &myTimeRecord);
  
  // Ignore frames greater than those in the file
  if (frame_end == 0 || frame_count < frame_end) frame_end = frame_count;
  
  cdims[0] = frame_end - frame_start + 1; // Indices are one-based
  cdims[1] = 1; 

  plhs[0] = mxCreateCellArray(2, cdims);

  // Step through the movie and save the frame when in the chosen interval
  // Note: the step size seems to be handled as a short internally. 
  //       Using anything greater than 32758 will seek to an incorrect frame
  frame_num = 1;
  while (frame_num <= frame_end) {
    MCDoAction(thePlayer, mcActionStep, (Ptr) myStep);
    if (frame_num >= frame_start) {
      MCIdle(thePlayer);
      mxSetCell(plhs[0], frame_num - frame_start, framedata);
    }
    frame_num++;
  }

  UnlockPixels(GetGWorldPixMap (offWorld));
  DisposeGWorld(offWorld);
  DisposeMovieController (thePlayer);
  DisposeMovie(theMovie);
  DisposeMovieDrawingCompleteUPP(myDrawCompleteProc);
  ExitMovies();

  return;
}
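/* Hedged sketch (not part of the original listing): frame_start, frame_num and
   framedata used above are assumed to be file-scope variables shared with a
   movie-drawing-complete callback along these lines, declared before
   mexFunction in the real source.  The callback copies the 32-bit ARGB pixels
   of the offscreen GWorld (passed as the refCon) into a MATLAB uint8 array. */
static long frame_start, frame_num;
static mxArray *framedata;

static pascal OSErr DrawCompleteProc(Movie theMovie, long refCon) {
  GWorldPtr offWorld = (GWorldPtr) refCon;
  PixMapHandle pixMap = GetGWorldPixMap(offWorld);
  unsigned char *baseAddr = (unsigned char *) GetPixBaseAddr(pixMap);
  long rowBytes = GetPixRowBytes(pixMap);
  unsigned char *dst;
  long width, height, x, y;
  Rect bounds;
  mwSize dims[3];

  GetPixBounds(pixMap, &bounds);
  width = bounds.right - bounds.left;
  height = bounds.bottom - bounds.top;

  // height x width x 3 uint8 image; the alpha byte of each ARGB pixel is dropped
  dims[0] = height;  dims[1] = width;  dims[2] = 3;
  framedata = mxCreateNumericArray(3, dims, mxUINT8_CLASS, mxREAL);
  dst = (unsigned char *) mxGetData(framedata);

  // MATLAB arrays are column-major: element (y, x, plane) lives at
  // plane*height*width + x*height + y
  for (y = 0; y < height; y++) {
    for (x = 0; x < width; x++) {
      unsigned char *px = baseAddr + y * rowBytes + x * 4;  /* A, R, G, B */
      dst[0 * height * width + x * height + y] = px[1];     /* red   */
      dst[1 * height * width + x * height + y] = px[2];     /* green */
      dst[2 * height * width + x * height + y] = px[3];     /* blue  */
    }
  }

  return noErr;
}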
Example #6
void
PlaySound_QuickTime(const char *cSoundFilename)
{
    int err;
    Str255 pSoundFilename;      /* file pathname in Pascal-string format */
    FSSpec fsSoundFile;         /* movie file location descriptor */
    short resRefNum;            /* open movie file reference */

    if (!fQTInitialised) {
        pthread_mutex_init(&mutexQTAccess, NULL);
        ListCreate(&movielist);
        fQTInitialised = TRUE;
    }

    /* QuickTime is NOT reentrant in Mac OS (it is in MS Windows!) */
    pthread_mutex_lock(&mutexQTAccess);

    EnterMovies();              /* can be called multiple times */

    err = NativePathNameToFSSpec(cSoundFilename, &fsSoundFile, 0);
    if (err != 0) {
        outputf(_("PlaySound_QuickTime: error #%d, can't find %s.\n"), err, cSoundFilename);
    } else {
        /* open movie (WAV or whatever) file */
        err = OpenMovieFile(&fsSoundFile, &resRefNum, fsRdPerm);
        if (err != 0) {
            outputf(_("PlaySound_QuickTime: error #%d opening %s.\n"), err, cSoundFilename);
        } else {
            /* create movie from movie file */
            Movie *movie = (Movie *) malloc(sizeof(Movie));
            err = NewMovieFromFile(movie, resRefNum, NULL, NULL, 0, NULL);
            CloseMovieFile(resRefNum);
            if (err != 0) {
                outputf(_("PlaySound_QuickTime: error #%d reading %s.\n"), err, cSoundFilename);
            } else {
                /* reset movie timebase */
                TimeRecord t = { 0 };
                t.base = GetMovieTimeBase(*movie);
                SetMovieTime(*movie, &t);
                /* add movie to list of running movies */
                ListInsert(&movielist, movie);
                /* run movie */
                StartMovie(*movie);
            }
        }
    }

    pthread_mutex_unlock(&mutexQTAccess);

    if (!fQTPlaying) {
        /* launch playing thread if necessary */
        int err;
        pthread_t qtthread;
        fQTPlaying = TRUE;
        err = pthread_create(&qtthread, 0L, Thread_PlaySound_QuickTime, NULL);
        if (err == 0)
            pthread_detach(qtthread);
        else
            fQTPlaying = FALSE;
    }
}
Example #7
boolean playeropenmovieinwindow ( ptrfilespec f ) {
	
	//
	// 2006-06-23 creedon: FSRef-zed
	//
	// 7.0b4 PBS: open a movie in the QuickTime Player window and display it.  If the window isn't already open, open it.
	//
	
	short movieresref;
	OSErr err;
	hdlwindowinfo hinfo;
	FSSpec fs;
	
	if (macgetfsspec (f, &fs) != noErr)
		return (false);
	
	if (!findplayerwindow (&hinfo))
		
		playeropencommand (); // If the Player window doesn't exist, create a new one.
		
	getwindowinfo (playerwindow, &playerwindowinfo);
		
	playerdisposecurrentmovie (); // make sure the current movie has been disposed
	
	SetGWorld (GetWindowPort (playerwindow), nil);
	
	err = OpenMovieFile ( &fs, &movieresref, fsRdPerm);
	
	if (err != noErr)
		
		return (false);
	
	NewMovieFromFile (&currentmovie, movieresref, nil, nil, newMovieActive, nil);
			
	CloseMovieFile (movieresref);
	
	SetMovieGWorld (currentmovie, GetWindowPort (playerwindow), nil);
	
	GetMovieBox (currentmovie, &currentmovierect);
	
	OffsetRect (&currentmovierect, -currentmovierect.left, -currentmovierect.top);
	
	SetMovieBox (currentmovie, &currentmovierect);		
		
	currentcontroller = NewMovieController (currentmovie, &currentmovierect, mcTopLeftMovie);
	
	MCGetControllerBoundsRect (currentcontroller, &currentmovierect);
	
	//MCSetActionFilterWithRefCon (currentcontroller, NewMCActionFilterWithRefConProc(playermoviecontrollereventfilter), (long) playerwindow);
	
	SizeWindow (playerwindow, currentmovierect.right, currentmovierect.bottom, false);
	
	(**playerwindowinfo).contentrect = currentmovierect;
	
	AlignWindow (playerwindow, false, nil, nil); // position for optimal playback
	
	//MCDoAction (currentcontroller, mcActionSetKeysEnabled, (void *) true); /*enable keyboard input*/ /*No!*/
	
	MoviesTask (nil, 0);
	
	MCIdle (currentcontroller);
		
	return (true);	
	} // playeropenmovieinwindow
Example #8
void QTCmpr_CompressSequence (WindowObject theWindowObject)
{
	ComponentInstance			myComponent = NULL;
	GWorldPtr					myImageWorld = NULL;		// the graphics world we draw the images in
	PixMapHandle				myPixMap = NULL;
	Movie						mySrcMovie = NULL;
	Track						mySrcTrack = NULL;
	Movie						myDstMovie = NULL;
	Track						myDstTrack = NULL;
	Media						myDstMedia = NULL;
	Rect						myRect;
	PicHandle					myPicture = NULL;
	CGrafPtr					mySavedPort = NULL;
	GDHandle					mySavedDevice = NULL;
	SCTemporalSettings			myTimeSettings;
	SCDataRateSettings			myRateSettings;
	FSSpec						myFile;
	Boolean						myIsSelected = false;
	Boolean						myIsReplacing = false;	
	short						myRefNum = -1;
	StringPtr 					myMoviePrompt = QTUtils_ConvertCToPascalString(kQTCSaveMoviePrompt);
	StringPtr 					myMovieFileName = QTUtils_ConvertCToPascalString(kQTCSaveMovieFileName);
	MatrixRecord				myMatrix;
	ImageDescriptionHandle		myImageDesc = NULL;
	TimeValue					myCurMovieTime = 0L;
	TimeValue					myOrigMovieTime = 0L;		// current movie time, when compression is begun
	short						myFrameNum;		
	long						myFlags = 0L;
	long						myNumFrames = 0L;
	long						mySrcMovieDuration = 0L;	// duration of source movie
	OSErr						myErr = noErr;
#if USE_ASYNC_COMPRESSION
	ICMCompletionProcRecord		myICMComplProcRec;
	ICMCompletionProcRecordPtr	myICMComplProcPtr = NULL;
	OSErr						myICMComplProcErr = noErr;

	myICMComplProcRec.completionProc = NULL;
	myICMComplProcRec.completionRefCon = 0L;
#endif

	if (theWindowObject == NULL)
		goto bail;

	//////////
	//
	// get the movie and the first video track in the movie
	//
	//////////
	
	mySrcMovie = (**theWindowObject).fMovie;
	if (mySrcMovie == NULL)
		goto bail;

	mySrcTrack = GetMovieIndTrackType(mySrcMovie, 1, VideoMediaType, movieTrackMediaType);
	if (mySrcTrack == NULL)
		goto bail;
	
	// stop the movie; we don't want it to be playing while we're (re)compressing it
	SetMovieRate(mySrcMovie, (Fixed)0L);

	// get the current movie time, when compression is begun; we'll restore this later
	myOrigMovieTime = GetMovieTime(mySrcMovie, NULL);

	//////////
	//
	// configure and display the Standard Image Compression dialog box
	//
	//////////
	
	// open an instance of the Standard Image Compression dialog component
	myComponent = OpenDefaultComponent(StandardCompressionType, StandardCompressionSubType);
	if (myComponent == NULL)
		goto bail;

	// turn off "best depth" option in the compression dialog, because all of our
	// buffering is done at 32-bits (regardless of the depth of the source data)
	//
	// a more ambitious approach would be to loop through each of the video sample
	// descriptions in each of the video tracks looking for the deepest depth, and
	// using that for the best depth; better yet, we could find out which compressors
	// were used and set one of those as the default in the compression dialog
	SCGetInfo(myComponent, scPreferenceFlagsType, &myFlags);
	myFlags &= ~scShowBestDepth;
	SCSetInfo(myComponent, scPreferenceFlagsType, &myFlags);

	// because we are recompressing a movie that may have a variable frame rate,
	// we want to allow the user to leave the frame rate text field blank (in which
	// case we can preserve the frame durations of the source movie); if the user
	// enters a number, we will resample the movie at a new frame rate; if we don't
	// clear this flag, the compression dialog will not allow zero in the frame rate field
	//
	// NOTE: we could have set this flag above when we cleared the scShowBestDepth flag;
	// it is done here for clarity.	
	SCGetInfo(myComponent, scPreferenceFlagsType, &myFlags);
	myFlags |= scAllowZeroFrameRate;
	SCSetInfo(myComponent, scPreferenceFlagsType, &myFlags);

	// get the number of video frames in the movie
	myNumFrames = QTUtils_GetFrameCount(mySrcTrack);

	// get the bounding rectangle of the movie, create a 32-bit GWorld with those
	// dimensions, and draw the movie poster picture into it; this GWorld will be
	// used for the test image in the compression dialog box and for rendering movie
	// frames
	myPicture = GetMoviePosterPict(mySrcMovie);
	if (myPicture == NULL)
		goto bail;
		
	GetMovieBox(mySrcMovie, &myRect);

	myErr = NewGWorld(&myImageWorld, 32, &myRect, NULL, NULL, 0L);
	if (myErr != noErr)
		goto bail;
		
	// get the pixmap of the GWorld; we'll lock the pixmap, just to be safe
	myPixMap = GetGWorldPixMap(myImageWorld);
	if (!LockPixels(myPixMap))
		goto bail;

	// draw the movie poster image into the GWorld
	GetGWorld(&mySavedPort, &mySavedDevice);
	SetGWorld(myImageWorld, NULL);
	EraseRect(&myRect);
	DrawPicture(myPicture, &myRect);
	KillPicture(myPicture);
	SetGWorld(mySavedPort, mySavedDevice);

	// set the picture to be displayed in the dialog box; passing NULL for the rect
	// means use the entire image; passing 0 for the flags means to use the default
	// system method of displaying the test image, which is currently a combination
	// of cropping and scaling; personally, I prefer scaling (your mileage may vary)
	SCSetTestImagePixMap(myComponent, myPixMap, NULL, scPreferScaling);

	// install the custom procs, if requested
	// we can install two kinds of custom procedures for use in connection with
	// the standard dialog box: (1) a modal-dialog filter function, and (2) a hook
	// function to handle the custom button in the dialog box
	if (gUseExtendedProcs)
		QTCmpr_InstallExtendedProcs(myComponent, (long)myPixMap);
	
	// set up some default settings for the compression dialog
	SCDefaultPixMapSettings(myComponent, myPixMap, true);
	
	// clear out the default frame rate chosen by Standard Compression (a frame rate
	// of 0 means to use the rate of the source movie)
	myErr = SCGetInfo(myComponent, scTemporalSettingsType, &myTimeSettings);
	if (myErr != noErr)
		goto bail;

	myTimeSettings.frameRate = 0;
	SCSetInfo(myComponent, scTemporalSettingsType, &myTimeSettings);

	// request image compression settings from the user; in other words, put up the dialog box
	myErr = SCRequestSequenceSettings(myComponent);
	if (myErr == scUserCancelled)
		goto bail;

	// get a copy of the temporal settings the user entered; we'll need them for some
	// of our calculations (in a simpler application, we'd never have to look at them)	
	SCGetInfo(myComponent, scTemporalSettingsType, &myTimeSettings);

	//////////
	//
	// adjust the data rate [to be supplied][relevant only for movies that have sound tracks]
	//
	//////////

	
	//////////
	//
	// adjust the sample count
	//
	// if the user wants to resample the frame rate of the movie (as indicated a non-zero
	// value in the frame rate field) calculate the number of frames and duration for the new movie
	//
	//////////
	
	if (myTimeSettings.frameRate != 0) {
		long	myDuration = GetMovieDuration(mySrcMovie);
		long	myTimeScale = GetMovieTimeScale(mySrcMovie);
		float	myFloat = (float)myDuration * myTimeSettings.frameRate;
		
		myNumFrames = myFloat / myTimeScale / 65536;
		if (myNumFrames == 0)
			myNumFrames = 1;
	}

	//////////
	//
	// get the name and location of the new movie file
	//
	//////////

	// prompt the user for a file to put the compressed image into; in theory, the name
	// should have a file extension appropriate to the type of compressed data selected by the user;
	// this is left as an exercise for the reader
	QTFrame_PutFile(myMoviePrompt, myMovieFileName, &myFile, &myIsSelected, &myIsReplacing);
	if (!myIsSelected)
		goto bail;

	// delete any existing file of that name
	if (myIsReplacing) {
		myErr = DeleteMovieFile(&myFile);
		if (myErr != noErr)
			goto bail;
	}
		
	//////////
	//
	// create the target movie
	//
	//////////
	
	myErr = CreateMovieFile(&myFile, sigMoviePlayer, smSystemScript, 
								createMovieFileDeleteCurFile | createMovieFileDontCreateResFile, &myRefNum, &myDstMovie);
	if (myErr != noErr)
		goto bail;
	
	// create a new video movie track with the same dimensions as the entire source movie
	myDstTrack = NewMovieTrack(myDstMovie,
								(long)(myRect.right - myRect.left) << 16,
								(long)(myRect.bottom - myRect.top) << 16, kNoVolume);
	if (myDstTrack == NULL)
		goto bail;
	
	// create a media for the new track with the same time scale as the source movie;
	// because the time scales are the same, we don't have to do any time scale conversions.
	myDstMedia = NewTrackMedia(myDstTrack, VIDEO_TYPE, GetMovieTimeScale(mySrcMovie), 0, 0);
	if (myDstMedia == NULL)
		goto bail;
	
	// copy the user data and settings from the source to the dest movie
	CopyMovieSettings(mySrcMovie, myDstMovie);
	
	// set movie matrix to identity and clear the movie clip region (because the conversion
	// process transforms and composites all video tracks into one untransformed video track)
	SetIdentityMatrix(&myMatrix);
	SetMovieMatrix(myDstMovie, &myMatrix);
	SetMovieClipRgn(myDstMovie, NULL);
	
	// set the movie to highest quality imaging
	SetMoviePlayHints(mySrcMovie, hintsHighQuality, hintsHighQuality);

	myImageDesc = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
	if (myImageDesc == NULL)
		goto bail;

	// prepare for adding frames to the movie
	myErr = BeginMediaEdits(myDstMedia);
	if (myErr != noErr)
		goto bail;

	//////////
	//
	// compress the image sequence
	//
	// we are going to step through the source movie, compress each frame, and then add
	// the compressed frame to the destination movie
	//
	//////////
	
	myErr = SCCompressSequenceBegin(myComponent, myPixMap, NULL, &myImageDesc);
	if (myErr != noErr)
		goto bail;
	
#if USE_ASYNC_COMPRESSION
	myFlags = codecFlagUpdatePrevious + codecFlagUpdatePreviousComp + codecFlagLiveGrab;
	SCSetInfo(myComponent, scCodecFlagsType, &myFlags);
#endif

	// clear out our image GWorld and set movie to draw into it
	SetGWorld(myImageWorld, NULL);
	EraseRect(&myRect);
	SetMovieGWorld(mySrcMovie, myImageWorld, GetGWorldDevice(myImageWorld));

	// set current time value to beginning of the source movie
	myCurMovieTime = 0;

	// get a value we'll need inside the loop
	mySrcMovieDuration = GetMovieDuration(mySrcMovie);

	// loop through all of the interesting times we counted above
	for (myFrameNum = 0; myFrameNum < myNumFrames; myFrameNum++) {
		short			mySyncFlag;
		TimeValue		myDuration;
		long			myDataSize;
		Handle			myCompressedData;

		//////////
		//
		// get the next frame of the source movie
		//
		//////////
		
		// if we are resampling the movie, step to the next frame
		if (myTimeSettings.frameRate) {
			myCurMovieTime = myFrameNum * mySrcMovieDuration / (myNumFrames - 1);
			myDuration = mySrcMovieDuration / myNumFrames;
		} else {
			OSType		myMediaType = VIDEO_TYPE;
			
			myFlags = nextTimeMediaSample;

			// if this is the first frame, include the frame we are currently on		
			if (myFrameNum == 0)
				myFlags |= nextTimeEdgeOK;
			
			// if we are maintaining the frame durations of the source movie,
			// skip to the next interesting time and get the duration for that frame
			GetMovieNextInterestingTime(mySrcMovie, myFlags, 1, &myMediaType, myCurMovieTime, 0, &myCurMovieTime, &myDuration);
		}
		
		SetMovieTimeValue(mySrcMovie, myCurMovieTime);
		MoviesTask(mySrcMovie, 0);
		MoviesTask(mySrcMovie, 0);
		MoviesTask(mySrcMovie, 0);

		// if data rate constraining is being done, tell Standard Compression the
		// duration of the current frame in milliseconds; we only need to do this
		// if the frames have variable durations
		if (!SCGetInfo(myComponent, scDataRateSettingsType, &myRateSettings)) {
			myRateSettings.frameDuration = myDuration * 1000 / GetMovieTimeScale(mySrcMovie);
			SCSetInfo(myComponent, scDataRateSettingsType, &myRateSettings);
		}

		//////////
		//
		// compress the current frame of the source movie and add it to the destination movie
		//
		//////////
		
		// if SCCompressSequenceFrame completes successfully, myCompressedData will hold
		// a handle to the newly-compressed image data and myDataSize will be the size of
		// the compressed data (which will usually be different from the size of the handle);
		// also mySyncFlag will be a value that indicates whether or not the frame is a
		// key frame (and which we pass directly to AddMediaSample); note that we do not need
		// to dispose of myCompressedData, since SCCompressSequenceEnd will do that for us
#if !USE_ASYNC_COMPRESSION
		myErr = SCCompressSequenceFrame(myComponent, myPixMap, &myRect, &myCompressedData, &myDataSize, &mySyncFlag);
		if (myErr != noErr)
			goto bail;
#else
		if (myICMComplProcPtr == NULL) {
			myICMComplProcRec.completionProc = NewICMCompletionProc(QTCmpr_CompletionProc);
			myICMComplProcRec.completionRefCon = (long)&myICMComplProcErr;
			myICMComplProcPtr = &myICMComplProcRec;
		}
		
		myICMComplProcErr = kAsyncDefaultValue;
		
		myErr = SCCompressSequenceFrameAsync(myComponent, myPixMap, &myRect, &myCompressedData, &myDataSize, &mySyncFlag, myICMComplProcPtr);
		if (myErr != noErr)
			goto bail;

		// spin our wheels while we're waiting for the compress call to complete
		while (myICMComplProcErr == kAsyncDefaultValue) {
			EventRecord			myEvent;
			
			WaitNextEvent(0, &myEvent, 60, NULL);
			SCAsyncIdle(myComponent);
		}
		myErr = myICMComplProcErr;
#endif

		myErr = AddMediaSample(myDstMedia, myCompressedData, 0, myDataSize, myDuration, (SampleDescriptionHandle)myImageDesc, 1, mySyncFlag, NULL);
		if (myErr != noErr)
			goto bail;
	}
	
	// close the compression sequence; this will dispose of the image description
	// and compressed data handles allocated by SCCompressSequenceBegin
	SCCompressSequenceEnd(myComponent);

	//////////
	//
	// add the media data to the destination movie
	//
	//////////
	
	myErr = EndMediaEdits(myDstMedia);
	if (myErr != noErr)
		goto bail;
	
	InsertMediaIntoTrack(myDstTrack, 0, 0, GetMediaDuration(myDstMedia), fixed1);

	// add the movie resource to the dst movie file.
	myErr = AddMovieResource(myDstMovie, myRefNum, NULL, NULL);
	if (myErr != noErr)
		goto bail;

	// flatten the movie data [to be supplied]
	
	// close the movie file
	CloseMovieFile(myRefNum);
	
bail:
	// close the Standard Compression component
	if (myComponent != NULL)
		CloseComponent(myComponent);

	if (mySrcMovie != NULL) {
		// restore the source movie's original graphics port and device
		SetMovieGWorld(mySrcMovie, mySavedPort, mySavedDevice);

		// restore the source movie's original movie time
		SetMovieTimeValue(mySrcMovie, myOrigMovieTime);
	}
	
	// restore the original graphics port and device
	SetGWorld(mySavedPort, mySavedDevice);

	// delete the GWorld we were drawing frames into
	if (myImageWorld != NULL)
		DisposeGWorld(myImageWorld);
	
#if USE_ASYNC_COMPRESSION
	if (myICMComplProcRec.completionProc != NULL)
		DisposeICMCompletionUPP(myICMComplProcRec.completionProc);
#endif

	free(myMoviePrompt);
	free(myMovieFileName);
}