Example #1
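This example is the main() of a command-line tool that renders an audio file offline through an Audio Unit. It parses -au (the AU's type, subtype and manufacturer codes), -i (input file), -o (output file), -p (an optional .aupreset file), -f (an optional render slice size in frames) and -m (a short memory-profiling mode), then pulls the input through CAAUProcessor's Render and PostProcess loops, writes the result to an AIFF file, and reports read/render times and CPU usage. usageStr, the ReadBuffer struct, the input callbacks and the PRINT_*/PerfResult macros are defined elsewhere in the sample and are not shown here.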
int main(int argc, const char * argv[])
{
	setbuf (stdout, NULL);


#if TARGET_OS_MAC
	{
		thread_extended_policy_data_t		theFixedPolicy;
		theFixedPolicy.timeshare = false;	// set to true for a non-fixed thread
		thread_policy_set(pthread_mach_thread_np(pthread_self()), 
													THREAD_EXTENDED_POLICY, 
													(thread_policy_t)&theFixedPolicy, 
													THREAD_EXTENDED_POLICY_COUNT);

		// We keep a reference to the spawning thread's priority around (initialized in the constructor), 
		// and set the importance of the child thread relative to the spawning thread's priority.
		thread_precedence_policy_data_t		thePrecedencePolicy;
		
		thePrecedencePolicy.importance = 63 - 36;
		thread_policy_set(pthread_mach_thread_np(pthread_self()), 
													THREAD_PRECEDENCE_POLICY, 
													(thread_policy_t)&thePrecedencePolicy, 
													THREAD_PRECEDENCE_POLICY_COUNT);
	}
#endif


// These are the variables that are set up from the input parsing
	char* srcFilePath = NULL;
	char* destFilePath = NULL;
	char* auPresetFile = NULL;
	bool shortMemoryProfile = false;
	OSType manu = 0, subType = 0, type = 0;
	int userSetFrames = -1;
	
	for (int i = 1; i < argc; ++i)
	{
		if (strcmp (argv[i], "-au") == 0) {
            if ( (i + 3) < argc ) {                
                StrToOSType (argv[i + 1], type);
                StrToOSType (argv[i + 2], subType);
                StrToOSType (argv[i + 3], manu);
				i += 3;
			} else {
				printf ("Which Audio Unit:\n%s", usageStr);
				exit(1);
			}
		}
		else if (strcmp (argv[i], "-i") == 0) {
			srcFilePath = const_cast<char*>(argv[++i]);
		}
		else if (strcmp (argv[i], "-o") == 0) {
			destFilePath = const_cast<char*>(argv[++i]);
		}
		else if (strcmp (argv[i], "-p") == 0) {
			auPresetFile = const_cast<char*>(argv[++i]);
		}
		else if (strcmp (argv[i], "-m") == 0) {
			shortMemoryProfile = true;
		}
		else if (strcmp (argv[i], "-f") == 0) {
			sscanf(argv[++i], "%d", &userSetFrames);
		}
		else {
			printf ("%s\n", usageStr);
			exit(1);
		}
	}
	
	if (!type || !srcFilePath) {
		printf ("%s\n", usageStr);
		exit(1);
	}
	if (!destFilePath) {
		if (!shortMemoryProfile) {
			printf ("%s\n", usageStr);
			exit(1);
		}
	}
			// delete pre-existing output file
	if (!shortMemoryProfile) {
		FSRef destFSRef;
		if (FSPathMakeRef((UInt8 *)destFilePath, &destFSRef, NULL) == noErr) {
			// output file exists - delete it
			if (FSDeleteObject(&destFSRef)) {
				printf ("Cannot Delete Output File\n");
				exit(1);
			}
		}
	}
	
	CAComponentDescription desc(type, subType, manu);
	
	CFPropertyListRef presetDict = ReadPresetFromPresetFile(auPresetFile);
	
		// the num of frames to use when processing the file with the Render call
	UInt32 maxFramesToUse = shortMemoryProfile ? 512 : 32768;

		// override the default if a frame count was set on the command line
	if (userSetFrames > 0) {
		maxFramesToUse = userSetFrames; 
	}
		
		// in some settings (for instance a delay with 100% feedback) the tail time is essentially infinite,
		// so you should safeguard the final offline (OL) render stage (post-processing), which is aimed at pulling the tail through.
		// if you want to bypass this completely, just set this to zero.
	Float64 maxTailTimeSecs = 10.;
	
#pragma mark -
#pragma mark __ The driving code
#pragma mark -

	try 
	{
		CAComponent comp(desc);
			
			 // CAAUProcessor's constructor throws... so make sure the component is valid
		if (comp.IsValid() == false) {
			printf ("Can't Find Component\n");
			desc.Print();
			exit(1);
		}
			
		CAAUProcessor processor(comp);
													processor.AU().Print();
		
		CAAudioFile srcFile;
		CAAudioFile destFile; 
		
		srcFile.Open(srcFilePath);

		CAStreamBasicDescription procFormat (srcFile.GetFileDataFormat());
		procFormat.SetCanonical (srcFile.GetFileDataFormat().NumberChannels(), false);

													printf ("Processing Format:\n\t");
													procFormat.Print();
		
		
		if (!shortMemoryProfile) {
			FSRef parentDir;
			CFStringRef filename;
			PosixPathToParentFSRefAndName(destFilePath, parentDir, filename);
			destFile.CreateNew (parentDir, filename, 'AIFF', srcFile.GetFileDataFormat());
			destFile.SetClientFormat (procFormat);
		}
	
		srcFile.SetClientFormat (procFormat);
		
		AUOutputBL outputList(procFormat);

		ReadBuffer* readBuf = NULL;	

#if !CAAF_USE_EXTAUDIOFILE
		UInt64 numInputSamples = srcFile.GetNumberPackets();
#else
		UInt64 numInputSamples = srcFile.GetNumberFrames();
#endif
	
		if (shortMemoryProfile) {
			readBuf = new ReadBuffer;
			readBuf->readData = new AUOutputBL(procFormat);
			readBuf->readFrames = 0;
			UInt32 numFrames = UInt32(procFormat.mSampleRate / 2);
			readBuf->readData->Allocate (numFrames);	// half a second of audio data
			readBuf->readData->Prepare();				// get the buffer list ready for the read below
				
				// read 1/2 second of audio into this read buffer
			srcFile.Read (numFrames, readBuf->readData->ABL());
			
			sInputCallback.inputProc = MemoryInputCallback;
			sInputCallback.inputProcRefCon = readBuf;
			numInputSamples = numFrames;
		}
		else {
			if (desc.IsFConv()) {
				maxFramesToUse = userSetFrames == -1 ? 512 : maxFramesToUse; 
				// some format converters can call you several times in small granularities,
				// so you can't use a large buffer to render or you won't return all of the input data.
				// This also lessens the final difference between what you should get and what you do get.
				// Converter units *really* should have versions that are offline AUs to 
				// handle this for you.
				sInputCallback.inputProc = FConvInputCallback;
			} else
				sInputCallback.inputProc = InputCallback;
			
			sInputCallback.inputProcRefCon = &srcFile;
		}
				
		OSStatus result;
		require_noerr (result = processor.EstablishInputCallback (sInputCallback), home);
		require_noerr (result = processor.SetMaxFramesPerRender (maxFramesToUse), home); 
		processor.SetMaxTailTime (maxTailTimeSecs);
		require_noerr (result = processor.Initialize (procFormat, numInputSamples), home);
		if (presetDict) {
			require_noerr (result = processor.SetAUPreset (presetDict), home);
			CFRelease (presetDict);
		}
			// this does ALL of the preflighting... this could be specialised for an OfflineAU type
			// to do the work piecemeal and drive a progress bar by using the OfflineAUPreflight method
		require_noerr (result = processor.Preflight (), home);
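		// note: the variables below are declared and then assigned separately (rather than
		// initialized at their declarations) so that the require_noerr "goto home" paths
		// above don't jump over variable initializations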
		
		bool isDone; isDone = false;
		bool needsPostProcessing;
		bool isSilence;
		UInt32 numFrames; numFrames = processor.MaxFramesPerRender();

#if CA_AU_PROFILE_TIME
		sReadTime = 0;
		sRenderTime = 0;
#endif
					
PRINT_MARKS();
			// this is the render loop
		while (!isDone) 
		{
											#if CA_AU_PROFILE_TIME 
												UInt64 now = CAHostTimeBase::GetTheCurrentTime(); 
											#endif
			outputList.Prepare(); // have to do this every time...
			require_noerr (result = processor.Render (outputList.ABL(), numFrames, isSilence, &isDone,
											&needsPostProcessing), home);
											#if CA_AU_PROFILE_TIME 
												sRenderTime += (CAHostTimeBase::GetTheCurrentTime() - now);
											#endif

if (!shortMemoryProfile)
	PRINT_PROGRESS(processor.GetOLPercentComplete());
else
	PRINT_PROGRESS(((processor.SampleTime() / numInputSamples) * 100.));
	
			if (numFrames && !shortMemoryProfile)
				destFile.Write (numFrames, outputList.ABL());
		}
			
			// this is the postprocessing if needed
		if (!shortMemoryProfile && needsPostProcessing) 
		{
			isDone = false;
			numFrames = processor.MaxFramesPerRender();
			while (!isDone) {
				outputList.Prepare(); // have to do this every time...
											#if CA_AU_PROFILE_TIME 
												UInt64 now = CAHostTimeBase::GetTheCurrentTime(); 
											#endif
				require_noerr (result = processor.PostProcess (outputList.ABL(), numFrames, 
													isSilence, isDone), home);
											#if CA_AU_PROFILE_TIME 
												sRenderTime += (CAHostTimeBase::GetTheCurrentTime() - now); 
											#endif

PRINT_PROGRESS(processor.GetOLPercentComplete());

				if (numFrames && !shortMemoryProfile)
					destFile.Write (numFrames, outputList.ABL());
			}
		}

printf ("\n");

home:
		if (result) {
			printf ("Exit with bad result:%ld\n", result);
			exit(result);
		}
		
		if (readBuf) {
			delete readBuf->readData;
			delete readBuf;
		}
					
#if CA_AU_PROFILE_TIME
	if (!shortMemoryProfile) {
			// this flushes any remaining data to be written to the disk. 
			// the source file is closed in its destructor of course
		destFile.Close(); 
			// open the file again, to get stats about it for profiling
		destFile.Open(destFilePath);
	}

	SInt64 numWritten;
	if (shortMemoryProfile)
		numWritten = 0;
	else {
#if !CAAF_USE_EXTAUDIOFILE
		numWritten = destFile.GetNumberPackets();
#else
		numWritten = destFile.GetNumberFrames();
#endif
	}

	printf ("Read File Time:%.2f secs for %lld packets (%.1f secs), wrote %lld packets\n", 
						(CAHostTimeBase::ConvertToNanos (sReadTime) / 1.0e9),
						numInputSamples,
						(numInputSamples / procFormat.mSampleRate),
						numWritten);

	if (!shortMemoryProfile) 
	{
#if !CAAF_USE_EXTAUDIOFILE
		UInt64 numOutputSamples = destFile.GetNumberPackets();
#else
		UInt64 numOutputSamples = destFile.GetNumberFrames();
#endif
	
		if (numOutputSamples == numInputSamples) {
			printf ("\tWrote the same number of packets as read\n");
		} else {
			bool expectationMet = !desc.IsOffline(); // we don't have any expectations for offline AUs
			if (processor.LatencySampleCount() || processor.TailSampleCount()) {
				if (numOutputSamples - numInputSamples == processor.TailSampleCount())
					expectationMet = true;
				if (expectationMet)	
					printf ("Correctly wrote \'Read Size + Tail\'. ");
				printf ("AU reports (samples): %ld latency, %ld tail\n", 
										processor.LatencySampleCount(), processor.TailSampleCount());
			}
			if (expectationMet == false) 
			{
				if (numOutputSamples > numInputSamples) {
					printf ("\tWrote %lld packets (%.2f secs) more than read\n", 
								(numOutputSamples - numInputSamples), 
								((numOutputSamples - numInputSamples) / procFormat.mSampleRate));
				} else {
					printf ("\tRead %lld packets (%.2f secs) more than wrote\n", 
								(numInputSamples - numOutputSamples), 
								((numInputSamples - numOutputSamples) / procFormat.mSampleRate));
				}
			}
		}
	}
	
	Float64 renderTimeSecs = CAHostTimeBase::ConvertToNanos (sRenderTime - sReadTime) / 1.0e9;
	printf ("Total Render Time:%.2f secs, using render slice size of %ld frames\n", 
							renderTimeSecs, maxFramesToUse);
	
	Float64 cpuUsage;
	if (shortMemoryProfile)
		cpuUsage = (renderTimeSecs / 0.5) * 100.;
	else
		cpuUsage = (renderTimeSecs / (numInputSamples / procFormat.mSampleRate)) * 100.;
	printf ("CPU Usage for Render Time:%.2f%%\n", cpuUsage);

	CFStringRef str = comp.GetCompName();
	UInt32 compNameLen = CFStringGetLength (str);
	
	CFStringRef presetName = NULL;
	if (auPresetFile) {
		CFPropertyListRef dict;
		if (processor.AU().GetAUPreset (dict) == noErr) {
			presetName = (CFStringRef)CFDictionaryGetValue((CFDictionaryRef)dict, CFSTR("name"));
			if (presetName) CFRetain (presetName);	// keep the preset name alive after the dictionary is released
			CFRelease (dict);
		}
	}

	UInt32 presetLen = presetName ? CFStringGetLength(presetName) : 0;

	char* cstr = (char*)malloc (compNameLen + presetLen + 2 + 1);
	CFStringGetCString (str, cstr, (CFStringGetLength (str) + 1), kCFStringEncodingASCII);
	if (presetName) {
		cstr[compNameLen] = ':';
		cstr[compNameLen+1] = ':';
		CFStringGetCString (presetName, cstr + compNameLen + 2, (CFStringGetLength (presetName) + 1), kCFStringEncodingASCII);
	}
	PerfResult("AudioUnitProcess", EndianU32_NtoB(comp.Desc().componentSubType), cstr, cpuUsage, "%realtime");
	free (cstr);
	if (presetName) CFRelease (presetName);
#endif


	}
	catch (CAXException &e) {
		char buf[256];
		printf("Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
		exit(1);
	}
	catch (...) {
		printf("An unknown error occurred\n");
		exit(1);
	}
			
	return 0;
}
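The input callbacks installed above (InputCallback, FConvInputCallback and MemoryInputCallback) are defined elsewhere in the sample. For illustration only, a callback of this kind is an AURenderCallback whose refCon is the object to read from; a minimal sketch of the file-reading case, assuming CAAudioFile::Read(UInt32 &, AudioBufferList *) as it is used above, might look like this (names are hypothetical, not from the sample):

// Illustrative sketch only -- not part of the sample code.
// An AURenderCallback whose refCon is the source CAAudioFile; it fills ioData
// with the requested number of frames in the client (processing) format.
static OSStatus InputCallbackSketch (void						*inRefCon,
									AudioUnitRenderActionFlags	*ioActionFlags,
									const AudioTimeStamp		*inTimeStamp,
									UInt32						inBusNumber,
									UInt32						inNumberFrames,
									AudioBufferList				*ioData)
{
	CAAudioFile &sourceFile = *static_cast<CAAudioFile*> (inRefCon);
	UInt32 framesToRead = inNumberFrames;
	sourceFile.Read (framesToRead, ioData);		// pulls the next slice of the file into ioData
	return noErr;
}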
Example #2
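This example decodes a QuickTime AAC file to a 16-bit, stereo, 44.1 kHz AIFF file. It opens the source with QTAACFile, sets a linear-PCM client format on it, creates the destination with CAAudioFile, and copies the audio across in roughly 32 KB reads until ReadPackets returns no more packets; if an exception is thrown after the output file has been created, the partially written file is deleted.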
int qtaacdecode(const char *infileName, const char *outfileName)
{
	QTAACFile infile;
	CAAudioFile outfile;
	CABufferList *readBuffer = NULL, *readPtrs = NULL;
	bool createdOutputFile = false;
	
	try {
		EnterMovies();
		infile.Open(infileName);
		
		// for now we'll always convert to 16-bit stereo 44100 AIFF
		CAStreamBasicDescription dataFormat;
		dataFormat.mSampleRate = 44100.;
		dataFormat.mFormatID = kAudioFormatLinearPCM;
		dataFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
		dataFormat.mFramesPerPacket = 1;
		dataFormat.mChannelsPerFrame = 2;
		dataFormat.mBitsPerChannel = 16;
		dataFormat.mBytesPerPacket = dataFormat.mBytesPerFrame = 4;

		infile.SetClientFormat(dataFormat);
		outfile.PrepareNew(dataFormat);
		
		// create the output file
		outfile.Create(outfileName, kAudioFileAIFFType);
		createdOutputFile = true;
		
		// prepare I/O buffers
		const UInt32 bytesToRead = 0x8000;
		const UInt32 packetsToRead = bytesToRead;	// OK, ReadPackets will limit as appropriate
		
		readBuffer = CABufferList::New("readbuf", dataFormat);
		readBuffer->AllocateBuffers(bytesToRead);
		readPtrs = CABufferList::New("readptrs", dataFormat);
		
		while (true) {
			UInt32 nPackets = packetsToRead;
			readPtrs->SetFrom(readBuffer);
			AudioBufferList *readbuf = &readPtrs->GetModifiableBufferList();
			
			infile.ReadPackets(nPackets, readbuf);
			if (nPackets == 0)
				break;

			outfile.WritePackets(nPackets, readbuf);
		}
	}
	catch (CAXException &e) {
		char buf[256];
		fprintf(stderr, "Error: %s (%s)\n", e.mOperation, CAXException::FormatError(buf, e.mError));
		delete readBuffer;
		delete readPtrs;
		infile.Close();
		if (createdOutputFile)
			outfile.Delete();
		return 1;
	}
	catch (...) {
		fprintf(stderr, "An unknown error occurred\n");
		delete readBuffer;
		delete readPtrs;
		infile.Close();
		if (createdOutputFile)
			outfile.Delete();
		return 1;
	}
	infile.Close();
	outfile.Close();
	return 0;
}
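qtaacdecode() returns 0 on success and 1 on failure. A minimal, hypothetical driver (not part of the sample; the real tool's argument handling is not shown here) could simply forward two path arguments to it:

// Hypothetical driver for qtaacdecode() above.
int main (int argc, const char *argv[])
{
	if (argc != 3) {
		fprintf (stderr, "usage: %s <input QuickTime/AAC file> <output AIFF file>\n", argv[0]);
		return 1;
	}
	return qtaacdecode (argv[1], argv[2]);
}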