Code Example #1
File: TValuePictControl.cpp  Project: osoumen/SFCEcho
// -----------------------------------------------------------------------------
//	StillTracking
// -----------------------------------------------------------------------------
//	Called during tracking.
//
OSStatus TValuePictControl::StillTracking(TCarbonEvent& inEvent, HIPoint& from)
{
    HIPoint mouse;
    float deltaX, deltaY;
    SInt32 mini, maxi;

    mini = GetMinimum();
    maxi = GetMaximum();

    inEvent.GetParameter<HIPoint>( kEventParamWindowMouseLocation, typeHIPoint, &mouse );
    ConvertToLocal(mouse);

    SInt32 curVal = GetValue();
    deltaX = mouse.x-from.x;
    deltaY = mouse.y-from.y;

    SInt32 val = SInt32(rint(curVal + (deltaX - deltaY) * (maxi - mini) / 300.));
    if ( val > maxi) val = maxi;
    if ( val < mini) val = mini;
    if (val != curVal) {
        SetValue ( val );
        from = mouse;
    }
    return noErr;
}
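
StillTracking turns the mouse movement since the last event into a value change: the horizontal-minus-vertical delta is scaled by the control's range over a 300-pixel full-scale drag, rounded, and clamped. A minimal standalone sketch of that mapping (the helper name and the dragRangePixels parameter are illustrative, not from the project above):

#include <algorithm>
#include <cmath>

// Illustrative helper: convert a drag delta (in pixels) into a new control value.
// dragRangePixels plays the role of the 300-pixel full-scale drag used above.
inline int mapDragToValue(int currentValue, float deltaX, float deltaY,
                          int minimum, int maximum, float dragRangePixels = 300.0f)
{
    // Rightward and upward motion both increase the value.
    float scaled = (deltaX - deltaY) * float(maximum - minimum) / dragRangePixels;
    int value = int(std::lround(currentValue + scaled));
    // Clamp to the control's range.
    return std::min(maximum, std::max(minimum, value));
}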
Code Example #2
File: CAAUProcessor.cpp  Project: abscura/audiounitjs
OSStatus	CAAUProcessor::PostProcess (AudioBufferList 	*ioData, 
										UInt32 				&ioNumFrames, 
										bool				&outIsSilence,
										bool 				&outDone)
{
	if (IsOfflineAU() || !IsOfflineContext()) 
		return kAudioUnitErr_CannotDoInCurrentContext;
	
	outDone = false;
	
	// we have fewer samples left to process than we've been asked for
	if (mTailSamplesRemaining <= SInt32(ioNumFrames)) {
		outDone = true;
		ioNumFrames = mTailSamplesRemaining > 0 ? mTailSamplesRemaining : 0;
		SetBufferListToNumFrames (*ioData, ioNumFrames);
		if (ioNumFrames == 0)
			return noErr;
	}
	
	AudioUnitRenderActionFlags renderFlags = 0;
	OSStatus result;
	ca_require_noerr (result = mUnit.Render (&renderFlags, &mRenderTimeStamp, 0, ioNumFrames, ioData), home);
	mRenderTimeStamp.mSampleTime += ioNumFrames;
	mTailSamplesRemaining -= ioNumFrames;
	outIsSilence = (renderFlags & kAudioUnitRenderAction_OutputIsSilence);
			
	if (outDone) {
		ca_require_noerr (result = SetInputCallback (mUnit, mUserCallback), home);
		mUnit.GlobalReset (); //flush this out, as we're done with this phase
	}
home:
	return result;
}
Code Example #3
File: XControl.cpp  Project: fruitsamples/XFramework
XControl::XControl(XWindow *window, OSType signature, UInt32 id)
    :mListener(0)
{
    ControlID cid = { signature, id };
    RequireNoErrString(GetControlByID(window->MacWindow(), &cid, &mControl), "GetControlByID failed");
    SetControlReference(mControl, SInt32(this));
}
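
The constructor stores the C++ wrapper object in the control's reference value. A hedged sketch of recovering it later, e.g. inside an event handler (assumes the Carbon GetControlReference API; the helper name is illustrative). Note that SInt32(this) truncates pointers in 64-bit builds, so this pattern is limited to 32-bit Carbon code:

#include <Carbon/Carbon.h>

class XControl;  // the wrapper class from the example above

// Illustrative helper: recover the XControl stored with SetControlReference().
static XControl* GetWrapper(ControlRef control)
{
    // GetControlReference returns the SInt32 stored above; cast it back to the object pointer.
    return reinterpret_cast<XControl*>(GetControlReference(control));
}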
Code Example #4
File: sint32codec.cpp  Project: KDE/okteta
QVariant SInt32Codec::value( const PODData& data, int* byteCount ) const
{
    const qint32* pointer = (qint32*)data.pointer( 4 );

    *byteCount = pointer ? 4 : 0;
    return pointer ? QVariant::fromValue<SInt32>( SInt32(*pointer) ) : QVariant();
}
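
The codec reads four bytes at the cursor and reinterprets them as a host-endian qint32. A Qt-free sketch of the same idea that copies with memcpy instead of casting, which avoids relying on the buffer being suitably aligned (readSInt32 is an illustrative name):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Read a host-endian signed 32-bit value from a byte buffer, if enough bytes remain.
// Returns false (and leaves *out untouched) when fewer than 4 bytes are available.
bool readSInt32(const unsigned char* data, std::size_t size, std::int32_t* out)
{
    if (size < sizeof(std::int32_t))
        return false;
    std::memcpy(out, data, sizeof(std::int32_t));  // memcpy sidesteps alignment issues
    return true;
}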
Code Example #5
File: AUCarbonViewControl.cpp  Project: kdridi/acau
void	AUCarbonViewControl::SetValueFract(double value)
{
#if !__LP64__
	SInt32 minimum = GetControl32BitMinimum(mControl);
	SInt32 maximum = GetControl32BitMaximum(mControl);
	SInt32 cval = SInt32(value * (maximum - minimum) + minimum + 0.5);
	SetControl32BitValue(mControl, cval);
//	printf("set: value=%lf, min=%ld, max=%ld, ctl value=%ld\n", value, minimum, maximum, cval);
#endif
}
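
SetValueFract maps a normalized 0..1 value onto the control's 32-bit range, rounding by adding 0.5 before the integer conversion. A self-contained sketch of that mapping and its inverse (std::int32_t stands in for SInt32; the function names are illustrative):

#include <cstdint>

// Map a normalized fraction in [0, 1] onto an integer control range.
// Adding 0.5 before truncation rounds to nearest when the result is non-negative,
// matching SetValueFract above.
inline std::int32_t FractToControlValue(double fract, std::int32_t minimum, std::int32_t maximum)
{
    return std::int32_t(fract * (maximum - minimum) + minimum + 0.5);
}

// Inverse mapping: recover the normalized fraction from a control value.
inline double ControlValueToFract(std::int32_t value, std::int32_t minimum, std::int32_t maximum)
{
    return double(value - minimum) / double(maximum - minimum);
}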
Code Example #6
void	AudioThruEngine::ComputeThruOffset()
{
	if (!mRunning) {
		mActualThruLatency = 0;
		mInToOutSampleOffset = 0;
		return;
	}
//	AudioTimeStamp inputTime, outputTime;
//	verify_noerr (AudioDeviceGetCurrentTime(mInputDevice.mID, &inputTime));
//	verify_noerr (AudioDeviceGetCurrentTime(mOutputDevice.mID, &outputTime));
	
//	printf(" in host: %20.0f  samples: %20.f  safety: %7ld  buffer: %4ld\n", Float64(inputTime.mHostTime), inputTime.mSampleTime,
//		mInputDevice.mSafetyOffset, mInputDevice.mBufferSizeFrames);
//	printf("out host: %20.0f  samples: %20.f  safety: %7ld  buffer: %4ld\n", Float64(outputTime.mHostTime), outputTime.mSampleTime,
//		mOutputDevice.mSafetyOffset, mOutputDevice.mBufferSizeFrames);
	mActualThruLatency = SInt32(mInputDevice.mSafetyOffset + /*2 * */ mInputDevice.mBufferSizeFrames +
						mOutputDevice.mSafetyOffset + mOutputDevice.mBufferSizeFrames) + mExtraLatencyFrames;
	mInToOutSampleOffset = mActualThruLatency + mIODeltaSampleCount;
//	printf("thru latency: %.0f frames, inToOutOffset: %0.f frames\n", latency, mInToOutSampleOffset);
}
Code Example #7
void SensorFactoryCalibrationImpl::Pack()
{
    SInt32 x, y, z;

    Buffer[0] = 3;

    x = SInt32(AccelOffset.x * 1e4f);
    y = SInt32(AccelOffset.y * 1e4f);
    z = SInt32(AccelOffset.z * 1e4f);
    PackSensor(Buffer + 3, x, y, z);

    x = SInt32(GyroOffset.x * 1e4f);
    y = SInt32(GyroOffset.y * 1e4f);
    z = SInt32(GyroOffset.z * 1e4f);
    PackSensor(Buffer + 11, x, y, z);

    // ignore the scale matrices for now
}
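
Pack() encodes each offset as a fixed-point SInt32 with a scale of 1e4, i.e. four fractional decimal digits. A hedged sketch of the encode/decode pair (names are illustrative; the PackSensor byte layout is not reproduced here):

#include <cstdint>

// Encode a float as fixed-point with scale 1e4; truncates toward zero, as Pack() does.
inline std::int32_t ToFixed1e4(float value)
{
    return static_cast<std::int32_t>(value * 1e4f);
}

// Decode back to float; anything below 1e-4 is lost in the round trip.
inline float FromFixed1e4(std::int32_t fixed)
{
    return static_cast<float>(fixed) * 1e-4f;
}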
Code Example #8
File: CAPlayThrough.cpp  Project: aranm/CAPlayThrough
void	CAPlayThrough::ComputeThruOffset()
{
	//The initial latency will be at least the safety offsets of the devices + the buffer sizes
	mInToOutSampleOffset = SInt32(mInputDevice.mSafetyOffset +  mInputDevice.mBufferSizeFrames +
						mOutputDevice.mSafetyOffset + mOutputDevice.mBufferSizeFrames);
}
Code Example #9
pid_t	HP_HogMode::GetOwnerFromPreference(bool inSendNotifications) const
{
    pid_t theAnswer = -1;

#if HogMode_UseCFPrefs
    //	get the preference
    CFNumberRef theCFNumber = CACFPreferences::CopyNumberValue(mPrefName, false, true);
    if(theCFNumber != NULL)
    {
        //	get the number
        pid_t theOwner = -1;
        CFNumberGetValue(theCFNumber, kCFNumberSInt32Type, &theOwner);

        //	make sure the process exists
        if(theOwner == -1)
        {
            //	hog mode is free
            theAnswer = -1;
        }
        else if(CAProcess::ProcessExists(theOwner))
        {
            //	it does, so set the return value
            theAnswer = theOwner;
        }
        else
        {
            //	it doesn't, so delete the pref
            SetOwnerInPreference((pid_t)-1);

            if(inSendNotifications)
            {
                //	signal that hog mode changed
                SendHogModeChangedNotification();
            }
        }
        CFRelease(theCFNumber);
    }
#else
    //	get the owner from the preference
    SInt32 theOwner = -1;
    sSettingsStorage->CopySInt32Value(mPrefName, theOwner, SInt32(-1));

    //	make sure the process exists
    if(theOwner == -1)
    {
        //	hog mode is free
        theAnswer = -1;
    }
    else if(CAProcess::ProcessExists(theOwner))
    {
        //	the process that owns hog mode exists
        theAnswer = theOwner;
    }
    else
    {
        //	the process that owns hog mode doesn't exist, so delete the pref
        theAnswer = -1;
        SetOwnerInPreference((pid_t)-1);

        if(inSendNotifications)
        {
            //	signal that hog mode changed
            SendHogModeChangedNotification();
        }
    }
#endif

    return theAnswer;
}
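
Both branches read the owner pid as a 32-bit signed value; the CFPrefs path goes through a CFNumber of type kCFNumberSInt32Type. A minimal sketch of that round trip with plain Core Foundation (standalone; not the CACFPreferences wrapper used above):

#include <CoreFoundation/CoreFoundation.h>

// Round-trip a pid-sized SInt32 through a CFNumber, as the preference code above does.
static SInt32 RoundTripSInt32(SInt32 value)
{
    CFNumberRef number = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &value);
    SInt32 readBack = -1;
    if (number != NULL)
    {
        CFNumberGetValue(number, kCFNumberSInt32Type, &readBack);
        CFRelease(number);
    }
    return readBack;
}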
Code Example #10
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//	AUPulseDetector::AUPulseDetectorKernel::Process
//
//		pass-through audio
//		do spike detection
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
void AUPulseDetector::AUPulseDetectorKernel::Process(const Float32 	*inSourceP,
                                                    Float32		 	*inDestP,
                                                    UInt32 			inFramesToProcess,
                                                    UInt32			inNumChannels,
                                                    bool			&ioSilence )
{
	if (GetParameter (kDoPulseDetection) == 0) {
		memset (inDestP, 0, (inFramesToProcess * sizeof(Float32)));
		return;
	}
	
	switch (mWhichMode)
	{
		case kDetectMode:
		{
			Float64 now = SampleTime();
			Float64 sampleRate = GetSampleRate();

			if ((now - mPulseStartTime) > (sampleRate * GetParameter (kPulseRestTime))) {
				mDoneClean = 0;
				mWhichMode = kCleanMode;
				mWasSuccessful = false;
				mParentObject->PropertyChanged (kAUPulseMetricsPropertyID, kAudioUnitScope_Global, 0);
				break;
			}

			float pulseThreshold = GetParameter (kPulseThreshold);
			
			for (unsigned int i = 0; i < inFramesToProcess; ++i) 
			{
				Float32 inputSample = inSourceP[i];
				
				if(fabs(inputSample) >= pulseThreshold) {
					mLastMeasurement = UInt32(now + i - mPulseStartTime);
					
					mTotalMeasurements += mLastMeasurement;
					mTotalMeasurementsSquared += pow (mLastMeasurement, 2);
					mNumMeasurements++;
					
					if (mLastMeasurement > mMaxTime)
						mMaxTime = mLastMeasurement;
					if (mLastMeasurement < mMinTime)
						mMinTime = mLastMeasurement;
						
					mDoneClean = 0;
					mWhichMode = kCleanMode;
					
					mLastFrames = inFramesToProcess;
					mWasSuccessful = true;
					mParentObject->DetectedPulse (mPulseStartTime, mLastMeasurement);
					break;
				}				
			}
			memset (inDestP, 0, (inFramesToProcess * sizeof(Float32)));
		}
		break;

		case kCleanMode:
		{
			if (mDoneClean == 0) {
				float secs = GetParameter (kPulseRestTime);
				mDoneClean = SInt32(secs * GetSampleRate());
			}
			
			memset (inDestP, 0, (inFramesToProcess * sizeof(Float32)));
			ioSilence = true;
			mDoneClean -= inFramesToProcess;
			if (mDoneClean <= 0)
				mWhichMode = kEstablishMode;
		}
		break;
		
		case kEstablishMode:
		{
			memset (inDestP, 0, (inFramesToProcess * sizeof(Float32)));
			unsigned int pulseLength = (unsigned int)GetParameter (kPulseLength);
			if (pulseLength > inFramesToProcess)
				pulseLength = inFramesToProcess;

			for (unsigned int i = (inFramesToProcess - pulseLength); i < inFramesToProcess; ++i)
				inDestP[i] = 1.0;
			
			ioSilence = false;
			mWhichMode = kDetectMode;
			mPulseStartTime = SampleTime() + inFramesToProcess - pulseLength;
		}
		break;
	}
}
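
The detect branch accumulates mTotalMeasurements, mTotalMeasurementsSquared, and mNumMeasurements, which is enough to report a mean and standard deviation later without storing individual measurements. A small sketch of deriving those statistics from the running totals (the helper itself is illustrative, not part of AUPulseDetector):

#include <cmath>

// Derive mean and standard deviation from running totals, as accumulated in Process().
// Uses the identity  var = E[x^2] - (E[x])^2  on the stored sums.
static void PulseStats(double totalMeasurements, double totalMeasurementsSquared,
                       unsigned int numMeasurements, double* outMean, double* outStdDev)
{
    if (numMeasurements == 0) {
        *outMean = 0.0;
        *outStdDev = 0.0;
        return;
    }
    double mean = totalMeasurements / numMeasurements;
    double variance = totalMeasurementsSquared / numMeasurements - mean * mean;
    *outMean = mean;
    *outStdDev = variance > 0.0 ? std::sqrt(variance) : 0.0;
}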
Code Example #11
File: AudioClip.cpp  Project: andyvand/VoodooHDA_AnV
IOReturn VoodooHDAEngine::convertInputSamples(const void *sampleBuf, void *destBuf,
		UInt32 firstSampleFrame, UInt32 numSampleFrames, const IOAudioStreamFormat *streamFormat,
		__unused IOAudioStream *audioStream)
{
	UInt32	numSamplesLeft, numSamples;
	Float32 	*floatDestBuf;
	
//    floatDestBuf = (float *)destBuf;
	UInt32 firstSample = firstSampleFrame * streamFormat->fNumChannels;
	numSamples = numSamplesLeft = numSampleFrames * streamFormat->fNumChannels;
	long int noiseMask = ~((1 << noiseLevel) - 1);
	
	UInt8 *sourceBuf = (UInt8 *) sampleBuf; 

	// figure out what sort of blit we need to do
	if ((streamFormat->fSampleFormat == kIOAudioStreamSampleFormatLinearPCM) && streamFormat->fIsMixable) {
		// it's linear PCM, which means the target is Float32 and we will be calling a blitter, which
		// works in samples not frames
		floatDestBuf = (Float32 *) destBuf;

		if (streamFormat->fNumericRepresentation == kIOAudioStreamNumericRepresentationSignedInt) {
			// it's some kind of signed integer, which we handle as some kind of even byte length
			bool nativeEndianInts;
			nativeEndianInts = (streamFormat->fByteOrder == kIOAudioStreamByteOrderLittleEndian);

			switch (streamFormat->fBitWidth) {
				case 8:
					SInt8 *inputBuf8;
					
					inputBuf8 = &(((SInt8 *)sampleBuf)[firstSample]);
#if defined(__ppc__)
					Int8ToFloat32(inputBuf8, floatDestBuf, numSamplesLeft);
#elif defined(__i386__) || defined(__x86_64__)
					while (numSamplesLeft-- > 0) 
					{	
						*(floatDestBuf++) = (float)(*(inputBuf8++) &= (SInt8)noiseMask) * kOneOverMaxSInt8Value;
					}
#endif
					
					break;
			case 16:
				if (nativeEndianInts) {
					if (vectorize) {
						NativeInt16ToFloat32((SInt16 *) &sourceBuf[2 * firstSample], floatDestBuf, numSamples);
					} else {
						SInt16 *inputBuf16;

						inputBuf16 = &(((SInt16 *)sampleBuf)[firstSample]);
#if defined(__ppc__)
						SwapInt16ToFloat32(inputBuf16, floatDestBuf, numSamplesLeft, 16);
#elif defined(__i386__) || defined(__x86_64__)
						while (numSamplesLeft-- > 0)
						{
							*(floatDestBuf++) = (float)(*(inputBuf16++) &= (SInt16)noiseMask) * kOneOverMaxSInt16Value;
						}
#endif
					}
				} else {
					SwapInt16ToFloat32((SInt16 *) &sourceBuf[2 * firstSample], floatDestBuf, numSamples);
				}
				break;

			case 20:
			case 24:
				if (nativeEndianInts) {
					if (vectorize) {
						NativeInt24ToFloat32(&sourceBuf[3 * firstSample], floatDestBuf, numSamples);
					} else {
						register SInt8 *inputBuf24;

						// Multiply by 3 because 20 and 24 bit samples are packed into only three bytes,
						// so we have to index bytes, not shorts or longs
						inputBuf24 = &(((SInt8 *)sampleBuf)[firstSample * 3]);

#if defined(__ppc__)
						SwapInt24ToFloat32((long *)inputBuf24, floatDestBuf, numSamplesLeft, 24);
#elif defined(__i386__) || defined(__x86_64__)
						register SInt32 inputSample;

						// [rdar://4311684] - Fixed 24-bit input convert routine. /thw
						while (numSamplesLeft-- > 1)
						{
							inputSample = (* (UInt32 *)inputBuf24) & 0x00FFFFFF & noiseMask;
							// Sign extend if necessary
							if (inputSample > 0x7FFFFF)
							{
								inputSample |= 0xFF000000;
							}
							inputBuf24 += 3;
							*(floatDestBuf++) = (float)inputSample * kOneOverMaxSInt24Value;
						}
						// Convert the last sample. The following line does the same work as above
						// without going over the edge of the buffer.
						inputSample = SInt32((UInt32(*(UInt16 *)inputBuf24) & 0x0000FFFF & noiseMask)
											 | (SInt32(*(inputBuf24 + 2)) << 16));
						*(floatDestBuf++) = (float)inputSample * kOneOverMaxSInt24Value;
#endif
					}
				} else {
					SwapInt24ToFloat32(&sourceBuf[3 * firstSample], floatDestBuf, numSamples);
				}
				break;

			case 32:
				if (nativeEndianInts) {
					if (vectorize) {
						NativeInt32ToFloat32((SInt32 *) &sourceBuf[4 * firstSample], floatDestBuf, numSamples);
					} else {
						register SInt32 *inputBuf32;
						inputBuf32 = &(((SInt32 *)sampleBuf)[firstSample]);
						
#if defined(__ppc__)
						SwapInt32ToFloat32(inputBuf32, floatDestBuf, numSamplesLeft, 32);
#elif defined(__i386__) || defined(__x86_64__)
						while (numSamplesLeft-- > 0) {	
							*(floatDestBuf++) = (float)(*(inputBuf32++) & noiseMask) * kOneOverMaxSInt32Value;
						}
#endif
						
					}
				}
				else
					SwapInt32ToFloat32((SInt32 *) &sourceBuf[4 * firstSample], floatDestBuf, numSamples);
				break;

			default:
				errorMsg("convertInputSamples: can't handle signed integers with a bit width of %d",
						streamFormat->fBitWidth);
				break;

			}
			
			// Swap the left and right channel values
			if(mDevice && mDevice->mSwitchCh && (streamFormat->fNumChannels > 1)) {
				UInt32 i;
				Float32 tempSamples;
				
				for(i = 0; i < numSamples; i+= streamFormat->fNumChannels) {
					tempSamples = floatDestBuf[i];
					floatDestBuf[i] = floatDestBuf[i+1];
					floatDestBuf[i+1] = tempSamples;
				}
			}
			
		} else if (streamFormat->fNumericRepresentation == kIOAudioStreamNumericRepresentationIEEE754Float) {
			// it is some kind of floating point format
			if ((streamFormat->fBitWidth == 32) && (streamFormat->fBitDepth == 32) &&
					(streamFormat->fByteOrder == kIOAudioStreamByteOrderLittleEndian)) {
				// it's Float32, so we are just going to copy the data
				memcpy(floatDestBuf, &((Float32 *) sampleBuf)[firstSample], numSamples * sizeof (Float32));
			} else
				errorMsg("convertInputSamples: can't handle floats with a bit width of %d, bit depth of %d, "
						"and/or the given byte order", streamFormat->fBitWidth, streamFormat->fBitDepth);
		}
	} else {
		// it's not linear PCM or it's not mixable, so just copy the data into the target buffer
		UInt32 offset = firstSampleFrame * (streamFormat->fBitWidth / 8) * streamFormat->fNumChannels;
		UInt32 size = numSampleFrames * (streamFormat->fBitWidth / 8) * streamFormat->fNumChannels;
		memcpy(destBuf, &sourceBuf[offset], size);
	}

	return kIOReturnSuccess;
}
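
The 20/24-bit branch packs three bytes per sample and has to sign-extend by hand before scaling into Float32. A standalone sketch of that conversion for one little-endian sample (the scaling constant is written out here; the driver uses kOneOverMaxSInt24Value):

#include <cstdint>

// Sign-extend one packed little-endian 24-bit sample and scale it into roughly [-1, 1).
inline float Int24LEToFloat(const unsigned char* bytes)
{
    std::int32_t sample = std::int32_t(bytes[0])
                        | (std::int32_t(bytes[1]) << 8)
                        | (std::int32_t(bytes[2]) << 16);
    if (sample & 0x800000)      // the 24-bit sign bit is set
        sample -= 1 << 24;      // fold into the negative range (two's complement)
    return float(sample) * (1.0f / 8388608.0f);   // 8388608 = 2^23
}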
Code Example #12
File: RLDisplayMachO.cpp  Project: mctully/tntbasic
//
//	AskIfNewResolutionWorks() creates a dialog box in the center of the screen. The dialog asks the
//	user whether the current display setting works. This is necessary because a number of display
//	settings listed by the OS don't actually work and leave the user with a black screen. The dialog
//	has a 5-second timeout: if the user does not hit OK within 5 seconds, the cancel item is chosen
//	automatically. This lets the user do nothing (which is likely if confronted with a black screen)
//	and still be okay. The timeout method used here requires Appearance 1.1, which I believe was
//	introduced with OS 8.5. To support back to OS 8.1 you would have to write your own modal dialog
//	event proc that implements a timeout. The dialog has no default button; Cmd-period and Esc
//	trigger the cancel button.
//
//
OSStatus		AskIfNewResolutionWorks( ScreenRef screen )
{
	DEBUGMESSAGE( "Querying user whether the new resolution works...." );

	if( ! screen )
		return noErr;

	//Read the new screen dimensions
	RLDrawInfo 	screenData;
	Rect		dialogBounds = {0,0,130, 340};
	OSStatus 		error = GetCurrentScreenDrawingInfo( screen, &screenData );
	if( error )
	{
		DEBUGMESSAGE( "Unable to get current screen drawing information. Got back error # " << error );
		return error;
	}
	
	//Make a copy of our dialog item list. This will be destroyed when the dialog is destroyed.
	Handle ditlCopy = LoadDITL();
	HandToHand( &ditlCopy );
	
	//Center the dialog rect on the screen
	{
		SInt32 horizontalOffset = (SInt32( screenData.bounds.right) + SInt32( screenData.bounds.left ) - SInt32(dialogBounds.right) +  SInt32(dialogBounds.left) ) / 2;
		SInt32 verticalOffset = (SInt32( screenData.bounds.bottom) + SInt32( screenData.bounds.top ) - SInt32(dialogBounds.bottom) +  SInt32(dialogBounds.top) ) / 2;
		dialogBounds.left += horizontalOffset;
		dialogBounds.right += horizontalOffset;
		dialogBounds.top += verticalOffset;
		dialogBounds.bottom += verticalOffset;
	}
		
	//Init a new dialog hidden
	DialogPtr dialog =  NewFeaturesDialog( NULL, &dialogBounds, "\pResolution Verification", true, kWindowModalDialogProc,
									(WindowPtr) -1L, false, TickCount(), ditlCopy, 0 );
	if( ! dialog )
	{
		DEBUGMESSAGE( "Unable to init the \"AskIfNewResolutionWorks\" dialog window. Perhaps there is insufficient free memory or the DITL did not load properly at library startup?" );
		return rlOutOfMemory;
	}
	
	//Make sure the dialog cancel item is button # 2	
	SetDialogCancelItem( dialog, 2 );

	//Set dialog to timeout after 5 seconds	
	SetDialogTimeout( dialog, 2, 5 );

	for( bool done = false; !done; )
	{
		short itemHit = 0;
		ModalDialog ( NULL, &itemHit );  

		switch( itemHit )
		{
			case 2:	//cancel
				DEBUGMESSAGE( "The user hit cancel or the dialog timed out. The new resolution is probably not good." );
				done = true;
				error = rlRezNotFound;
				break;

			case 3:	//ok
				DEBUGMESSAGE( "The user hit ok. The new resolution seems to be Okay!" );
				done = true;
				error = noErr;
				break;		
		}	
	}
	
	DisposeDialog( dialog );
	
	return error;
}