Example #1
void Alg_UpdateProximityData(pIvhAlgo algo, int proximity)
{
    IvhSensorData data;
    data.data.proximityMiniMeter = proximity;

    if(algo->proximity)
    {
        algo->proximity->Update(algo->proximity, &data, IVH_SENSOR_TYPE_PROXIMITY);
    }
    TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "[%!FUNC!]proximity = %d",proximity);
}
Example #2
void Alg_UpdateTemperatureData(pIvhAlgo algo, int temperature)
{
    IvhSensorData data;
    data.data.temperatureMiniCentigrade = temperature;

    if(algo->thermometer)
    {
        algo->thermometer->Update(algo->thermometer, &data, IVH_SENSOR_TYPE_THERMOMETER);
    }
    TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "[%!FUNC!]temperature = %d",temperature);
}
Example #3
/* -----------------------------------------------
    Plug-and-play and power management events
   ----------------------------------------------- */
static void EDEN_EventHandler(WD_EVENT *pEvent, PVOID pData)
{
    PWDC_DEVICE pDev = (PWDC_DEVICE)pData;
    PEDEN_DEV_CTX pDevCtx = (PEDEN_DEV_CTX)WDC_GetDevContext(pDev);

    TraceLog("EDEN_EventHandler entered, pData: 0x%p, dwAction 0x%lx\n",
        pData, pEvent->dwAction);
    
    /* Execute the diagnostics application's event handler function */
    pDevCtx->funcDiagEventHandler((WDC_DEVICE_HANDLE)pDev, pEvent->dwAction);
}
Example #4
void Alg_UpdateAmbientlightData(pIvhAlgo algo, unsigned als)
{
    IvhSensorData data;
    data.data.alsMilliLux = als;

    if(algo->ambientlight != NULL)
    {
        algo->ambientlight->Update(algo->ambientlight, &data, IVH_SENSOR_TYPE_AMBIENTLIGHT);
    }
    TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "[%!FUNC!]als = %d",als);
}
Example #5
File: audio.c Project: MarcMDE/raylib
// Resume music playing
void ResumeMusicStream(void)
{
    // Resume music playing... if music available!
    ALenum state;
    alGetSourcei(currentMusic.source, AL_SOURCE_STATE, &state);

    if (state == AL_PAUSED)
    {
        TraceLog(INFO, "Resuming music stream");
        alSourcePlay(currentMusic.source);
        musicEnabled = true;
    }
}
Example #6
static ERROR_T DataUpdate(IvhSensor* me, const IvhSensorData* data, const IvhSensorType type)
{
    ERROR_T retVal = ERROR_OK;
    pIvhSensorOrientation sensor = (pIvhSensorOrientation)(me);
    const int16_t MIN_ACCEL_CHANGE = 10;

    switch(type)
    {
    case IVH_SENSOR_TYPE_ACCELEROMETER3D :
        // The normal minimum sensitivity for accel is 35mg.  We can set it
        // to zero and get all samples, but it's too jittery.  This is a secondary
        // filter that works as if we specified a sensitivity of 10mg
        if (fabs(sensor->lastCalibratedAccel[AXIS_X] - data->data.accel.xyz.x) > MIN_ACCEL_CHANGE  ||
            fabs(sensor->lastCalibratedAccel[AXIS_Y] - data->data.accel.xyz.y) > MIN_ACCEL_CHANGE ||
            fabs(sensor->lastCalibratedAccel[AXIS_Z] - data->data.accel.xyz.z) > MIN_ACCEL_CHANGE)
        {
            sensor->lastCalibratedAccel[AXIS_X] = (ROTATION_VECTOR_T)(data->data.accel.xyz.x);
            sensor->lastCalibratedAccel[AXIS_Y] = (ROTATION_VECTOR_T)(data->data.accel.xyz.y);
            sensor->lastCalibratedAccel[AXIS_Z] = (ROTATION_VECTOR_T)(data->data.accel.xyz.z);
        }

        sensor->lastTimestampAccel = data->timeStampInMs;
        break;
    case IVH_SENSOR_TYPE_GYROSCOPE3D :
        sensor->lastCalibratedGyro[AXIS_X] = (ROTATION_VECTOR_T)(data->data.gyro.xyz.x);
        sensor->lastCalibratedGyro[AXIS_Y] = (ROTATION_VECTOR_T)(data->data.gyro.xyz.y);
        sensor->lastCalibratedGyro[AXIS_Z] = (ROTATION_VECTOR_T)(data->data.gyro.xyz.z);
        sensor->lastTimestampGyro = data->timeStampInMs;
        Calibrate(me);
        break;
    case IVH_SENSOR_TYPE_MAGNETOMETER3D :
        sensor->lastCalibratedMag[AXIS_X] = data->data.mag.xyzCalibrated.x;
        sensor->lastCalibratedMag[AXIS_Y] = data->data.mag.xyzCalibrated.y;
        sensor->lastCalibratedMag[AXIS_Z] = data->data.mag.xyzCalibrated.z;
        sensor->lastRotatedMag[AXIS_X] = data->data.mag.xyzRotated.x;
        sensor->lastRotatedMag[AXIS_Y] = data->data.mag.xyzRotated.y;
        sensor->lastRotatedMag[AXIS_Z] = data->data.mag.xyzRotated.z;
        sensor->lastRawMag[AXIS_X] = (ROTATION_VECTOR_T)(data->data.mag.xyzRaw.x);
        sensor->lastRawMag[AXIS_Y] = (ROTATION_VECTOR_T)(data->data.mag.xyzRaw.y);
        sensor->lastRawMag[AXIS_Z] = (ROTATION_VECTOR_T)(data->data.mag.xyzRaw.z);
        memcpy(&sensor->lastMagCovariance,
            data->data.mag.covariance,
            (COVARIANCE_MATRIX_SIZE * sizeof(float)));
        sensor->lastTimestampMag = data->timeStampInMs;
        break;
    default:
        TraceLog(TRACE_LEVEL_ERROR, TRACE_ALGO, "[%!FUNC!]invalid sensor type, expect [IVH_SENSOR_TYPE_ACCELEROMETER3D|IVH_SENSOR_TYPE_GYROSCOPE3D|IVH_SENSOR_TYPE_MAGNETOMETER3D]");
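        // NOTE: retVal is left as ERROR_OK even when the sensor type is not recognized
        break;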
    }

    return retVal;
}
Example #7
File: audio.c Project: MarcMDE/raylib
// Load sound from wave data
Sound LoadSoundFromWave(Wave wave)
{
    Sound sound;
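    // NOTE: sound is returned uninitialized when wave.data is NULL (the block below is skipped)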

    if (wave.data != NULL)
    {
        ALenum format = 0;
        // The OpenAL format is worked out by looking at the number of channels and the bits per sample
        if (wave.channels == 1)
        {
            if (wave.bitsPerSample == 8 ) format = AL_FORMAT_MONO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_MONO16;
        }
        else if (wave.channels == 2)
        {
            if (wave.bitsPerSample == 8 ) format = AL_FORMAT_STEREO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_STEREO16;
        }

        // Create an audio source
        ALuint source;
        alGenSources(1, &source);            // Generate pointer to audio source

        alSourcef(source, AL_PITCH, 1);
        alSourcef(source, AL_GAIN, 1);
        alSource3f(source, AL_POSITION, 0, 0, 0);
        alSource3f(source, AL_VELOCITY, 0, 0, 0);
        alSourcei(source, AL_LOOPING, AL_FALSE);

        // Convert loaded data to OpenAL buffer
        //----------------------------------------
        ALuint buffer;
        alGenBuffers(1, &buffer);            // Generate pointer to buffer

        // Upload sound data to buffer
        alBufferData(buffer, format, wave.data, wave.dataSize, wave.sampleRate);

        // Attach sound buffer to source
        alSourcei(source, AL_BUFFER, buffer);

        // Unallocate WAV data
        UnloadWave(wave);

        TraceLog(INFO, "[Wave] Sound file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", wave.sampleRate, wave.bitsPerSample, wave.channels);

        sound.source = source;
        sound.buffer = buffer;
    }

    return sound;
}
Example #8
// Public interface
void BackupRegistersInit(HANDLE_DEV dev)
{
    memset(gs_backupRegisters, BKP_UNINITIALIZED_VALUE, sizeof(gs_backupRegisters));
    ReadBackupRegistersFromRegistry(dev);

#ifdef ENABLE_TIMER_DPC
    LARGE_INTEGER due;
    due.QuadPart = 10000*10000UL;
    dpc = (PRKDPC)SAFE_ALLOCATE_POOL(NonPagedPool, sizeof(KDPC), IVH_BACKUP_POOL_TAG);

    if (dpc)
    {   // init it!
        KeInitializeDpc(dpc, TimerHandle, dev);
        TraceLog(TRACE_LEVEL_INFORMATION, TRACE_ALGO, "malloc memory for backup dpc");
    }
    else 
    {
        //  No memory
        TraceLog(TRACE_LEVEL_ERROR, TRACE_ALGO, "[%!FUNC!]not enough memory");
        return;
    }

    timer = (PKTIMER)SAFE_ALLOCATE_POOL(NonPagedPool, sizeof(KTIMER), IVH_BACKUP_POOL_TAG);
    if (timer)
    {   // init it!
        KeInitializeTimer(timer);
        TraceLog(TRACE_LEVEL_INFORMATION, TRACE_ALGO, "malloc memory for backup timer");
    }
    else 
    {
        //  No memory
        TraceLog(TRACE_LEVEL_ERROR, TRACE_ALGO, "[%!FUNC!]not enough memory");
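        // NOTE: the DPC allocated above is not freed on this early return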
        return;
    }

    KeSetTimerEx(timer, due, (ULONG)(due.QuadPart), dpc);
#endif
}
Example #9
void Alg_UpdateMagnetometerData(pIvhAlgo algo, int MagX, int MagY, int MagZ, int ts)
{
    IvhSensorData data;
    data.timeStampInMs = ts;
    data.data.mag.xyzRaw.x = MagX;
    data.data.mag.xyzRaw.y = MagY;
    data.data.mag.xyzRaw.z = MagZ;

    if(algo->magnetometer != NULL)
    {
        algo->magnetometer->Update(algo->magnetometer, &data, IVH_SENSOR_TYPE_MAGNETOMETER3D);
    }
    TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "[%!FUNC!]mag(%d,%d,%d)",data.data.mag.xyzRaw.x,data.data.mag.xyzRaw.y,data.data.mag.xyzRaw.z);
}
Example #10
void Alg_UpdateAccelerometerData(pIvhAlgo algo, int AccX, int AccY, int AccZ, int ts)
{
    IvhSensorData data;
    data.timeStampInMs = ts;
    data.data.accel.xyz.x = AccX;
    data.data.accel.xyz.y = AccY;
    data.data.accel.xyz.z = AccZ;

    if(algo->accelerometer != NULL)
    {
        algo->accelerometer->Update(algo->accelerometer, &data, IVH_SENSOR_TYPE_ACCELEROMETER3D);
    }
    TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "[%!FUNC!]accel(%d,%d,%d)",data.data.accel.xyz.x,data.data.accel.xyz.y,data.data.accel.xyz.z);
}
Example #11
void Alg_UpdateGyrometerData(pIvhAlgo algo, int GyrX, int GyrY, int GyrZ, int ts)
{
    IvhSensorData data;
    data.timeStampInMs = ts;
    data.data.gyro.xyz.x = GyrX;
    data.data.gyro.xyz.y = GyrY;
    data.data.gyro.xyz.z = GyrZ;

    if(algo->gyrometer != NULL)
    {
        algo->gyrometer->Update(algo->gyrometer, &data, IVH_SENSOR_TYPE_GYROSCOPE3D);
    }
    TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "[%!FUNC!]gyro(%d,%d,%d)",data.data.gyro.xyz.x,data.data.gyro.xyz.y,data.data.gyro.xyz.z);
}
Example #12
// Unload SpriteFont from GPU memory
void UnloadSpriteFont(SpriteFont spriteFont)
{
    // NOTE: Make sure spriteFont is not default font (fallback)
    if (spriteFont.texture.id != defaultFont.texture.id)
    {
        UnloadTexture(spriteFont.texture);
        free(spriteFont.charValues);
        free(spriteFont.charRecs);
        free(spriteFont.charOffsets);
        free(spriteFont.charAdvanceX);

        TraceLog(DEBUG, "Unloaded sprite font data");
    }
}
Example #13
File: audio.c Project: Danlestal/raylib
// Fill music buffers with new data from music stream
static bool BufferMusicStream(ALuint buffer)
{
	short pcm[MUSIC_BUFFER_SIZE];
    
	int  size = 0;              // Total number of samples (shorts) streamed so far
	int  streamedBytes = 0;     // Samples per channel obtained in one read
    
    bool active = true;         // We can get more data from stream (not finished)
    
    if (musicEnabled)
    {
        while (size < MUSIC_BUFFER_SIZE)
        {
            streamedBytes = stb_vorbis_get_samples_short_interleaved(currentMusic.stream, currentMusic.channels, pcm + size, MUSIC_BUFFER_SIZE - size);
            
            if (streamedBytes > 0) size += (streamedBytes*currentMusic.channels);
            else break;
        }
        
        TraceLog(DEBUG, "Streaming music data to buffer. Bytes streamed: %i", size);
    }
    
	if (size > 0)
    {
        alBufferData(buffer, currentMusic.format, pcm, size*sizeof(short), currentMusic.sampleRate);
        
        currentMusic.totalSamplesLeft -= size;
    }
    else
    {
        active = false;
        TraceLog(WARNING, "No more data obtained from stream");
    }

	return active;
}
Example #14
File: audio.c Project: Danlestal/raylib
// Close the audio device for the current context, and destroys the context
void CloseAudioDevice()
{
    StopMusicStream();      // Stop music streaming and close current stream

    ALCdevice *device;
    ALCcontext *context = alcGetCurrentContext();
    
    if (context == NULL) TraceLog(WARNING, "Could not get current audio context for closing");
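    // NOTE: execution continues below even when context is NULL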

    device = alcGetContextsDevice(context);

    alcMakeContextCurrent(NULL);
    alcDestroyContext(context);
    alcCloseDevice(device);
}
Example #15
/* -----------------------------------------------
    Device open/close
   ----------------------------------------------- */
WDC_DEVICE_HANDLE PCIE_SW_DeviceOpen(const WD_PCI_CARD_INFO *pDeviceInfo)
{
    DWORD dwStatus;
    PPCIE_SW_DEV_CTX pDevCtx = NULL;
    WDC_DEVICE_HANDLE hDev = NULL;

    /* Validate arguments */
    if (!pDeviceInfo)
    {
        ErrLog("PCIE_SW_DeviceOpen: Error - NULL device information struct pointer\n");
        return NULL;
    }

    /* Allocate memory for the PCIE_SW device context */
    pDevCtx = (PPCIE_SW_DEV_CTX)malloc(sizeof (PCIE_SW_DEV_CTX));
    if (!pDevCtx)
    {
        ErrLog("Failed allocating memory for PCIE_SW device context\n");
        return NULL;
    }

    BZERO(*pDevCtx);

    /* Open a WDC device handle */
    dwStatus = WDC_PciDeviceOpen(&hDev, pDeviceInfo, pDevCtx, NULL, NULL, NULL);
    if (WD_STATUS_SUCCESS != dwStatus)
    {
        ErrLog("Failed opening a WDC device handle. Error 0x%lx - %s\n",
            dwStatus, Stat2Str(dwStatus));
        goto Error;
    }

    /* Validate device information */
    if (!DeviceValidate((PWDC_DEVICE)hDev))
        goto Error;

    /* Return handle to the new device */
    TraceLog("PCIE_SW_DeviceOpen: Opened a PCIE_SW device (handle 0x%p)\n", hDev);
    return hDev;

Error:    
    if (hDev)
        PCIE_SW_DeviceClose(hDev);
    else
        free(pDevCtx);

    return NULL;
}
Example #16
File: audio.c Project: Danlestal/raylib
// Update (re-fill) music buffers if data already processed
extern void UpdateMusicStream()
{
    ALuint buffer = 0;
    ALint processed = 0;
    bool active = true;
    
    if (musicEnabled)
    {
        // Get the number of already processed buffers (if any)
        alGetSourcei(currentMusic.source, AL_BUFFERS_PROCESSED, &processed);
        
        while (processed > 0)
        {
            // Recover processed buffer for refill
            alSourceUnqueueBuffers(currentMusic.source, 1, &buffer);

            // Refill buffer
            active = BufferMusicStream(buffer);
            
            // If no more data to stream, restart music (if loop)
            if ((!active) && (currentMusic.loop))   
            {
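                // NOTE: currentMusic.loop was already tested above, so this inner check is redundant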
                if (currentMusic.loop)
                {
                    stb_vorbis_seek_start(currentMusic.stream);
                    currentMusic.totalSamplesLeft = stb_vorbis_stream_length_in_samples(currentMusic.stream) * currentMusic.channels;
                    
                    active = BufferMusicStream(buffer);
                }
            }
            
            // Add refilled buffer to queue again... don't let the music stop!
            alSourceQueueBuffers(currentMusic.source, 1, &buffer);
            
            if(alGetError() != AL_NO_ERROR) TraceLog(WARNING, "Ogg playing, error buffering data...");
            
            processed--;
        }
        
        ALenum state;
        alGetSourcei(currentMusic.source, AL_SOURCE_STATE, &state);
        
        if ((state != AL_PLAYING) && active) alSourcePlay(currentMusic.source);
        
        if (!active) StopMusicStream();
    }
}
Example #17
File: audio.c Project: Danlestal/raylib
// Play a sound
void PlaySound(Sound sound)
{
    alSourcePlay(sound.source);        // Play the sound
    
    TraceLog(INFO, "Playing sound");

    // Find the current position of the sound being played
    // NOTE: Only works when the entire file is in a single buffer
    //int byteOffset;
    //alGetSourcei(sound.source, AL_BYTE_OFFSET, &byteOffset);
    //
    //int sampleRate;
    //alGetBufferi(sound.buffer, AL_FREQUENCY, &sampleRate);    // AL_CHANNELS, AL_BITS (bps)
    
    //float seconds = (float)byteOffset / sampleRate;      // Number of seconds since the beginning of the sound
    //or
    //float result;
    //alGetSourcef(sound.source, AL_SEC_OFFSET, &result);   // AL_SAMPLE_OFFSET
}
Example #18
static BOOL DeviceValidate(const PWDC_DEVICE pDev)
{
    DWORD i, dwNumAddrSpaces = pDev->dwNumAddrSpaces;

    /* TODO: You can modify the implementation of this function in order to
             verify that the device has all expected resources. */
    
    /* Verify that the device has at least one active address space */
    for (i = 0; i < dwNumAddrSpaces; i++)
    {
        if (WDC_AddrSpaceIsActive(pDev, i))
            return TRUE;
    }
    
    /* In this sample we accept the device even if it doesn't have any
     * address spaces */
    TraceLog("Device does not have any active memory or I/O address spaces\n");
    return TRUE;
}
Example #19
File: arcan.c Project: uincore/OpenSAR
// ======================== [    FUNCTIONS    ] ==============================================
void ArCan_Schedule(void)
{
	ArMsgType      Message;
	for(int i=0;i<PortNumber;i++)
	{
		if(Poll(Ports[i],&Message))
		{
			TraceLog(Ports[i],(ArCanMsgType*)&Message);
			for(int j=0;j<PortNumber;j++)
			{
				if(i!=j)
				{
					Forward(Ports[j],&Message);
				}
			}
			// Tester may do something
			Ardl_RxIndication(&Message);
		}
	}
}
Example #20
BOOL PCIE_SW_DeviceClose(WDC_DEVICE_HANDLE hDev)
{
    DWORD dwStatus;
    PWDC_DEVICE pDev = (PWDC_DEVICE)hDev;
    PPCIE_SW_DEV_CTX pDevCtx;
    
    TraceLog("PCIE_SW_DeviceClose entered. Device handle: 0x%p\n", hDev);

    if (!hDev)
    {
        ErrLog("PCIE_SW_DeviceClose: Error - NULL device handle\n");
        return FALSE;
    }

    pDevCtx = (PPCIE_SW_DEV_CTX)WDC_GetDevContext(pDev);
    
    /* Disable interrupts */
    if (WDC_IntIsEnabled(hDev))
    {
        dwStatus = PCIE_SW_IntDisable(hDev);
        if (WD_STATUS_SUCCESS != dwStatus)
        {
            ErrLog("Failed disabling interrupts. Error 0x%lx - %s\n",
                dwStatus, Stat2Str(dwStatus));
        }
    }

    /* Close the device */
    dwStatus = WDC_PciDeviceClose(hDev);
    if (WD_STATUS_SUCCESS != dwStatus)
    {
        ErrLog("Failed closing a WDC device handle (0x%p). Error 0x%lx - %s\n",
            hDev, dwStatus, Stat2Str(dwStatus));
    }

    /* Free PCIE_SW device context memory */
    if (pDevCtx)
        free (pDevCtx);
    
    return (WD_STATUS_SUCCESS == dwStatus);
}
Example #21
File: file.cpp Project: lufb/code
/*
int	RTFile::CheckCache(MCounter * pCounter)
{
	MLocalSection		local;
	FileList		*	pstList = NULL;
	
	return 1;
}

void * __stdcall RTFile::UpdateCacheThread(void * In)
{
	RTFile *	classptr;
	MCounter	counter;
	int			ret;

	classptr = (RTFile *)In;

	counter.SetCurTickCount();
	while ( classptr->m_stUpdateCacheThread.GetThreadStopFlag( ) == false )
	{
		try
		{
			ret = 0;
			while(ret >= 0)
			{
				ret = classptr->CheckCache(&counter);
				if(ret == 2)
					MThread::Sleep(100);
			}
		//	MThread::Sleep(Global_Option.GetCheckFileCycle());
		}
		catch( ... )
		{
			TraceLog( LOG_ERROR_NORMAL, MODULENAME, "Unknown exception in cache update thread");
		}
	}
	return 0;
}
*/
void * __stdcall RTFile::ProcessFileThread(void * In)
{
	int	no;
	
	no = (int)In;
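	// The thread index is passed through the void* argument; this cast narrows the pointer to an int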
	
	while (Global_DataIO.m_stProcessFileThread[no].GetThreadStopFlag( ) == false )
	{
		Global_DataIO.CheckFile();
		try
		{
			//Global_DataIO.CheckFile();
			MThread::Sleep(50);
		}
		catch( ... )
		{
			TraceLog( LOG_ERROR_NORMAL, MODULENAME, "Unknown exception in file check thread");
		}
	}
	return 0;
}
Example #22
// Once connected to the network, check the sockets for pending information
// and when information is ready, send either a Ping or a Pong.
void NetworkUpdate()
{
	// CheckSockets
	//
	// If any of the sockets in the socket_set are pending (received data, or requests)
	// then mark the socket as being ready. You can check this with IsSocketReady(client_res->socket)
	int active = CheckSockets(socket_set, 0);
	if (active != 0) {
		TraceLog(LOG_DEBUG,
				 "There are currently %d socket(s) with data to be processed.", active);
	}

	// IsSocketReady
	//
	// If the socket is ready, attempt to receive data from the socket
	//  int bytesRecv = 0;
	//  if (IsSocketReady(server_res->socket)) {
	//      bytesRecv = SocketReceive(server_res->socket, recvBuffer, msglen);
	//  }
	int bytesRecv = SocketReceive(server_res->socket, recvBuffer, msglen);

	// If we received data, was that data a "Ping!" or a "Pong!"
	if (bytesRecv > 0) {
		if (strcmp(recvBuffer, pingmsg) == 0) { pong = true; }
		if (strcmp(recvBuffer, pongmsg) == 0) { ping = true; }
	}

	// After each delay has expired, send a response "Ping!" for a "Pong!" and vice versa
	elapsed += GetFrameTime();
	if (elapsed > delay) {
		if (ping) {
			ping = false;
			SocketSend(server_res->socket, pingmsg, msglen);
		} else if (pong) {
			pong = false;
			SocketSend(server_res->socket, pongmsg, msglen);
		}
		elapsed = 0.0f;
	}
}
Example #23
/*
#define fork forkX
*/
int ForkY(PCStr(what),int (*xproc)(const char *what,int xpid))
{   register int pid;

    endhostent();
    MyPID = 0;
    pid = fork();

    if( xproc )
        if( pid == -1 && errno == EAGAIN ) {
            int fi,xi,xn = 0,xid = 0;
            for( fi = 0; fi < 30 && pid < 0 && errno == EAGAIN; fi++ ) {
                usleep((100+fi*10)*1000);
                if( 0 < (xi = NoHangWait()) ) {
                    xid = xi;
                    xn++;
                    if( (*xproc)(what,xi) != 0 )
                        break;
                }
                pid = fork();
            }
            if( pid != 0 ) {
                fprintf(stderr,"----[%d] Fork(%s) AGAIN(%d/%d/%d)=%d\n",
                        getpid(),what,fi,xn,xid,pid);
            }
        }
    if( pid == -1 ) {
        /*
        syslog_ERROR("-- FAILED fork(%s), errno=%d\n",what,errno);
        */
        daemonlog("F","-- FAILED fork(%s), errno=%d\n",what,errno);
    } else if( pid == 0 ) {
        syslog_ERROR("-- Fork(%s): %d -> %d\n",what,getppid(),MyPID);
    }
    else {
        if( lTRVERB() )
            if( doTracePid == getpid() )
                TraceLog("+ Fork(%s) = %d\n",what,pid);
    }
    return pid;
}
Example #24
File: audio.c Project: Danlestal/raylib
// Load OGG file into Wave structure
static Wave LoadOGG(char *fileName)
{
    Wave wave;
    
    stb_vorbis *oggFile = stb_vorbis_open_filename(fileName, NULL, NULL);
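    // NOTE: stb_vorbis_open_filename() returns NULL on failure, and that case is not handled here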
    stb_vorbis_info info = stb_vorbis_get_info(oggFile);
    
    wave.sampleRate = info.sample_rate;
    wave.bitsPerSample = 16;
    wave.channels = info.channels;
    
    TraceLog(DEBUG, "[%s] Ogg sample rate: %i", fileName, info.sample_rate);
    TraceLog(DEBUG, "[%s] Ogg channels: %i", fileName, info.channels);

    int totalSamplesLength = (stb_vorbis_stream_length_in_samples(oggFile) * info.channels);
    
    wave.dataSize = totalSamplesLength*sizeof(short);   // Size must be in bytes
    
    TraceLog(DEBUG, "[%s] Samples length: %i", fileName, totalSamplesLength);
    
    float totalSeconds = stb_vorbis_stream_length_in_seconds(oggFile);
    
    TraceLog(DEBUG, "[%s] Total seconds: %f", fileName, totalSeconds);
    
    if (totalSeconds > 10) TraceLog(WARNING, "[%s] Ogg audio length is larger than 10 seconds (%f), that's a big file in memory, consider music streaming", fileName, totalSeconds);
    
    int totalSamples = totalSeconds*info.sample_rate*info.channels;
   
    TraceLog(DEBUG, "[%s] Total samples calculated: %i", fileName, totalSamples);
    
    //short *data 
    wave.data = malloc(sizeof(short)*totalSamplesLength);

    int samplesObtained = stb_vorbis_get_samples_short_interleaved(oggFile, info.channels, wave.data, totalSamplesLength);
    
    TraceLog(DEBUG, "[%s] Samples obtained: %i", fileName, samplesObtained);

    stb_vorbis_close(oggFile);
    
    return wave;
}
Example #25
pIvhSensor CreateSensorOrientation9Axis(const pIvhPlatform platform, pIvhSensor accl, pIvhSensor gyro, pIvhSensor magn)
{
    pIvhSensorOrientation sensor = (pIvhSensorOrientation)SAFE_ALLOCATE_POOL(NonPagedPool, sizeof(IvhSensorOrientation), IVH_SENSOR_ORIENTATIONT_9AXIS__POOL_TAG);

    if (sensor) 
    {
        // ZERO it!
        SAFE_FILL_MEM (sensor, sizeof (IvhSensorOrientation), 0);
        TraceLog(TRACE_LEVEL_INFORMATION, TRACE_ALGO, "malloc memory for orientation9axis");
    }
    else 
    {
        //  No memory
        TraceLog(TRACE_LEVEL_ERROR, TRACE_ALGO, "not enough memory");
        return NULL;
    }

    sensor->sensor.Update = DataUpdate;
    sensor->sensor.Notify = SensorNotify;
    sensor->sensor.Attach = SensorAttach;
    sensor->sensor.QueryData = SensorQueryData;
    sensor->sensor.type = IVH_SENSOR_TYPE_ORIENTATION_9AXIS;
    sensor->sensor.d[0] = &sensor->input;
    sensor->sensor.d[1] = &sensor->output;
    accl->Attach(accl, &(sensor->sensor));
    gyro->Attach(gyro, &(sensor->sensor));
    magn->Attach(magn, &(sensor->sensor));

    ROTATION_MATRIX_T rm[] = {1, 0, 0, \
        0, 1, 0, \
        0, 0, 1};
    SAFE_MEMSET(sensor->lastCalibratedAccel, 0);
    SAFE_MEMSET(sensor->lastCalibratedGyro, 0);
    SAFE_MEMSET(sensor->lastCalibratedMag, 0);
    SAFE_MEMSET(sensor->lastRotatedMag, 0);
    SAFE_MEMSET(sensor->lastRawMag, 0);

    SAFE_MEMSET(sensor->lastMagCovariance, 0);

    sensor->magConfidence = 0;
    sensor->lastTimestampAccel = 0;
    sensor->lastTimestampGyro = 0;
    sensor->lastTimestampMag = 0;

    sensor->estimatedHeadingError = MAX_HEADING_ERROR;

    // Don't apply ZRT until at least one sample has been calculated and
    // SensorManager has been notified.
    sensor->atLeastOneSampleCalculated = FALSE;

    SAFE_MEMCPY(sensor->rotationMatrixStruct, rm);
    SAFE_MEMSET(sensor->driftCorrection, 0);

    SAFE_MEMSET(sensor->s_prevGyro, 0);
    sensor->s_motionlessTime = 0;
    sensor->s_startMotionlessTime = 0;

    sensor->s_firstTime = TRUE;

    SAFE_MEMSET(sensor->prevGyro, 0);
    SAFE_MEMSET(sensor->prevAccel, 0);
    sensor->previousTimestampMag = 0;

    sensor->platform = platform;
    InitRotationParameters(&sensor->Rotation);
    sensor->Calibration = platform->Calibrate;

    return (pIvhSensor)(sensor);
}
Example #26
ERROR_T CalculateRotation(pIvhSensorOrientation fusion)
{
    ERROR_T retVal = ERROR_OK;

    // ZRT was removed from GyroCalibrated, so there will be jitter in the incoming
    // gyro.  Apply a weak LPF to reduce the jittery behavior
    const float GYRO_LPF_ALPHA = 0.4f;

    ApplyLowPassFilter(fusion->lastCalibratedGyro,
        fusion->prevGyro,
        3,
        GYRO_LPF_ALPHA);

    // ZRT was removed from AccelerometerCalibrated, so there will be jitter in the incoming
    // accel.  Apply a weak LPF to reduce the jittery behavior
    const float ACCEL_LPF_ALPHA = 0.3f;

    ApplyLowPassFilter(fusion->lastCalibratedAccel ,
        fusion->prevAccel,
        3,
        ACCEL_LPF_ALPHA);

    // Calibrated magnetometer microdriver just maps the axes for this device
    // We need to send it to MagDynamicCali to get the mag data centered at zero
    // and mapped to a unit circle

    if (fusion->platform->CalibrationFeatures.enableMagCalibratedDynamicFilter)
    {
        //use dynamic filter aligning with gyro speed to filter out jitter
        ApplyDynamicLowPassFilter(fusion, fusion->lastCalibratedMag, fusion->lastCalibratedGyro);
    }

    // If gyro has been motionless for some time, there is no need to
    // run the device orientation algo.  This will prevent jitter at rest.
    if (TRUE == fusion->atLeastOneSampleCalculated && TRUE == ShouldApplyZRT(fusion))
    {
        fusion->lastCalibratedGyro[AXIS_X] = fusion->lastCalibratedGyro[AXIS_Y] = fusion->lastCalibratedGyro[AXIS_Z] = 0;
    }
    else
    {

        ROTATION_VECTOR_T incrementalRotationDegrees[3] = {0, 0, 0};

        // Use the ROTATED mag with differential correction for finding direction
        TraceLog(TRACE_LEVEL_VERBOSE, TRACE_ALGO, "a(%f,%f,%f)g(%f,%f,%f)m(%f,%f,%f)",
            fusion->lastCalibratedAccel[0],fusion->lastCalibratedAccel[1],fusion->lastCalibratedAccel[2],
            fusion->lastCalibratedGyro[0],fusion->lastCalibratedGyro[1],fusion->lastCalibratedGyro[2],
            fusion->lastRotatedMag[0],fusion->lastRotatedMag[1],fusion->lastRotatedMag[2]);

        CalculateRotationUsingGyro(&fusion->Rotation,
            fusion->lastCalibratedAccel , fusion->lastCalibratedGyro , fusion->lastRotatedMag,
            fusion->lastTimestampGyro,
            fusion->lastTimestampMag,
            fusion->lastTimestampAccel,
            fusion->magConfidence,
            fusion->rotationMatrixStruct,
            fusion->driftCorrection,
            incrementalRotationDegrees);

        if (fusion->lastTimestampMag > fusion->previousTimestampMag)
        {
            // Use the calibrated mag, NOT the rotated mag to check anomaly and refine offsets
            MagDetectAnomalyWithGyro(fusion->Calibration, fusion->lastCalibratedMag, fusion->lastCalibratedGyro , &fusion->magConfidence);
            RefineMagOffsetsWithGyro(fusion->Calibration, fusion->lastRawMag, fusion->lastCalibratedGyro, fusion->magConfidence);
            fusion->previousTimestampMag = fusion->lastTimestampMag;
        }

        EstimateHeadingError(fusion->magConfidence, incrementalRotationDegrees, &fusion->estimatedHeadingError);
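        // NOTE: the estimate computed above is immediately overwritten with the calibration error below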
        fusion->estimatedHeadingError = (uint8_t)(fusion->Calibration->Error);
    }

    fusion->atLeastOneSampleCalculated = TRUE;

    return retVal;
}
Example #27
//----------------------------------------------------------------------------------
// Main Entry point
//----------------------------------------------------------------------------------
int main(void)
{
    // Initialization
    //--------------------------------------------------------------------------------------
    int screenWidth = 1080;     // Mirror screen width (set to hmdDesc.Resolution.w/2)
    int screenHeight = 600;     // Mirror screen height (set to hmdDesc.Resolution.h/2)
    
    // NOTE: Mirror screen size can be set to any desired resolution!
    
    // GLFW3 Initialization + OpenGL 3.3 Context + Extensions
    //--------------------------------------------------------
    glfwSetErrorCallback(ErrorCallback);
    
    if (!glfwInit())
    {
        TraceLog(WARNING, "GLFW3: Can not initialize GLFW");
        return 1;
    }
    else TraceLog(INFO, "GLFW3: GLFW initialized successfully");
    
    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_DEPTH_BITS, 16);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, GL_TRUE);
   
    GLFWwindow *window = glfwCreateWindow(screenWidth, screenHeight, "rlgl oculus rift", NULL, NULL);
    
    if (!window)
    {
        glfwTerminate();
        return 2;
    }
    else TraceLog(INFO, "GLFW3: Window created successfully");
    
    glfwSetKeyCallback(window, KeyCallback);
    
    glfwMakeContextCurrent(window);
    glfwSwapInterval(0);

    // Load OpenGL 3.3 supported extensions
    rlglLoadExtensions(glfwGetProcAddress);
    //--------------------------------------------------------
    
    // Initialize OpenGL context (states and resources)
    rlglInit(screenWidth, screenHeight);
    
    rlClearColor(245, 245, 245, 255);                   // Define clear color
    rlEnableDepthTest();                                // Enable DEPTH_TEST for 3D
    
    // Define custom camera to initialize projection and view matrices
    Camera camera;
    camera.position = (Vector3){ 5.0f, 5.0f, 5.0f };    // Camera position
    camera.target = (Vector3){ 0.0f, 0.0f, 0.0f };      // Camera looking at point
    camera.up = (Vector3){ 0.0f, 1.0f, 0.0f };          // Camera up vector (rotation towards target)
    camera.fovy = 45.0f;                                // Camera field-of-view Y

    // Initialize viewport and internal projection/modelview matrices
    rlViewport(0, 0, screenWidth, screenHeight);
    rlMatrixMode(RL_PROJECTION);                        // Switch to PROJECTION matrix
    rlLoadIdentity();                                   // Reset current matrix (PROJECTION)
    
    // Setup perspective projection
    float aspect = (float)screenWidth/(float)screenHeight;
    double top = 0.01*tan(camera.fovy*PI/360.0);
    double right = top*aspect;
    rlFrustum(-right, right, -top, top, 0.01, 1000.0);
    
    rlMatrixMode(RL_MODELVIEW);                         // Switch back to MODELVIEW matrix
    rlLoadIdentity();                                   // Reset current matrix (MODELVIEW)
    
    // Setup Camera view
    Matrix cameraView = MatrixLookAt(camera.position, camera.target, camera.up);
    rlMultMatrixf(MatrixToFloat(cameraView));      // Multiply MODELVIEW matrix by view matrix (camera)

    InitOculusDevice();                                 // Initialize Oculus Rift CV1
    
    Vector3 cubePosition = { 0.0f, 0.0f, 0.0f };
    //--------------------------------------------------------------------------------------    

    // Main game loop    
    while (!glfwWindowShouldClose(window)) 
    {
        // Update
        //----------------------------------------------------------------------------------
        UpdateOculusTracking();
        //----------------------------------------------------------------------------------

        // Draw
        //----------------------------------------------------------------------------------
        BeginOculusDrawing();
        
            rlClearScreenBuffers();             // Clear current framebuffer(s)

            DrawCube(cubePosition, 2.0f, 2.0f, 2.0f, RED);
            DrawCubeWires(cubePosition, 2.0f, 2.0f, 2.0f, RAYWHITE);
            DrawGrid(10, 1.0f);

            // NOTE: Internal buffers drawing (3D data)
            rlglDraw();

        EndOculusDrawing();

        glfwSwapBuffers(window);
        glfwPollEvents();
        //----------------------------------------------------------------------------------
    }

    // De-Initialization
    //--------------------------------------------------------------------------------------
    CloseOculusDevice();            // Close Oculus device and clear resources

    rlglClose();                    // Unload rlgl internal buffers and default shader/texture
    
    glfwDestroyWindow(window);      // Close window
    glfwTerminate();                // Free GLFW3 resources
    //--------------------------------------------------------------------------------------
    
    return 0;
}
Example #28
File: audio.c Project: Danlestal/raylib
// Load WAV file into Wave structure
static Wave LoadWAV(const char *fileName)
{
    // Basic WAV headers structs
    typedef struct {
        char chunkID[4];
        long chunkSize;
        char format[4];
    } RiffHeader;

    typedef struct {
        char subChunkID[4];
        long subChunkSize;
        short audioFormat;
        short numChannels;
        long sampleRate;
        long byteRate;
        short blockAlign;
        short bitsPerSample;
    } WaveFormat;

    typedef struct {
        char subChunkID[4];
        long subChunkSize;
    } WaveData;
    
    RiffHeader riffHeader;
    WaveFormat waveFormat;
    WaveData waveData;
    
    Wave wave;
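    // NOTE: wave is returned uninitialized if the file cannot be opened or a header check below fails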
    FILE *wavFile;
    
    wavFile = fopen(fileName, "rb");
    
    if (!wavFile)
    {
        TraceLog(WARNING, "[%s] Could not open WAV file", fileName);
    }
    else
    {
        // Read in the first chunk into the struct
        fread(&riffHeader, sizeof(RiffHeader), 1, wavFile);
     
        // Check for RIFF and WAVE tags
        if (((riffHeader.chunkID[0] != 'R') || (riffHeader.chunkID[1] != 'I') || (riffHeader.chunkID[2] != 'F') || (riffHeader.chunkID[3] != 'F')) ||
            ((riffHeader.format[0] != 'W') || (riffHeader.format[1] != 'A') || (riffHeader.format[2] != 'V') || (riffHeader.format[3] != 'E')))
        {
                TraceLog(WARNING, "[%s] Invalid RIFF or WAVE Header", fileName);
        }
        else
        {
            // Read in the 2nd chunk for the wave info
            fread(&waveFormat, sizeof(WaveFormat), 1, wavFile);
            
            // Check for fmt tag
            if ((waveFormat.subChunkID[0] != 'f') || (waveFormat.subChunkID[1] != 'm') ||
                (waveFormat.subChunkID[2] != 't') || (waveFormat.subChunkID[3] != ' '))
            {
                TraceLog(WARNING, "[%s] Invalid Wave format", fileName);
            }
            else
            {
                // Check for extra parameters;
                if (waveFormat.subChunkSize > 16) fseek(wavFile, sizeof(short), SEEK_CUR);
             
                // Read in the last byte of data before the sound file
                fread(&waveData, sizeof(WaveData), 1, wavFile);
                
                // Check for data tag
                if ((waveData.subChunkID[0] != 'd') || (waveData.subChunkID[1] != 'a') ||
                    (waveData.subChunkID[2] != 't') || (waveData.subChunkID[3] != 'a'))
                {
                    TraceLog(WARNING, "[%s] Invalid data header", fileName);
                }
                else
                {
                    // Allocate memory for data
                    wave.data = (unsigned char *)malloc(sizeof(unsigned char) * waveData.subChunkSize); 
                 
                    // Read in the sound data into the soundData variable
                    fread(wave.data, waveData.subChunkSize, 1, wavFile);
                    
                    // Now we set the variables that we need later
                    wave.dataSize = waveData.subChunkSize;
                    wave.sampleRate = waveFormat.sampleRate;
                    wave.channels = waveFormat.numChannels;
                    wave.bitsPerSample = waveFormat.bitsPerSample;
                    
                    TraceLog(INFO, "[%s] Wave file loaded successfully", fileName);
                }
            }
        }

        fclose(wavFile);
    }
    
    return wave;
}
Example #29
File: audio.c Project: Danlestal/raylib
// Load sound to memory
Sound LoadSound(char *fileName)
{
    Sound sound;
    Wave wave;
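    // NOTE: wave stays uninitialized if the file extension is not recognized below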
    
    // NOTE: The entire file is loaded to memory to play it all at once (no-streaming)
    
    // Audio file loading
    // NOTE: Buffer space is allocated inside function, Wave must be freed
    
    if (strcmp(GetExtension(fileName),"wav") == 0) wave = LoadWAV(fileName);
    else if (strcmp(GetExtension(fileName),"ogg") == 0) wave = LoadOGG(fileName);
    else TraceLog(WARNING, "[%s] Sound extension not recognized, it can't be loaded", fileName);
    
    if (wave.data != NULL)
    {
        ALenum format = 0;
        // The OpenAL format is worked out by looking at the number of channels and the bits per sample
        if (wave.channels == 1) 
        {
            if (wave.bitsPerSample == 8 ) format = AL_FORMAT_MONO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_MONO16;
        } 
        else if (wave.channels == 2) 
        {
            if (wave.bitsPerSample == 8 ) format = AL_FORMAT_STEREO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_STEREO16;
        }
        
        // Create an audio source
        ALuint source;
        alGenSources(1, &source);            // Generate pointer to audio source

        alSourcef(source, AL_PITCH, 1);    
        alSourcef(source, AL_GAIN, 1);
        alSource3f(source, AL_POSITION, 0, 0, 0);
        alSource3f(source, AL_VELOCITY, 0, 0, 0);
        alSourcei(source, AL_LOOPING, AL_FALSE);
        
        // Convert loaded data to OpenAL buffer
        //----------------------------------------
        ALuint buffer;
        alGenBuffers(1, &buffer);            // Generate pointer to buffer

        // Upload sound data to buffer
        alBufferData(buffer, format, wave.data, wave.dataSize, wave.sampleRate);

        // Attach sound buffer to source
        alSourcei(source, AL_BUFFER, buffer);
        
        // Unallocate WAV data
        UnloadWave(wave);
        
        TraceLog(INFO, "[%s] Sound file loaded successfully", fileName);  
        TraceLog(INFO, "[%s] Sample rate: %i - Channels: %i", fileName, wave.sampleRate, wave.channels);
        
        sound.source = source;
        sound.buffer = buffer;
    }
    
    return sound;
}
Example #30
File: audio.c Project: Danlestal/raylib
// Load sound to memory from rRES file (raylib Resource)
Sound LoadSoundFromRES(const char *rresName, int resId)
{
    // NOTE: rresName could be directly a char array with all the data!!! --> TODO
    Sound sound;
    bool found = false;

    char id[4];             // rRES file identifier
    unsigned char version;  // rRES file version and subversion
    char useless;           // rRES header reserved data
    short numRes;
    
    ResInfoHeader infoHeader;
    
    FILE *rresFile = fopen(rresName, "rb");

    if (!rresFile) TraceLog(WARNING, "[%s] Could not open raylib resource file", rresName);
    else
    {
        // Read rres file (basic file check - id)
        fread(&id[0], sizeof(char), 1, rresFile);
        fread(&id[1], sizeof(char), 1, rresFile);
        fread(&id[2], sizeof(char), 1, rresFile);
        fread(&id[3], sizeof(char), 1, rresFile);
        fread(&version, sizeof(char), 1, rresFile);
        fread(&useless, sizeof(char), 1, rresFile);
        
        if ((id[0] != 'r') || (id[1] != 'R') || (id[2] != 'E') || (id[3] != 'S'))
        {
            TraceLog(WARNING, "[%s] This is not a valid raylib resource file", rresName);
        }
        else
        {
            // Read number of resources embedded
            fread(&numRes, sizeof(short), 1, rresFile);
            
            for (int i = 0; i < numRes; i++)
            {
                fread(&infoHeader, sizeof(ResInfoHeader), 1, rresFile);
                
                if (infoHeader.id == resId)
                {
                    found = true;

                    // Check data is of valid SOUND type
                    if (infoHeader.type == 1)   // SOUND data type
                    {
                        // TODO: Check data compression type
                        // NOTE: We suppose compression type 2 (DEFLATE - default)
                        
                        // Reading SOUND parameters
                        Wave wave;
                        short sampleRate, bps;
                        char channels, reserved;
                    
                        fread(&sampleRate, sizeof(short), 1, rresFile); // Sample rate (frequency)
                        fread(&bps, sizeof(short), 1, rresFile);        // Bits per sample
                        fread(&channels, 1, 1, rresFile);               // Channels (1 - mono, 2 - stereo)
                        fread(&reserved, 1, 1, rresFile);               // <reserved>
                                
                        wave.sampleRate = sampleRate;
                        wave.dataSize = infoHeader.srcSize;
                        wave.bitsPerSample = bps;
                        wave.channels = (short)channels;
                        
                        unsigned char *data = malloc(infoHeader.size);

                        fread(data, infoHeader.size, 1, rresFile);
                        
                        wave.data = DecompressData(data, infoHeader.size, infoHeader.srcSize);
                        
                        free(data);
                        
                        // Convert wave to Sound (OpenAL)
                        ALenum format = 0;
                        
                        // The OpenAL format is worked out by looking at the number of channels and the bits per sample
                        if (wave.channels == 1) 
                        {
                            if (wave.bitsPerSample == 8 ) format = AL_FORMAT_MONO8;
                            else if (wave.bitsPerSample == 16) format = AL_FORMAT_MONO16;
                        } 
                        else if (wave.channels == 2) 
                        {
                            if (wave.bitsPerSample == 8 ) format = AL_FORMAT_STEREO8;
                            else if (wave.bitsPerSample == 16) format = AL_FORMAT_STEREO16;
                        }
                        
                        
                        // Create an audio source
                        ALuint source;
                        alGenSources(1, &source);            // Generate pointer to audio source

                        alSourcef(source, AL_PITCH, 1);    
                        alSourcef(source, AL_GAIN, 1);
                        alSource3f(source, AL_POSITION, 0, 0, 0);
                        alSource3f(source, AL_VELOCITY, 0, 0, 0);
                        alSourcei(source, AL_LOOPING, AL_FALSE);
                        
                        // Convert loaded data to OpenAL buffer
                        //----------------------------------------
                        ALuint buffer;
                        alGenBuffers(1, &buffer);            // Generate pointer to buffer

                        // Upload sound data to buffer
                        alBufferData(buffer, format, (void*)wave.data, wave.dataSize, wave.sampleRate);

                        // Attach sound buffer to source
                        alSourcei(source, AL_BUFFER, buffer);
                        
                        // Unallocate WAV data
                        UnloadWave(wave);

                        TraceLog(INFO, "[%s] Sound loaded successfully from resource, sample rate: %i", rresName, (int)sampleRate);
                        
                        sound.source = source;
                        sound.buffer = buffer;
                    }
                    else
                    {
                        TraceLog(WARNING, "[%s] Required resource do not seem to be a valid SOUND resource", rresName);
                    }
                }
                else
                {
                    // Depending on type, skip the right amount of parameters
                    switch (infoHeader.type)
                    {
                        case 0: fseek(rresFile, 6, SEEK_CUR); break;   // IMAGE: Jump 6 bytes of parameters
                        case 1: fseek(rresFile, 6, SEEK_CUR); break;   // SOUND: Jump 6 bytes of parameters
                        case 2: fseek(rresFile, 5, SEEK_CUR); break;   // MODEL: Jump 5 bytes of parameters (TODO: Review)
                        case 3: break;   // TEXT: No parameters
                        case 4: break;   // RAW: No parameters
                        default: break;
                    }
                    
                    // Jump DATA to read next infoHeader
                    fseek(rresFile, infoHeader.size, SEEK_CUR);
                }    
            }
        }
        
        fclose(rresFile);
    }
    
    if (!found) TraceLog(WARNING, "[%s] Required resource id [%i] could not be found in the raylib resource file", rresName, resId);
    
    return sound;
}