Example #1
/******************************************************************************
*
* smfTicksToMillisecs
*
* This function returns the millisecond offset into the file given the
* tick offset.
*
* hSmf                      - Specifies the open MIDI file to perform
*                             the conversion on.
*
* tkOffset                  - Specifies the tick offset into the stream
*                             to convert.
*
* Returns the number of milliseconds from the start of the stream.
*
* The conversion is performed taking into account the file's time division and
* tempo map from the first track. Note that the same millisecond value
* might not be valid at a later time if the tempo track is rewritten.
*
*****************************************************************************/
DWORD FNLOCAL smfTicksToMillisecs(
    HSMF                    hSmf,
    TICKS                   tkOffset)
{
    PSMF                    pSmf            = (PSMF)hSmf;
    PTEMPOMAPENTRY          pTempo;
    UINT                    idx;
    UINT                    uSMPTE;
    DWORD                   dwTicksPerSec;

    assert(pSmf != NULL);

    if (tkOffset > pSmf->tkLength)
    {
        DPF(1, "sTTM: Clipping ticks to file length!");
        tkOffset = pSmf->tkLength;
    }

    /* SMPTE time is easy -- no tempo map, just linear conversion
    ** Note that 30-Drop means nothing to us here since we're not
    ** converting to a colonized format, which is where dropping
    ** happens.
    */
    if (pSmf->dwTimeDivision & 0x8000)
    {
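        /* The high byte of dwTimeDivision stores the SMPTE frame rate as a
        ** negative two's-complement value; negating it recovers frames per
        ** second, and the low byte gives ticks per frame.
        */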
        uSMPTE = -(int)(char)((pSmf->dwTimeDivision >> 8)&0xFF);
        if (29 == uSMPTE)
            uSMPTE = 30;
        
        dwTicksPerSec = (DWORD)uSMPTE *
                        (DWORD)(BYTE)(pSmf->dwTimeDivision & 0xFF);
        
        return (DWORD)muldiv32(tkOffset, 1000L, dwTicksPerSec);
    }
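
Each of the examples on this page leans on muldiv32(), the scaled multiply-divide helper from the multimedia SDK samples. The sketch below is not the SDK implementation (which typically ships as assembly or a compiler-specific wrapper); it is a minimal C equivalent, assuming a 64-bit integer type is available, that shows why the helper exists: an intermediate product such as tkOffset * 1000 can easily overflow 32 bits.

/* Minimal sketch of a muldiv32-style helper -- not the SDK's version.
** Computes (a * b) / c while keeping the intermediate product in 64 bits.
*/
LONG muldiv32_sketch(LONG a, LONG b, LONG c)
{
    LONGLONG product = (LONGLONG)a * b;   /* full 64-bit intermediate product */
    return (LONG)(product / c);           /* scale back down to 32 bits       */
}
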
Example #2
void PaintAudio(HDC hdc, PRECT prc, PAVISTREAM pavi, LONG lStart, LONG lLen)
{
#ifndef INTERIM_64_BIT	// CCJ
    LPVOID lpAudio=NULL;
    PCMWAVEFORMAT wf;
    int i;
    int x,y;
    int w,h;
    BYTE b;
    HBRUSH hbr;
    RECT rc = *prc;
    LONG    lBytes;
    LONG    l, lLenOrig = lLen;
    LONG    lWaveBeginTime = AVIStreamStartTime(pavi);
    LONG    lWaveEndTime   = AVIStreamEndTime(pavi);

    //
    // We can't draw before the beginning of the stream - adjust
    //
    if (lStart < lWaveBeginTime) {
        lLen -= lWaveBeginTime - lStart;
        lStart = lWaveBeginTime;
        // right justify the legal samples in the rectangle - don't stretch
        rc.left = rc.right - (int)muldiv32(rc.right - rc.left, lLen, lLenOrig);
    }

    //
    // We can't draw past the end of the stream
    //
    if (lStart + lLen > lWaveEndTime) {
        lLenOrig = lLen;
        lLen = max(0, lWaveEndTime - lStart);   // maybe nothing to draw!
        // left justify the legal samples in the rectangle - don't stretch
        rc.right = rc.left + (int)muldiv32(rc.right - rc.left, lLen, lLenOrig);
    }

    // Now start working with samples, not time
    l = lStart;
    lStart = AVIStreamTimeToSample(pavi, lStart);
    lLen   = AVIStreamTimeToSample(pavi, l + lLen) - lStart;

    //
    // Get the format of the wave data
    //
    l = sizeof(wf);
    AVIStreamReadFormat(pavi, lStart, &wf, &l);
    if (!l)
        return;

    w = rc.right - rc.left;
    h = rc.bottom - rc.top;

    //
    // We were starting before the beginning or continuing past the end.
    // We're not painting in the whole original rect --- use a dark background
    //
    if (rc.left > prc->left) {
        SelectObject(hdc, GetStockObject(DKGRAY_BRUSH));
        PatBlt(hdc, prc->left, rc.top, rc.left - prc->left,
               rc.bottom - rc.top, PATCOPY);
    }
    if (rc.right < prc->right) {
        SelectObject(hdc, GetStockObject(DKGRAY_BRUSH));
        PatBlt(hdc, rc.right, rc.top, prc->right - rc.right,
               rc.bottom - rc.top, PATCOPY);
    }

#define BACKBRUSH  (GetSysColor(COLOR_BTNFACE))		// background
#define MONOBRUSH  (GetSysColor(COLOR_BTNSHADOW))	// for mono audio
#define LEFTBRUSH  (RGB(0,0,255))			// left channel
#define RIGHTBRUSH (RGB(0,255,0))			// right channel
#define HPOSBRUSH  (RGB(255,0,0))			// current position
    
    //
    // Paint the background
    //
    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(BACKBRUSH));
    PatBlt(hdc, rc.left, rc.top, w, h, PATCOPY);
    DeleteObject(SelectObject(hdc, hbr));

    //
    // !!! we can only paint PCM data right now.  Sorry!
    //
    if (wf.wf.wFormatTag != WAVE_FORMAT_PCM)
        return;

    //
    // How many bytes are we painting? Alloc some space for them
    //
    lBytes = lLen * wf.wf.nChannels * wf.wBitsPerSample / 8;
    if (!lpAudio)
        lpAudio = GlobalAllocPtr (GHND, lBytes);
    else if ((LONG)GlobalSizePtr(lpAudio) < lBytes)
        lpAudio = GlobalReAllocPtr(lpAudio, lBytes, GMEM_MOVEABLE);
    if (!lpAudio)
        return;

    //
    // Read in the wave data
    //
    AVIStreamRead(pavi, lStart, lLen, lpAudio, lBytes, NULL, &l);
    if (l != lLen) {
        GlobalFreePtr(lpAudio);     // don't leak the sample buffer on a short read
        return;
    }
    
#define MulDiv(a,b,c) (UINT)((DWORD)(UINT)(a) * (DWORD)(UINT)(b) / (UINT)(c))

    //
    // !!! Flickers less painting it NOW or LATER?
    // First show the current position as a bar
    //
    //hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(HPOSBRUSH));
    //PatBlt(hdc, prc->right / 2, prc->top, 1, prc->bottom - prc->top, PATCOPY);
    //DeleteObject(SelectObject(hdc, hbr));

    //
    // Paint monochrome wave data
    //
    if (wf.wf.nChannels == 1) {

        //
        // Draw the x-axis
        //
        hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(MONOBRUSH));
        y = rc.top + h/2;
        PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

        //
        // 8 bit data is centred around 0x80
        //
        if (wf.wBitsPerSample == 8) {
            for (x=0; x<w; x++) {

                // which byte of audio data belongs at this pixel?
                b = *((HPBYTE)lpAudio + muldiv32(x, lLen, w));

                if (b > 0x80) {
                    i = y - MulDiv(b - 0x80, (h / 2), 128);
                    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
                }
                else {
                    i = y + MulDiv(0x80 - b, (h / 2), 128);
                    PatBlt(hdc, rc.left + x, y, 1, i - y, PATCOPY);
                }
            }
        }

        //
        // 16 bit data is centred around 0x00
        //
        else if (wf.wBitsPerSample == 16) {
            for (x=0; x<w; x++) {

                // which sample of audio data belongs at this pixel?
                i = *((HPINT)lpAudio + muldiv32(x,lLen,w));

                if (i > 0) {
                    i = y - (int) ((LONG)i * (h/2) / 32768);
                    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
                }
                else {
                    i = (int) ((LONG)i * (h/2) / 32768);
                    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
                }
            }
        }
        DeleteObject(SelectObject(hdc, hbr));
    } // endif mono

    //
    // Draw stereo waveform data
    //
    else if (wf.wf.nChannels == 2) {
        //
        // 8 bit data is centred around 0x80
        //
        if (wf.wBitsPerSample == 8) {

            // Left channel
            hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(LEFTBRUSH));
            y = rc.top + h/4;
            PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

            for (x=0; x<w; x++) {
                b = *((HPBYTE)lpAudio + muldiv32(x,lLen,w) * 2);

                if (b > 0x80) {
                    i = y - MulDiv(b-0x80,(h/4),128);
                    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
                }
                else {
                    i = y + MulDiv(0x80-b,(h/4),128);
                    PatBlt(hdc, rc.left+x, y, 1, i-y, PATCOPY);
                }
            }
            DeleteObject(SelectObject(hdc, hbr));

            // Right channel
            hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(RIGHTBRUSH));
            y = rc.top + h * 3 / 4;
            PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

            for (x=0; x<w; x++) {
                b = *((HPBYTE)lpAudio + muldiv32(x,lLen,w) * 2 + 1);

                if (b > 0x80) {
                    i = y - MulDiv(b-0x80,(h/4),128);
                    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
                }
                else {
                    i = y + MulDiv(0x80-b,(h/4),128);
                    PatBlt(hdc, rc.left+x, y, 1, i-y, PATCOPY);
                }
            }
            DeleteObject(SelectObject(hdc, hbr));
        }

        //
        // 16 bit data is centred around 0x00
        //
        else if (wf.wBitsPerSample == 16) {

            // Left channel
            hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(LEFTBRUSH));
            y = rc.top + h/4;
            PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

            for (x=0; x<w; x++) {
                i = *((HPINT)lpAudio + muldiv32(x,lLen,w) * 2);
                if (i > 0) {
                    i = y - (int) ((LONG)i * (h/4) / 32768);
                    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
                }
                else {
                    i = (int) ((LONG)i * (h/4) / 32768);
                    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
                }
            }
            DeleteObject(SelectObject(hdc, hbr));

            // Right channel
            hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(RIGHTBRUSH));
            y = rc.top + h * 3 / 4;
            PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

            for (x=0; x<w; x++) {
                i = *((HPINT)lpAudio + muldiv32(x,lLen,w) * 2 + 1);
                if (i > 0) {
                    i = y - (int) ((LONG)i * (h/4) / 32768);
                    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
                }
                else {
                    i = (int) ((LONG)i * (h/4) / 32768);
                    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
                }
            }
            DeleteObject(SelectObject(hdc, hbr));
        }
    } // endif stereo

    if (lpAudio) {
        GlobalFreePtr(lpAudio);
        lpAudio = NULL;
    }
#endif  // INTERIM_64_BIT
}
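
A hedged usage sketch follows: PaintAudio() takes lStart and lLen in stream time (milliseconds) and converts to samples itself, so a caller only has to supply a time window and a rectangle. The helper below is an assumption rather than part of the sample; pavi, lCurTimeMsec and lWindowMsec stand for the open audio stream, the current playback position and the visible span, all provided by the application.

/* Hypothetical caller: repaint the waveform centred on the current playback
** position. Must run in response to WM_PAINT because of BeginPaint/EndPaint.
*/
void RepaintWaveform(HWND hwnd, PAVISTREAM pavi, LONG lCurTimeMsec, LONG lWindowMsec)
{
    PAINTSTRUCT ps;
    RECT        rc;
    HDC         hdc = BeginPaint(hwnd, &ps);

    GetClientRect(hwnd, &rc);

    /* Centre the visible time window on the current playback position */
    PaintAudio(hdc, &rc, pavi, lCurTimeMsec - lWindowMsec / 2, lWindowMsec);

    EndPaint(hwnd, &ps);
}
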
Example #3
// Return the time in milliseconds corresponding to the currently playing
// audio sample, or -1 if no audio is playing.
LONG CALLBACK aviaudioTime(void)
{
	MMTIME      mmtime;

	if (audioPlayable <= 0)
		return -1;

	if (!sfPlaying)
		return -1;

	// Re-read the cached stream bounds when they have been flagged as stale
	if (recalc) {
		streamEnd = AVIStreamEnd(spavi);
		streamStart = AVIStreamStart(spavi);
		recalc = 0;
		//ErrMsg("recalc");
	}

	if ((streamEnd<=streamStart) || (streamEnd<=0))
		return -1;

	mmtime.wType = TIME_SAMPLES;

	if (waveOutGetPosition(shWaveOut, &mmtime, sizeof(mmtime)) != MMSYSERR_NOERROR)
		return -1;

	if (mmtime.wType == TIME_SAMPLES)
		return AVIStreamSampleToTime(spavi, slBegin) + muldiv32(mmtime.u.sample, 1000, sdwSamplesPerSec);
	else if (mmtime.wType == TIME_BYTES)
		return AVIStreamSampleToTime(spavi, slBegin) + muldiv32(mmtime.u.cb, 1000, sdwBytesPerSec);
	else
		return -1;
}
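
The value returned by aviaudioTime() is in the same millisecond time base the other AVIStream calls use, so it can be fed straight back into AVIStreamTimeToSample(). The helper below is a hedged sketch of that round trip; spaviVideo is an assumption, an open video stream that accompanies the audio, used here only to pick which frame to display.

/* Hypothetical helper: map the current audio playback time to a video frame
** index so the picture stays in sync with the sound.
*/
LONG CurrentVideoFrame(PAVISTREAM spaviVideo)
{
    LONG lMsec = aviaudioTime();    /* -1 if no audio is playing */

    if (lMsec < 0)
        return -1;

    /* Convert milliseconds back to a sample (frame) number in the video stream */
    return AVIStreamTimeToSample(spaviVideo, lMsec);
}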