Example #1
0
// Play audio, starting at a given frame/sample
BOOL CALLBACK aviaudioPlay(HWND hwnd, PAVISTREAM pavi, LONG lStart, LONG lEnd, BOOL fWait)
{
	if (audioPlayable <= 0)
		return FALSE;

	recalc = 1;

	if (lStart < 0)
		lStart = ::AVIStreamStart(pavi);

	if (lEnd < 0)
		lEnd = AVIStreamEnd(pavi);

	if (lStart >= lEnd) {
		return FALSE;
	}

	if (!aviaudioOpenDevice(hwnd, pavi)) {
		MessageBox(NULL,"AudioOpen failed","Note",MB_OK | MB_ICONEXCLAMATION);
		return FALSE;
	}

	if (!sfPlaying) {
		// We're beginning play, so pause until we've filled the buffers
		// for a seamless start
		waveOutPause(shWaveOut);

		slBegin = lStart;
		slCurrent = lStart;
		slEnd = lEnd;
		sfPlaying = TRUE;
	} else {
		slEnd = lEnd;
	}

	aviaudioiFillBuffers();

	// Now unpause the audio and away it goes!
	waveOutRestart(shWaveOut);

	// Caller wants us not to return until play is finished
	if (fWait) {
		while (swBuffersOut > 0)
			Yield();
	}

	return TRUE;
}
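The function above substitutes AVIStreamStart()/AVIStreamEnd() whenever the caller passes a negative position, so -1/-1 means "play the whole audio stream". A minimal caller sketch (PlayWholeClip and paviAudio are illustrative names, not part of the original source):

// Minimal caller sketch: plays the entire audio stream by letting
// aviaudioPlay() fall back to AVIStreamStart()/AVIStreamEnd().
void PlayWholeClip(HWND hwnd, PAVISTREAM paviAudio)
{
	// FALSE: return immediately instead of blocking until playback ends
	if (!aviaudioPlay(hwnd, paviAudio, -1, -1, FALSE)) {
		// Playback could not start (no audio device, empty stream, ...)
	}
}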
Example #2
0
/////////////////////////////////////////////////////////
// Get BITMAPINFOHEADER information for the video stream
/////////////////////////////////////////////////////////
void CAviMan::GetBmpHeader(BITMAPINFO *pBmpHeader)
{
	LONG bmp_size = sizeof( BITMAPINFO );

	AVIStreamReadFormat(m_aviVideoStream, 0, &(pBmpHeader->bmiHeader), &bmp_size);	// Read format

	memcpy(&m_bmpHeader, pBmpHeader, bmp_size);

	m_dwStartFrame = AVIStreamStart(m_aviVideoStream);
	m_dwTotalFrame = AVIStreamEnd(m_aviVideoStream);	// end position; equals the frame count only when the stream starts at 0

	m_dwImageSize = m_bmpHeader.bmiHeader.biWidth * m_bmpHeader.bmiHeader.biHeight;

	switch (m_bmpHeader.bmiHeader.biBitCount)
	{
	case 24: m_dwImageSize *= 3; break;
	case 16: m_dwImageSize *= 2; break;
	// 8-bit frames keep width * height; other bit depths are not handled here
	}
}
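GetBmpHeader() computes the frame size as width * height * bytes-per-pixel, which is only exact when each row happens to be a multiple of four bytes; DIB rows are padded to DWORD boundaries. A sketch of the aligned calculation (GetAlignedImageSize is an illustrative helper, not part of CAviMan):

// Sketch: DWORD-aligned DIB size. Each row is padded to a 4-byte boundary,
// so width * height * bytes-per-pixel can undercount the buffer size.
static DWORD GetAlignedImageSize(const BITMAPINFOHEADER& bih)
{
	DWORD stride = ((bih.biWidth * bih.biBitCount + 31) / 32) * 4;
	LONG  rows   = (bih.biHeight < 0) ? -bih.biHeight : bih.biHeight;	// top-down DIBs have a negative height
	return stride * rows;
}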
Example #3
0
void VideoReader::ReadNextFrame(FrameData& data)
{
    // Read one frame of stream data using AVIStreamRead.
    if (m_currentSize < AVIStreamEnd(m_pAviStream))
    {
        HRESULT hr = AVIStreamRead(m_pAviStream, m_currentSize, 1, (LPVOID)m_lpBuffer,
            m_bi.biSizeImage, NULL, NULL);
        if (hr != 0)
            return;     // read failed; leave m_currentSize unchanged so the frame can be retried

        data.m_BBP = m_bi.biBitCount;
        data.m_ColorPlanes = m_bi.biPlanes;
        data.m_EndHeight = m_bi.biHeight;
        data.m_EndWidth = m_bi.biWidth;
        data.m_pFrame = m_lpBuffer;
        data.m_Pitch = m_bi.biWidth * (m_bi.biBitCount / 8);
        data.m_Size = m_bi.biSizeImage;
        data.m_StartHeight = 0;
        data.m_StartWidth = 0;
        m_pVideoAgent->ProcessFrame(data);
        ++m_currentSize;
    }
}
Example #4
0
VDPosition AVIReadTunnelStream::End() {
    return AVIStreamEnd(pas);
}
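Example #4 just forwards AVIStreamEnd(). As far as I recall, vfw.h defines AVIStreamEnd as a convenience macro equal to AVIStreamStart(pavi) + AVIStreamLength(pavi), which is also why Example #6 below can assert that the end position equals AVIStreamLength() whenever the stream starts at sample 0. A tiny illustration:

// Illustrative only: the sample count of a stream expressed two ways.
LONG CountSamples(PAVISTREAM pavi)
{
	// AVIStreamEnd(pavi) - AVIStreamStart(pavi) == AVIStreamLength(pavi)
	return AVIStreamEnd(pavi) - AVIStreamStart(pavi);
}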
Example #5
0
void VideoHelper::OpenVideo(CString strFilePath, FrameData& data)
{    
    AVIFileInit();

    HRESULT hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);
    if (hr != 0){ 
        // Handle failure.
        AfxMessageBox(L"Failed to open file."); 
    }
    else
    {
        PAVIFILE         pf; 
        PAVISTREAM       psSmall; 
        AVISTREAMINFO    strhdr; 
        BITMAPINFOHEADER bi; 
        BITMAPINFOHEADER biNew; 
        LONG             lStreamSize; 
        LPVOID           lpOld; 
        LPVOID           lpNew; 

        // Determine the size of the format data using 
        // AVIStreamFormatSize. 
        AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize); 
        if (lStreamSize > sizeof(bi)) // Format too large? 
            return; 

        lStreamSize = sizeof(bi); 
        hr = AVIStreamReadFormat(m_pAviStream, 0, &bi, &lStreamSize); // Read format 
        if (bi.biCompression != BI_RGB) // Wrong compression format? 
            return; 

        hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr)); 

        // Create new AVI file using AVIFileOpen. 
        hr = AVIFileOpen(&pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL); 
        if (hr != 0) 
            return; 

        // Set parameters for the new stream. 
        biNew = bi; 

        SetRect(&strhdr.rcFrame, 0, 0, (int) biNew.biWidth, 
            (int) biNew.biHeight); 

        // Create a stream using AVIFileCreateStream. 
        hr = AVIFileCreateStream(pf, &psSmall, &strhdr); 
        if (hr != 0) {            //Stream created OK? If not, close file. 
            AVIFileRelease(pf); 
            return; 
        } 

        // Set format of new stream using AVIStreamSetFormat. 
        hr = AVIStreamSetFormat(psSmall, 0, &biNew, sizeof(biNew)); 
        if (hr != 0) { 
            AVIStreamRelease(psSmall); 
            AVIFileRelease(pf); 
            return; 
        } 

        // Allocate memory for the bitmaps.
        lpOld = malloc(bi.biSizeImage);
        if (lpOld == NULL) {      // Allocation failed? Clean up and bail out.
            AVIStreamRelease(psSmall);
            AVIFileRelease(pf);
            return;
        }

        // Read the stream data using AVIStreamRead.
        for (lStreamSize = AVIStreamStart(m_pAviStream); lStreamSize <
            AVIStreamEnd(m_pAviStream); lStreamSize++) {
                hr = AVIStreamRead(m_pAviStream, lStreamSize, 1, lpOld, bi.biSizeImage,
                    NULL, NULL);
                data.m_BBP = bi.biBitCount;
                data.m_ColorPlanes = bi.biPlanes;
                data.m_EndHeight = bi.biHeight;
                data.m_EndWidth = bi.biWidth;
                data.m_pFrame = (BYTE*)lpOld;
                data.m_Pitch = bi.biWidth * (bi.biBitCount / 8);
                data.m_Size = bi.biSizeImage;
                data.m_StartHeight = 0;
                data.m_StartWidth = 0;
                lpNew = m_pVideoAgent->ProcessFrame(data);

                if (lpNew != NULL)
                {
                    // Save the compressed data using AVIStreamWrite.
                    hr = AVIStreamWrite(psSmall, lStreamSize, 1, lpNew,
                    biNew.biSizeImage, AVIIF_KEYFRAME, NULL, NULL);
                }
        } 
        free(lpOld);
        // Close the stream and file. 
        AVIStreamRelease(psSmall); 
        AVIFileRelease(pf); 
    }
    AVIFileExit();
}
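Example #5 bails out on anything that is not BI_RGB. For compressed video, a common alternative is the AVIStreamGetFrame family, which hands back each frame as a decompressed packed DIB. A minimal sketch under that assumption (WalkFrames and ProcessDib are illustrative names):

// Sketch: iterate a video stream with AVIStreamGetFrame instead of raw
// AVIStreamRead, so compressed formats are decoded for us.
void WalkFrames(PAVISTREAM pavi, void (*ProcessDib)(const BITMAPINFOHEADER*))
{
	PGETFRAME pgf = AVIStreamGetFrameOpen(pavi, NULL);	// NULL = default format
	if (pgf == NULL)
		return;	// no suitable decompressor installed

	for (LONG pos = AVIStreamStart(pavi); pos < AVIStreamEnd(pavi); ++pos) {
		// Returns a packed DIB (BITMAPINFOHEADER followed by the pixels);
		// the buffer belongs to the PGETFRAME object, so do not free it.
		LPVOID dib = AVIStreamGetFrame(pgf, pos);
		if (dib != NULL)
			ProcessDib((const BITMAPINFOHEADER*)dib);
	}

	AVIStreamGetFrameClose(pgf);
}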
Example #6
0
// AVI_stream_open() will open the AVI file and prepare it for reading, but will not 
// store any of the frame data. 
//
//	returns:   0 ==> success
//           !0 ==> could not open the AVI stream
//
// The filename is expected to be an absolute pathname (or file in the current working directory)
//
int AVI_stream_open(char* filename)
{
	if ( !AVI_stream_inited )
		AVI_stream_init();

	int				hr; 
	PAVIFILE			pfile; 
	PAVISTREAM		pstream;
	AVISTREAMINFO	avi_stream_info;

	Assert( !(AVI_stream.flags & AVI_STREAM_F_USED) );

	// Open the AVI file
	hr = AVIFileOpen(&pfile, filename, OF_SHARE_DENY_WRITE, 0); 
	if (hr != 0){ 
//		nprintf(("Warning", "AVI ==> Unable to open %s", filename)); 
		return -1; 
	} 
 
	strcpy(AVI_stream.filename, filename);

	// Get a handle to the video stream within the AVI file	
	hr = AVIFileGetStream(pfile, &pstream, streamtypeVIDEO, 0); 
	if (hr != 0){ 
		//nprintf(("Warning", "AVI ==> Unable to open video stream in %s", filename)); 
		AVIFileRelease(pfile);
		return -1; 
	} 

	// Store the pointer to stream, since we'll need it later to read from disk
	AVI_stream.pstream = pstream;
	AVI_stream.pfile = pfile;

	// Get information on the stream
	hr = AVIStreamInfo( pstream, &avi_stream_info, sizeof(AVISTREAMINFO) );
	if (hr != 0){ 
		//nprintf(("Warning", "AVI ==> Unable to retrieve stream info in %s", filename)); 
		AVIStreamRelease(pstream);
		AVIFileRelease(pfile);
		return -1; 
	} 


	int buffer_size;
	
	int start_sample = AVIStreamStart(pstream);
	Assert( start_sample == 0 );

	int end_sample = AVIStreamEnd(pstream);
	Assert( end_sample >= start_sample );

	// store the number of frames in the AVI_info[] structure
	AVI_stream.num_frames = end_sample;		// start sample must be 0
	Assert(AVI_stream.num_frames == AVIStreamLength(pstream) );

	buffer_size = avi_stream_info.dwSuggestedBufferSize;
	Assert( buffer_size > 0 );
	AVI_stream.min_compressed_buffer_size = buffer_size;

	// determine the format of the AVI image data
	ubyte* format_buffer;
	long format_buffer_size;
	BITMAPINFO* bitmap_info;

	hr = AVIStreamFormatSize(pstream, 0, &format_buffer_size);
	Assert( format_buffer_size > 0 );

	format_buffer = (ubyte*) malloc(format_buffer_size);
	Assert(format_buffer != NULL);	// freed at the end of this function, after the palette has been copied into AVI_stream

	hr = AVIStreamReadFormat(pstream, 0, format_buffer, &format_buffer_size);
	bitmap_info = (BITMAPINFO*)format_buffer;


	switch ( bitmap_info->bmiHeader.biCompression ) {
		case BI_RLE8:
			break;

		default:
			Assert(0);
			break;
	}

	AVI_stream.w = bitmap_info->bmiHeader.biWidth;
	AVI_stream.h = bitmap_info->bmiHeader.biHeight;
	AVI_stream.bpp = bitmap_info->bmiHeader.biBitCount;
		
	// create the palette translation look-up table
	//
	// Transparency:  If the palette color is full green, then treat as transparent
	//						
	RGBQUAD* pal;
	pal = (RGBQUAD*)(bitmap_info->bmiColors);

	// Store the palette in the AVI stream structure
	for ( int i = 0; i < 256; i++ ) {
		AVI_stream.palette[i*3]	  = pal[i].rgbRed;
		AVI_stream.palette[i*3+1] = pal[i].rgbGreen;
		AVI_stream.palette[i*3+2] = pal[i].rgbBlue;
	}	


	//	memcpy(AVI_stream.palette, pal, 256*3);
	
/*
	int transparent_found = 0;
	for ( i = 0; i < 256; i++ ) {

		//nprintf(("AVI", "AVI ==> R: %d  G: %d  B: %d\n", pal[i].rgbRed, pal[i].rgbGreen, pal[i].rgbBlue));
		if ( pal[i].rgbRed < 5 && pal[i].rgbGreen > 250 && pal[i].rgbBlue < 5 ) {
			avi_stream->pal_translation[i]	= TRANSPARENT_INDEX;
			break;	// found transparent, continue in j for loop, since don't need check any more
		}
		else
			avi_stream->pal_translation[i] = palette_find(	pal[i].rgbRed, pal[i].rgbGreen, pal[i].rgbBlue ); 
	}	

	for ( j = i+1; j < 256; j++ ) {
		avi_stream->pal_translation[j] = palette_find(	pal[j].rgbRed, pal[j].rgbGreen, pal[j].rgbBlue ); 
	}
*/

	free(format_buffer);

	// set the flag to used, so to make sure we only process one AVI stream at a time
	AVI_stream.flags |= AVI_STREAM_F_USED;	


	return 0;
}
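AVI_stream_open() stores the frame count but not the timing information that AVISTREAMINFO already provides. A short sketch of how the same data yields the frame rate and duration (GetFramesPerSecond and GetDurationMs are illustrative names):

// Sketch: frame rate from AVISTREAMINFO and duration from the stream bounds.
double GetFramesPerSecond(const AVISTREAMINFO& si)
{
	// dwRate / dwScale is the stream's sample (frame) rate.
	return si.dwScale ? (double)si.dwRate / (double)si.dwScale : 0.0;
}

LONG GetDurationMs(PAVISTREAM pstream)
{
	// AVIStreamSampleToTime converts a sample position to milliseconds.
	return AVIStreamSampleToTime(pstream, AVIStreamEnd(pstream)) -
	       AVIStreamSampleToTime(pstream, AVIStreamStart(pstream));
}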
Example #7
0
// Play audio, starting at a given frame/sample
BOOL CALLBACK aviaudioPlay(HWND hwnd, PAVISTREAM pavi, LONG lStart, LONG lEnd, BOOL fWait)
{
	if (audioPlayable <= 0)
		return FALSE;

	recalc = 1;

	if (lStart < 0)
		lStart = AVIStreamStart(pavi);

	if (lEnd < 0)
		lEnd = AVIStreamEnd(pavi);

	if (lEnd <= lStart) {
		return FALSE;
	}

	if (!aviaudioOpenDevice(hwnd, pavi)) {
		if ((runmode == 0) || (runmode == 1)) {
			MessageOut(NULL, IDS_AOF, IDS_NOTE, MB_OK | MB_ICONEXCLAMATION);
		}

		return FALSE;
	}

	if (!sfPlaying) {
		// We're beginning play, so pause until we've filled the buffers
		// for a seamless start
		waveOutPause(shWaveOut);

		slBegin = lStart;
		slCurrent = lStart;
		slEnd = lEnd;
		sfPlaying = TRUE;
	} else {
		slEnd = lEnd;
	}

	// Known issue: playback can crash when started at a non-start position
	// after a stream has been pasted
	aviaudioiFillBuffers();

	// Now unpause the audio and away it goes!
	waveOutRestart(shWaveOut);

	// Caller wants us not to return until play is finished
	if (fWait) {
		while (swBuffersOut > 0)
			Yield();
	}

	return TRUE;
}
Example #8
-1
// Return the time in milliseconds corresponding to the currently playing audio sample, or -1 if no audio is playing.
LONG CALLBACK aviaudioTime(void)
{
	MMTIME      mmtime;

	if (audioPlayable <= 0)
		return -1;

	if (!sfPlaying)
		return -1;

	// Refresh the cached stream bounds if a recalculation was requested
	if (recalc) {
		streamEnd = AVIStreamEnd(spavi);
		streamStart = AVIStreamStart(spavi);
		recalc = 0;
	}

	if ((streamEnd <= streamStart) || (streamEnd <= 0))
		return -1;

	mmtime.wType = TIME_SAMPLES;

	waveOutGetPosition(shWaveOut, &mmtime, sizeof(mmtime));

	if (mmtime.wType == TIME_SAMPLES)
		return AVIStreamSampleToTime(spavi, slBegin) + muldiv32(mmtime.u.sample, 1000, sdwSamplesPerSec);
	else if (mmtime.wType == TIME_BYTES)
		return AVIStreamSampleToTime(spavi, slBegin) + muldiv32(mmtime.u.cb, 1000, sdwBytesPerSec);
	else
		return -1;
}
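aviaudioTime() reports the audio clock in milliseconds; a typical use is to map that back to the video frame that should be on screen, e.g. with AVIStreamTimeToSample. A minimal sketch (CurrentVideoFrame and paviVideo are illustrative names):

// Sketch: convert the current audio time into a video frame index for A/V sync.
LONG CurrentVideoFrame(PAVISTREAM paviVideo)
{
	LONG timeMs = aviaudioTime();
	if (timeMs < 0)
		return -1;	// no audio playing
	// AVIStreamTimeToSample converts milliseconds back into a sample (frame) index.
	return AVIStreamTimeToSample(paviVideo, timeMs);
}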