Example #1
int AVIOBJ::OpenAVI(LPCSTR szFile)										// Opens An AVI File (szFile)
{

    AVIFileInit();												// Opens The AVIFile Library
    pgf=NULL;

    if (AVIStreamOpenFromFile(&pavi, szFile, streamtypeVIDEO, 0, OF_READ, NULL) !=0)
        return(0);

    AVIStreamInfo(pavi, &psi, sizeof(psi));						// Reads Information About The Stream Into psi
    width=psi.rcFrame.right-psi.rcFrame.left;					// Width Is Right Side Of Frame Minus Left
    height=psi.rcFrame.bottom-psi.rcFrame.top;					// Height Is Bottom Of Frame Minus Top

    lastframe=AVIStreamLength(pavi);							// The Last Frame Of The Stream

    mpf=AVIStreamSampleToTime(pavi,lastframe)/lastframe;		// Calculate Rough Milliseconds Per Frame

    bmih.biSize = sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
    bmih.biPlanes = 1;											// Bitplanes
    bmih.biBitCount = 24;										// Bits Format We Want (24 Bit, 3 Bytes)
    bmih.biWidth = width;											// Width We Want
    bmih.biHeight = height;										// Height We Want
    bmih.biCompression = BI_RGB;								// Requested Mode = RGB

    hBitmap = CreateDIBSection (hdc, (BITMAPINFO*)(&bmih), DIB_RGB_COLORS, (void**)(&data), NULL, 0);
    SelectObject (hdc, hBitmap);								// Select hBitmap Into Our Device Context (hdc)

    pgf=AVIStreamGetFrameOpen(pavi, NULL);						// Create The PGETFRAME	Using Our Request Mode
    if (pgf==NULL) return(0);

    return(1);
}
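The function above follows the common NeHe-style AVI playback pattern, which pairs OpenAVI with a frame-grab and a cleanup routine. The sketch below is a minimal illustration of those two companions; it assumes the same members (pavi, pgf, hdc, hBitmap, data, width, height) plus an HDRAWDIB member hdd opened elsewhere with DrawDibOpen(), none of which are shown in this example.

// Minimal companion sketch (member names assumed as described above)
void AVIOBJ::GrabAVIFrame(int frame)						// Grabs A Frame From The Stream
{
    LPBITMAPINFOHEADER lpbi;								// Holds The Bitmap Header Information
    lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(pgf, frame);	// Grab Data From The AVI Stream
    // Pointer To Data Returned By AVIStreamGetFrame (Skip The Header Info To Get To The Data)
    char *pdata = (char *)lpbi + lpbi->biSize + lpbi->biClrUsed * sizeof(RGBQUAD);
    // Convert Data To The Requested Bitmap Format (Decoded Pixels End Up In data Via The DIB Section)
    DrawDibDraw(hdd, hdc, 0, 0, width, height, lpbi, pdata, 0, 0, width, height, 0);
}

void AVIOBJ::CloseAVI(void)									// Properly Closes The AVI File
{
    DeleteObject(hBitmap);									// Delete The Device Dependant Bitmap Object
    DrawDibClose(hdd);										// Close The DrawDib Handle
    AVIStreamGetFrameClose(pgf);							// Deallocate The GetFrame Resources
    AVIStreamRelease(pavi);									// Release The Stream
    AVIFileExit();											// Release The AVIFile Library
}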
Example #2
bool AVI::Load(std::string fname, bool loopflag, Texture *tex)				// Opens An AVI File
{
	// copy over looping flag
	loop=loopflag;

	// do we have a texture manager?
	if(!tex)
		return false;
	// copy it over
	tman=tex;

	hdc=CreateCompatibleDC(0);									// Get a compatible DC
	hdd = DrawDibOpen();										// Open A DrawDib Handle For Decoding Frames


	AVIFileInit();												// Opens The AVIFile Library

	// Opens The AVI Stream
	if (AVIStreamOpenFromFile(&pavi, fname.c_str(), streamtypeVIDEO, 0, OF_READ, NULL) !=0)
		return false;

	AVIStreamInfo(pavi, &psi, sizeof(psi));						// Reads Information About The Stream Into psi
	width=psi.rcFrame.right-psi.rcFrame.left;					// Width Is Right Side Of Frame Minus Left
	height=psi.rcFrame.bottom-psi.rcFrame.top;					// Height Is Bottom Of Frame Minus Top

	lastframe=AVIStreamLength(pavi);							// The Last Frame Of The Stream

	mpf=AVIStreamSampleToTime(pavi,lastframe)/lastframe;		// Calculate Rough Milliseconds Per Frame

	bmih.biSize = sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
	bmih.biPlanes = 1;											// Bitplanes	
	bmih.biBitCount = 24;										// Bits Format We Want (24 Bit, 3 Bytes)
	bmih.biWidth = 256;											// Width We Want (256 Pixels)
	bmih.biHeight = 256;										// Height We Want (256 Pixels)
	bmih.biCompression = BI_RGB;								// Requested Mode = RGB

	hBitmap = CreateDIBSection (hdc, (BITMAPINFO*)(&bmih), DIB_RGB_COLORS, (void**)(&data), NULL, NULL);
	SelectObject (hdc, hBitmap);								// Select hBitmap Into Our Device Context (hdc)

	pgf=AVIStreamGetFrameOpen(pavi, NULL);						// Create The PGETFRAME	Using Our Request Mode
	if (pgf==NULL)
		return false;

	// create the texture
	Image img;
	img.Create(256,256,false);									// set dimensions of texture
	img.SetImage(data);											// set the texture information
	texid=tman->Create(&img);									// create the texture

	// clear the time position 
	pos=0;

	// success
	return true;
}
Example #3
void VideoReader::Open(CString strFilePath)
{    
    AVIFileInit();

    LONG hr;  
    hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);
    if (hr != 0){ 
        // Handle failure.
        AfxMessageBox(L"Failed to open file, file must be an uncompressed video."); 
    }
    else
    {
        HRESULT          hr; 
        AVISTREAMINFO    strhdr; 
        LONG             lStreamSize; 
 

        // Determine the size of the format data using 
        // AVIStreamFormatSize. 
        AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize); 
        if (lStreamSize > sizeof(m_bi)) // Format too large? 
            return; 

        lStreamSize = sizeof(m_bi); 
        hr = AVIStreamReadFormat(m_pAviStream, 0, &m_bi, &lStreamSize); // Read format 
        if (m_bi.biCompression != BI_RGB) // Wrong compression format? 
            return; 

        hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr)); 

        // Create new AVI file using AVIFileOpen. 
        hr = AVIFileOpen(&m_pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL); 
        if (hr != 0) 
            return; 

        m_currentSize = AVIStreamStart(m_pAviStream);

        // Allocate memory for the bitmaps. 
        m_lpBuffer = (BYTE *)malloc(m_bi.biSizeImage); 
    }

}
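Example #3 only opens the stream, reads its format, opens the output file, and allocates m_lpBuffer; it never reads a frame or releases anything. A minimal, hypothetical continuation (the method names ReadNextFrame and Close are not part of the original) could look like this:

// Hypothetical continuation using the members set up in Open()
bool VideoReader::ReadNextFrame()
{
    if (m_currentSize >= AVIStreamEnd(m_pAviStream))         // Past the last sample?
        return false;

    // Read exactly one uncompressed frame into the preallocated buffer
    HRESULT hr = AVIStreamRead(m_pAviStream, m_currentSize, 1,
                               m_lpBuffer, m_bi.biSizeImage, NULL, NULL);
    ++m_currentSize;
    return SUCCEEDED(hr);
}

void VideoReader::Close()
{
    free(m_lpBuffer);                                        // Release the frame buffer
    AVIStreamRelease(m_pAviStream);                          // Release the source stream
    AVIFileRelease(m_pf);                                    // Close the output file created in Open()
    AVIFileExit();                                           // Balance the AVIFileInit() call
}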
Example #4
File: AVI.cpp  Project: DCubix/1.4.0
bool CAvi::OpenAVI( LPCWSTR szFile )
{
	AVISTREAMINFO psi;
	AVIFileInit();
	if( AVIStreamOpenFromFile( &m_pavi, szFile, streamtypeVIDEO, 0, OF_READ, NULL) !=0)
	{
		MessageBox( NULL, _T("Couldn't open the CAvi stream."), _T("VandaEngine Error"), MB_OK | MB_ICONERROR );
		return 0;
	}
	AVIStreamInfo(m_pavi, &psi, sizeof(psi));				// Reads Information About The Stream Into psi
	m_width=psi.rcFrame.right-psi.rcFrame.left;			// Width Is Right Side Of Frame Minus Left
	m_height=psi.rcFrame.bottom-psi.rcFrame.top;			// Height Is Bottom Of Frame Minus Top
	m_lastFrame = AVIStreamLength(m_pavi);	
	m_mpf=AVIStreamSampleToTime(m_pavi,m_lastFrame)/m_lastFrame;		// Calculate Rough Milliseconds Per Frame
	if( m_mpf == 0 )
	{
		MessageBox( NULL, _T("MPF is 0"), _T("VandaEngine Error"), MB_OK | MB_ICONINFORMATION );
		return 0;
	}

    BITMAPINFOHEADER bmih;
	bmih.biSize = sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
	bmih.biPlanes = 1;											// Bitplanes	
	bmih.biBitCount = 24;										// Bits Format We Want (24 Bit, 3 Bytes)
	bmih.biWidth = 1024;										// Width We Want (1024 Pixels)
	bmih.biHeight = 512;										// Height We Want (512 Pixels)
	bmih.biCompression = BI_RGB;								// Requested Mode = RGB

	m_hBitmap = CreateDIBSection (m_hDC, (BITMAPINFO*)(&bmih), DIB_RGB_COLORS, (void**)(&m_data), NULL, NULL);
	SelectObject (m_hDC, m_hBitmap);		// Select m_hBitmap Into Our Device Context (hdc)

	m_pgf=AVIStreamGetFrameOpen(m_pavi, NULL);	// Create The PGETFRAME	Using Our Request Mode
	if (m_pgf==NULL)
	{
		//An Error Occurred Opening The Frame
		MessageBox( NULL, _T("Couldn't open the CAvi frame."), _T("VandaEngine Error"), MB_OK | MB_ICONINFORMATION );
		return 0;
	}
	return 1;
}
Example #5
static FIMULTIBITMAP* ReadFromAvi(const char* filename) {
	int err=0;
	AVIFileInit();
	
	PAVISTREAM pavi; // Handle To An Open Stream
	if( AVIStreamOpenFromFile(&pavi, filename, streamtypeVIDEO, 0, OF_READ, NULL) != 0) {
		AVIFileExit();
		return NULL;
	}



	AVISTREAMINFO		psi;				// Pointer To A Structure Containing Stream Info
	AVIStreamInfo(pavi, &psi, sizeof(psi));				// Reads Information About The Stream Into psi
	int width  = psi.rcFrame.right-psi.rcFrame.left;			// Width Is Right Side Of Frame Minus Left
	int height = psi.rcFrame.bottom-psi.rcFrame.top;			// Height Is Bottom Of Frame Minus Top
	int frameCount = AVIStreamLength(pavi);							// The Last Frame Of The Stream

	double mpf = AVIStreamSampleToTime(pavi, frameCount) / (double)frameCount;		// Calculate Rough Milliseconds Per Frame

	PGETFRAME pgf = AVIStreamGetFrameOpen(pavi, NULL);				// Create The PGETFRAME Using Our Request Mode
	if (pgf==NULL)
	{
		// An Error Occurred Opening The Frame
		error("Failed To Open frame from AVI");
	}

	HDC hdc = CreateCompatibleDC(0);							// Memory DC The DIB Section Below Is Selected Into

	HDRAWDIB hdd = DrawDibOpen();													// Handle For Our Dib

	BITMAPINFOHEADER bmih;										// Header Information For DrawDibDraw Decoding
	bmih.biSize = sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
	bmih.biPlanes = 1;											// Bitplanes	
	bmih.biBitCount = 24;										// Bits Format We Want (24 Bit, 3 Bytes)
	bmih.biWidth = width;										// Width We Want (256 Pixels)
	bmih.biHeight = height;										// Height We Want (256 Pixels)
	bmih.biCompression = BI_RGB;								// Requested Mode = RGB

	char		*data;						// Pointer To Texture Data
	HBITMAP hBitmap = CreateDIBSection(hdc, (BITMAPINFO*)(&bmih), DIB_RGB_COLORS, (void**)(&data), NULL, NULL);
	SelectObject(hdc, hBitmap);								// Select hBitmap Into Our Device Context (hdc)

	// create a new freeimage anim
	someError=false;
	FIMULTIBITMAP* ret = FreeImage_OpenMultiBitmap(FIF_TIFF, "temp.tiff", TRUE, FALSE);
	if (!ret || someError) {
		error("Couldn't create multibitmap");
	}

	for (int frame=0; frame<frameCount; frame++) {
		fprintf(stderr, "Loading frame %i\n", frame);
		// Grab Data From The AVI Stream
		LPBITMAPINFOHEADER lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(pgf, frame);	
		// Pointer To Data Returned By AVIStreamGetFrame
		// (Skip The Header Info To Get To The Data)
		char* pdata = (char *)lpbi + lpbi->biSize + lpbi->biClrUsed * sizeof(RGBQUAD);	
		
		// Convert Data To Requested Bitmap Format
		DrawDibDraw(hdd, hdc, 0, 0, width, height, lpbi, pdata, 0, 0, width, height, 0);

		// copy into the freeimage thing
		FIBITMAP* fiBitmap = FreeImage_ConvertFromRawBits((BYTE*)data, width, height, ((width * 3) + 3) & ~3, 24, 0xFF0000, 0x00FF00, 0x0000FF);	// DIB scanlines are DWORD-aligned
/*		BYTE* src = (BYTE*)data;
		for (int y=0; y<height; y++) {
			BYTE* dst = FreeImage_GetScanLine(fiBitmap, y);
			for (int x=0; x<width; x++) {
				//src++;
				*dst++ = *src++;
				*dst++ = *src++;
				*dst++ = *src++;
			}
		}
*/
		FIBITMAP* grayBitmap = FreeImage_ConvertToGreyscale(fiBitmap);
		FreeImage_Unload(fiBitmap);

		FreeImage_AppendPage(ret, grayBitmap);
	}
	FreeImage_CloseMultiBitmap(ret);
	ret = FreeImage_OpenMultiBitmap(FIF_TIFF, "temp.tiff", FALSE, TRUE);

	DeleteObject(hBitmap);										// Delete The Device Dependant Bitmap Object
	DeleteDC(hdc);												// Delete The Memory DC Created Above
	DrawDibClose(hdd);											// Closes The DrawDib Device Context
	AVIStreamGetFrameClose(pgf);								// Deallocates The GetFrame Resources
	AVIStreamRelease(pavi);										// Release The Stream
	AVIFileExit();												// Release The File

	return ret;
}
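A short usage sketch for ReadFromAvi, iterating the greyscale pages of the returned FIMULTIBITMAP (the file name clip.avi is only a placeholder):

// Hypothetical caller: walk the frames converted to greyscale above
FIMULTIBITMAP* anim = ReadFromAvi("clip.avi");
if (anim) {
	int pages = FreeImage_GetPageCount(anim);
	for (int p = 0; p < pages; p++) {
		FIBITMAP* page = FreeImage_LockPage(anim, p);		// Access one frame
		// ... inspect or process the 8-bit greyscale frame here ...
		FreeImage_UnlockPage(anim, page, FALSE);			// FALSE: the page was not modified
	}
	FreeImage_CloseMultiBitmap(anim);
}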
Example #6
void VideoHelper::OpenVideo(CString strFilePath, FrameData& data)
{    
    AVIFileInit();

    LONG hr;  
    hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);
    if (hr != 0){ 
        // Handle failure.
        AfxMessageBox(L"Failed to open file."); 
    }
    else
    {
        PAVIFILE         pf; 
        PAVISTREAM       psSmall; 
        HRESULT          hr; 
        AVISTREAMINFO    strhdr; 
        BITMAPINFOHEADER bi; 
        BITMAPINFOHEADER biNew; 
        LONG             lStreamSize; 
        LPVOID           lpOld; 
        LPVOID           lpNew; 

        // Determine the size of the format data using 
        // AVIStreamFormatSize. 
        AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize); 
        if (lStreamSize > sizeof(bi)) // Format too large? 
            return; 

        lStreamSize = sizeof(bi); 
        hr = AVIStreamReadFormat(m_pAviStream, 0, &bi, &lStreamSize); // Read format 
        if (bi.biCompression != BI_RGB) // Wrong compression format? 
            return; 

        hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr)); 

        // Create new AVI file using AVIFileOpen. 
        hr = AVIFileOpen(&pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL); 
        if (hr != 0) 
            return; 

        // Set parameters for the new stream. 
        biNew = bi; 

        SetRect(&strhdr.rcFrame, 0, 0, (int) biNew.biWidth, 
            (int) biNew.biHeight); 

        // Create a stream using AVIFileCreateStream. 
        hr = AVIFileCreateStream(pf, &psSmall, &strhdr); 
        if (hr != 0) {            //Stream created OK? If not, close file. 
            AVIFileRelease(pf); 
            return; 
        } 

        // Set format of new stream using AVIStreamSetFormat. 
        hr = AVIStreamSetFormat(psSmall, 0, &biNew, sizeof(biNew)); 
        if (hr != 0) { 
            AVIStreamRelease(psSmall); 
            AVIFileRelease(pf); 
            return; 
        } 

        // Allocate memory for the bitmaps. 
        lpOld = malloc(bi.biSizeImage); 

        // Read the stream data using AVIStreamRead. 
        for (lStreamSize = AVIStreamStart(m_pAviStream); lStreamSize <
            AVIStreamEnd(m_pAviStream)/*1500*/; lStreamSize++) { 
                //Context::Oversubscribe(true);
                hr = AVIStreamRead(m_pAviStream, lStreamSize, 1, lpOld, bi.biSizeImage,
                    NULL, NULL); 
                //Context::Oversubscribe(false);
                //memcpy_s(lpNew, bi.biSizeImage, lpOld, bi.biSizeImage);
                data.m_BBP = bi.biBitCount;
                data.m_ColorPlanes = bi.biPlanes;
                data.m_EndHeight = bi.biHeight;
                data.m_EndWidth = bi.biWidth;
                data.m_pFrame = (BYTE*)lpOld;
                data.m_Pitch = bi.biWidth * (bi.biBitCount / 8);
                data.m_Size = bi.biSizeImage;
                data.m_StartHeight = 0;
                data.m_StartWidth = 0;
                lpNew = m_pVideoAgent->ProcessFrame(data);
               

                if(NULL != lpNew)
                {
                    // Save the compressed data using AVIStreamWrite.
                    hr = AVIStreamWrite(psSmall, lStreamSize, 1, lpNew,
                    biNew.biSizeImage, AVIIF_KEYFRAME, NULL, NULL);
                }
        } 
        free(lpOld);
        // Close the stream and file. 
        AVIStreamRelease(psSmall); 
        AVIFileRelease(pf); 
    }
    AVIFileExit();
}
Example #7
bool CLoaderAVI::loadFromFile(const CString& fileName)
{
    CApplication::getApp()->log(CString::fromUTF8("Chargement de la vidéo %1").arg(fileName));

#ifdef T_SYSTEM_WINDOWS
    AVIStreamGetFrameClose(m_pgf);

    // Release the streams
    if (m_video)
        AVIStreamRelease(m_video);

    if (m_audio)
    {
        //AVIStreamRelease(m_audio);
        Game::soundEngine->removeMusic(m_audio);
        delete m_audio;
        m_audio = nullptr;
    }

    Game::textureManager->deleteTexture(m_texture);
    AVIFileExit();
#endif

    m_time = 0;
    m_play = false;

    // Delete the previous texture
    if (m_texture)
    {
        Game::textureManager->deleteTexture(m_texture);
    }

#ifdef T_SYSTEM_WINDOWS

    AVIFileInit();

    // Open the video stream
    if (AVIStreamOpenFromFile(&m_video, fileName.toCharArray(), streamtypeVIDEO, 0, OF_READ, nullptr))
    {
        CApplication::getApp()->log(CString::fromUTF8("AVIStreamOpenFromFile : impossible de lire le flux video."), ILogger::Error);
        return false;
    }

    // Video stream information
    AVISTREAMINFO psi;

    if (AVIStreamInfo(m_video, &psi, sizeof(psi)))
    {
        CApplication::getApp()->log("AVIStreamInfo donne une erreur", ILogger::Error);
        return false;
    }

    // Video dimensions
    m_width = psi.rcFrame.right - psi.rcFrame.left;
    m_height = psi.rcFrame.bottom - psi.rcFrame.top;

    m_fileName = fileName;

    m_pixels.resize(4 * m_width * m_height);

    for (int i = 0; i < m_width * m_height; ++i)
    {
        m_pixels[4 * i + 0] = 0x00;
        m_pixels[4 * i + 1] = 0x00;
        m_pixels[4 * i + 2] = 0x00;
        m_pixels[4 * i + 3] = 0xFF;
    }

    // Create the texture
    CImage img(m_width, m_height, reinterpret_cast<CColor *>(&m_pixels[0]));

    m_texture = Game::textureManager->loadTexture(m_fileName, img, CTextureManager::FilterNone);


    m_lastFrame = AVIStreamLength(m_video);

    if (m_lastFrame == 0)
    {
        return false;
    }

    m_frameTime = AVIStreamSampleToTime(m_video, m_lastFrame) / m_lastFrame;
    m_pgf = AVIStreamGetFrameOpen(m_video, nullptr);

    // The frames cannot be retrieved
    if (m_pgf == nullptr)
    {
        CApplication::getApp()->log("AVIStreamGetFrameOpen retourne un pointeur nul", ILogger::Error);
        return false;
    }

    // Open the audio stream
    m_audio = new CMusic();
    m_audio->loadFromVideo(m_fileName);

    if (m_loop)
    {
        m_audio->setLoop();
    }

    m_audio->play();
    Game::soundEngine->addMusic(m_audio);

#endif

    //m_play = true;
    return true;
}
Example #8
bool VideoComponent::openAvi(const std::string& filename)
{
	// Stop any currently loaded avi
	closeAvi();

	AVIFileInit();							// Opens The AVIFile Library
	// Opens The AVI Stream
	if (AVIStreamOpenFromFile(&m_pavi, filename.c_str(), streamtypeVIDEO, 0, OF_READ, NULL) !=0)
	{
		GameLog::errorMessage("Error opening avi: %s", filename.c_str());
		// An Error Occurred Opening The Stream
		AVIFileExit();								// Release The File
		return false;
	}
	AVIStreamInfo(m_pavi, &m_psi, sizeof(m_psi));						// Reads Information About The Stream Into psi
	m_width = m_psi.rcFrame.right-m_psi.rcFrame.left;					// Width Is Right Side Of Frame Minus Left
	m_height = m_psi.rcFrame.bottom-m_psi.rcFrame.top;					// Height Is Bottom Of Frame Minus Top
	if (!m_resize)
	{
		// Size should be kept
		m_resizeWidth = m_width;
		m_resizeHeight = m_height;
	}
	m_lastframe = AVIStreamLength(m_pavi);								// The Last Frame Of The Stream
	m_timePerFrame = ((float)AVIStreamSampleToTime(m_pavi, m_lastframe) / (float) m_lastframe) / 1000.0f;	// Calculate Rough Seconds Per Frame
	m_bmih.biSize		= sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
	m_bmih.biPlanes		= 1;					// Bitplanes
	m_bmih.biBitCount	= 24;					// Bits Format We Want 24 / 8  = 3 bytes
	m_bmih.biWidth		= m_resizeWidth;		// Width We Want
	m_bmih.biHeight		= m_resizeHeight;		// Height We Want
	m_bmih.biCompression= BI_RGB;				// Requested Mode = RGB
	m_hBitmap = CreateDIBSection (m_hdc, (BITMAPINFO*)(&m_bmih), DIB_RGB_COLORS, (void**)(&m_data), NULL, NULL);	
	SelectObject (m_hdc, m_hBitmap);					// Select hBitmap Into Our Device Context (hdc)
	// Bitmapinfo header for decoding (needed for xvid)
	m_bmiavih.biSize = sizeof(BITMAPINFOHEADER);
	m_bmiavih.biPlanes			= 1;					// Bitplanes
	m_bmiavih.biBitCount		= 24;					// Bits Format We Want 24 / 8  = 3 bytes
	m_bmiavih.biWidth			= m_width;				// Width We Want
	m_bmiavih.biHeight			= m_height;				// Height We Want
	m_bmiavih.biCompression		= BI_RGB;				// Requested Mode = RGB
	// And some more infos
	m_bmiavih.biClrImportant	= 0;
	m_bmiavih.biClrUsed			= 0;
	m_bmiavih.biXPelsPerMeter	= 0;
	m_bmiavih.biYPelsPerMeter	= 0;
	m_bmiavih.biSizeImage = (((m_bmiavih.biWidth * 3) + 3) & 0xFFFC) * m_bmiavih.biHeight;
	m_pgf=AVIStreamGetFrameOpen(m_pavi, &m_bmiavih);// Create The PGETFRAME Using Our Request Mode
	if (m_pgf==0x0)
	{
		GameLog::errorMessage("Error opening first frame of avi: %s", filename.c_str());
		// An Error Occurred Opening The Frame
		DeleteObject(m_hBitmap);					// Delete The Device Dependant Bitmap Object
		AVIStreamRelease(m_pavi);					// Release The Stream
		AVIFileExit();								// Release The File
		return false;
	}
	m_fileName = filename;

	// Create buffer for converted data
	// width*height = count pixel; each pixel has 4 channels for rgba with each one byte
	int dataSize = 4*m_resizeWidth*m_resizeHeight;
	m_bgraData = new unsigned char[dataSize];
	// Initialize with 255 (black screen with full alpha)
	memset(m_bgraData, 255, dataSize);

	// Prepare horde texture stream named like the video file name, to get a unique name
	m_videoTexture = h3dCreateTexture(filename.c_str(), m_resizeWidth, m_resizeHeight, H3DFormats::TEX_BGRA8, H3DResFlags::NoTexMipmaps);
	if (m_videoTexture == 0)
	{
		GameLog::errorMessage("Error creating texture for playing avi: %s", filename.c_str());
		// Failure creating the dynamic texture
		closeAvi();
		return false;
	}

	// Find the sampler index within the material
	m_samplerIndex = h3dFindResElem(m_material, H3DMatRes::SamplerElem, H3DMatRes::SampNameStr, "albedoMap");	
	if (m_samplerIndex == -1)
	{
		GameLog::errorMessage("Error preparing material with resID %d for playing avi: %s", m_material, filename.c_str());
		// No sampler found in material
		closeAvi();
		return false;
	}

	// Store old sampler
	m_originalSampler = h3dGetResParamI(m_material, H3DMatRes::SamplerElem, m_samplerIndex, H3DMatRes::SampTexResI);

	
	// Now open the audio stream
	PAVISTREAM audioStream;
	if (AVIStreamOpenFromFile(&audioStream, filename.c_str(), streamtypeAUDIO, 0, OF_READ, NULL) == 0)
	{
		// Audio stream found
		// Get format info
		PCMWAVEFORMAT audioFormat;
		long formatSize = sizeof(audioFormat);
		int start = AVIStreamStart(audioStream);
		// TODO get channelsmask and use it
		AVIStreamReadFormat(audioStream, start, &audioFormat, &formatSize);
		long numSamples = AVIStreamLength(audioStream);
		int bitsPerSample = (audioFormat.wf.nAvgBytesPerSec * 8) / (audioFormat.wf.nSamplesPerSec * audioFormat.wf.nChannels);
		/*if (audioFormat.wf.wFormatTag == WAVE_FORMAT_MPEGLAYER3)
		{
			// TODO
			MPEGLAYER3WAVEFORMAT mp3Format;
			formatSize = sizeof(mp3Format);
			AVIStreamReadFormat(audioStream, start, &mp3Format, &formatSize);
		}*/

		// Create buffer with appropriate size
		long bufferSize = (bitsPerSample * numSamples) / 8;
		char* buffer = new char[bufferSize];
		// Read the audio data
		long bytesWritten = 0;
		AVIStreamRead(audioStream, start, numSamples, buffer, bufferSize, &bytesWritten, 0x0);

		if (bytesWritten > 0)
		{
			// Send the audio data to the sound component
			SoundResourceData eventData(buffer, bytesWritten, audioFormat.wf.nSamplesPerSec, bitsPerSample, audioFormat.wf.nChannels);
			GameEvent event(GameEvent::E_SET_SOUND_WITH_USER_DATA, &eventData, this);
			m_owner->executeEvent(&event);
			m_hasAudio = true;
		}

		// Delete the buffer data
		delete[] buffer;
	}

	if (m_autoStart)
		// Play video directly
		playAvi();

	return true;
}
Example #9
bool CMusic::loadFromVideo(const CString& fileName)
{
    m_loaded = false;
    m_fileName = fileName;
    m_file = nullptr;
    m_fromVideo = true;

#ifdef T_SYSTEM_WINDOWS

    m_sampleCount = 0;
    ALenum error = alGetError();

    CApplication::getApp()->log(CString::fromUTF8("Chargement de la musique de la vidéo %1").arg(m_fileName));

    // Open the audio stream
    if (AVIStreamOpenFromFile(&m_aviStream, m_fileName.toCharArray(), streamtypeAUDIO, 0, OF_READ, nullptr))
    {
        CApplication::getApp()->log("AVIStreamOpenFromFile : impossible de lire le flux audio", ILogger::Error);
        return false;
    }


    LONG buffer_size;
    AVIStreamRead(m_aviStream, AVIStreamStart(m_aviStream), (-1L), nullptr, 0, &buffer_size, nullptr);

    PBYTE tmp_format = new BYTE[buffer_size];
    AVIStreamReadFormat(m_aviStream, AVIStreamStart(m_aviStream), tmp_format, &buffer_size);
    LPWAVEFORMATEX wave_format = reinterpret_cast<LPWAVEFORMATEX>(tmp_format);

    // Read the sample count and the sample rate
    m_nbrSamples = AVIStreamLength(m_aviStream);
    m_sampleRate = wave_format->nSamplesPerSec;

    // Determine the format from the number of channels
    switch (wave_format->nChannels)
    {
        case 1:
            m_format = AL_FORMAT_MONO16;
            break;

        case 2:
            m_format = AL_FORMAT_STEREO16;
            break;

        case 4:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_QUAD16");
            break;

        case 6:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_51CHN16");
            break;

        case 7:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_61CHN16");
            break;

        case 8:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_71CHN16");
            break;

        default:
            return false;
    }

    // Create the OpenAL buffers
    if (m_buffer[0] == 0 || m_buffer[1] == 0)
    {
        alGenBuffers(2, m_buffer);

        // Error handling
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alGenBuffers", __LINE__);
            return false;
        }

        // The buffers are invalid
        if (m_buffer[0] == 0 || m_buffer[1] == 0)
        {
            CApplication::getApp()->log("Les buffers audio sont invalides", ILogger::Error);
            return false;
        }
    }

    // Create a source
    if (m_source == 0)
    {
        alGenSources(1, &m_source);

        // Error handling
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alGenSources", __LINE__);
            return false;
        }

        // The source is invalid
        if (m_source == 0)
        {
            CApplication::getApp()->log("La source audio est invalide", ILogger::Error);
            return false;
        }
    }

    // Fill both buffers
    readData(m_buffer[0], 44100);
    readData(m_buffer[1], 44100);

    // Queue the buffers filled with the samples just read
    alSourceQueueBuffers(m_source, 2, m_buffer);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourceQueueBuffers", __LINE__);
        return false;
    }

    // Source parameters
    alSourcei(m_source, AL_LOOPING, false);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourcei", __LINE__);
    }

    alSourcef(m_source, AL_PITCH, 1.0f);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__);
    }

    alSourcef(m_source, AL_GAIN, 1.0f);

    // Error handling
    if ((error = alGetError() ) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__);
    }

    alSource3f(m_source, AL_POSITION, 0.0f, 0.0f, 0.0f);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSource3f", __LINE__);
    }

    m_loaded = true;

#endif

    return true;
}
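Example #9 relies on a readData() helper that is not shown. The sketch below is an assumption about what it might look like, reusing the members set up above (m_aviStream, m_nbrSamples, m_sampleCount, m_format, m_sampleRate) and the same buffer-size query trick used for the wave format; its signature is guessed from the calls readData(m_buffer[0], 44100).

// Hypothetical readData(): fill one OpenAL buffer with PCM samples from the AVI audio stream
bool CMusic::readData(ALuint buffer, ALsizei nbSamples)
{
    // Clamp to the samples actually left in the stream
    if (m_sampleCount >= m_nbrSamples)
        return false;
    if (m_sampleCount + nbSamples > m_nbrSamples)
        nbSamples = m_nbrSamples - m_sampleCount;

    // Ask how many bytes these samples occupy before allocating
    LONG bufferSize = 0;
    AVIStreamRead(m_aviStream, m_sampleCount, nbSamples, nullptr, 0, &bufferSize, nullptr);

    // Read the samples, then hand the interleaved PCM data to OpenAL
    PBYTE samples = new BYTE[bufferSize];
    LONG bytesRead = 0;
    AVIStreamRead(m_aviStream, m_sampleCount, nbSamples, samples, bufferSize, &bytesRead, nullptr);
    m_sampleCount += nbSamples;

    alBufferData(buffer, m_format, samples, bytesRead, m_sampleRate);
    delete[] samples;

    return true;
}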
Example #10
bool Movie::reload(const io::stringc &Filename, const s32 Resolution)
{
    if (DataPointer_)
        close();
    
    /* Information message */
    io::Log::message("Load movie: \"" + Filename + "\"");
    io::Log::upperTab();
    
    /* General settings */
    SMovieSequencePacket* MovieData = new SMovieSequencePacket;
    
    DataPointer_                = MovieData;
    
    MovieData->Next             = MovieData->Frame      = 0;
    MovieData->State            = MOVIESTATE_STOPED;
    MovieData->Time             = MovieData->LastTime   = 0;
    MovieData->RawData          = 0;
    MovieData->Resolution       = Resolution;
    
    MovieData->hDeviceContext   = CreateCompatibleDC(0);
    MovieData->hDrawDIB         = DrawDibOpen();
    
    AVIFileInit();
    
    /* Open video stream */
    if (AVIStreamOpenFromFile(&MovieData->pVideoStream, Filename.c_str(), streamtypeVIDEO, 0, OF_READ, 0) != 0)
        return exitWithError("Could not open video stream");
    if (AVIStreamInfo(MovieData->pVideoStream, &MovieData->VideoStreamInfo, sizeof(MovieData->VideoStreamInfo)) != 0)
        return exitWithError("Video stream information process failed");
    
    MovieData->VideoLastFrame = AVIStreamLength(MovieData->pVideoStream);
    
    if (MovieData->VideoLastFrame == -1)
        return exitWithError("Video stream length is invalid");
    
    MovieData->VideoMPF = AVIStreamSampleToTime(MovieData->pVideoStream, MovieData->VideoLastFrame) / MovieData->VideoLastFrame;
    
    if (MovieData->VideoMPF == -1)
        return exitWithError("Video stream sample is invalid");
    
    #if 0
    
    /* Open audio stream */
    if (AVIStreamOpenFromFile(&MovieData->pAudioStream, Filename.c_str(), streamtypeAUDIO, 0, OF_READ, 0))
        return exitWithError("Could not open audio stream");
    if (AVIStreamInfo(MovieData->pAudioStream, &MovieData->AudioStreamInfo, sizeof(MovieData->AudioStreamInfo)))
        return exitWithError("Audio stream information process failed");
    
    MovieData->AudioLastFrame = AVIStreamLength(MovieData->pAudioStream);
    
    if (MovieData->AudioLastFrame == -1)
        return exitWithError("Audio stream length is invalid");
    
    MovieData->AudioMPF = AVIStreamSampleToTime(MovieData->pAudioStream, MovieData->AudioLastFrame) / MovieData->AudioLastFrame;
    
    if (MovieData->AudioMPF == -1)
        return exitWithError("Audio stream sample is invalid");
    
    #endif
    
    /* Start configuration process */
    MovieData->Size.Width   = MovieData->VideoStreamInfo.rcFrame.right  - MovieData->VideoStreamInfo.rcFrame.left;
    MovieData->Size.Height  = MovieData->VideoStreamInfo.rcFrame.bottom - MovieData->VideoStreamInfo.rcFrame.top;
    
    MovieData->BitmapInfoHeader.biSize          = sizeof(BITMAPINFOHEADER);
    MovieData->BitmapInfoHeader.biPlanes        = 1;
    MovieData->BitmapInfoHeader.biBitCount      = 24;
    MovieData->BitmapInfoHeader.biWidth         = MovieData->Resolution;
    MovieData->BitmapInfoHeader.biHeight        = MovieData->Resolution;
    MovieData->BitmapInfoHeader.biCompression   = BI_RGB;
    
    /* Create the movie bitmap context */
    MovieData->hBitmap = CreateDIBSection(
        MovieData->hDeviceContext,
        (BITMAPINFO*)(&MovieData->BitmapInfoHeader),
        DIB_RGB_COLORS,
        (void**)(&MovieData->RawData),
        0, 0
    );

    if (!MovieData->hBitmap)
        return exitWithError("Could not create device independent bitmap (DIB) for video stream");
    
    SelectObject(MovieData->hDeviceContext, MovieData->hBitmap);
    
    MovieData->BitmapInfoHeader.biWidth         = MovieData->Size.Width;
    MovieData->BitmapInfoHeader.biHeight        = MovieData->Size.Height;
    MovieData->BitmapInfoHeader.biSizeImage     = 0;
    MovieData->BitmapInfoHeader.biClrUsed       = 0;
    MovieData->BitmapInfoHeader.biClrImportant  = 0;
    
    /* Get first video frame */
    MovieData->pGetFrame = AVIStreamGetFrameOpen(MovieData->pVideoStream, &MovieData->BitmapInfoHeader);
    
    if (!MovieData->pGetFrame)
        return exitWithError("Could not open first video stream");
    
    io::Log::lowerTab();
    
    return true;
}
HRESULT CAVIFileReader::Initialize(IN char *pszFileName)
{

    HRESULT hr = S_OK;

    
    //
    // would do better argument checking in a real-life application
    //

    _ASSERTE(pszFileName);


    //
    // log file name and requested wave format
    //

    LogMessage("CAVIFileReader::Initialize started. file [%s]", 
                pszFileName);


    //
    // initialize avi file library. AVIFileExit should be called on the same 
    // thread when AVI library is no longer needed
    //
    // AVIFileExit is called in the destructor. 
    //
    // Assumption: instances of CAVIFileReader are initialized and destroyed on
    // the same thread
    //
    
    AVIFileInit();

    
    //
    // open the first audio stream in the file
    //

    hr = AVIStreamOpenFromFile(&m_pAudioStream,
                               pszFileName,
                               streamtypeAUDIO,
                               0,
                               OF_READ | OF_SHARE_DENY_WRITE,
                               NULL);

    if (FAILED(hr))
    {
        LogError("CAVIFileReader::Initialize "
                 "Failed to open stream from the file");

        m_pAudioStream = NULL;

        return hr;
    }


    //
    // read the size of the stream's format
    //

    LONG nFormatSize = 0;

    hr = AVIStreamReadFormat(m_pAudioStream, 0, NULL, &nFormatSize);

    if ( FAILED(hr) || (0 == nFormatSize) )
    {
        LogError("CAVIFileReader::Initialize"
                 "Failed to get stream format's size");

        m_pAudioStream->Release();
        m_pAudioStream = NULL;

        return E_FAIL;
    }


    //
    // allocate memory for audio format. keep it in a waveformatex structure. 
    // if the structure returned is waveformat, still allocate waveformatex
    //

    nFormatSize = max((LONG)sizeof(WAVEFORMATEX), nFormatSize);
       
    m_pWaveFormat = (WAVEFORMATEX*)AllocateMemory(nFormatSize);

    if (NULL == m_pWaveFormat)
    {
        LogError("CAVIFileReader::Initialize "
                 "Failed to allocate memory for wave format, size %ld", 
                 nFormatSize);

        m_pAudioStream->Release();
        m_pAudioStream = NULL;

        return E_OUTOFMEMORY;

    }


    //
    // read stream format into allocated structure
    //

    hr = AVIStreamReadFormat(m_pAudioStream, 0, m_pWaveFormat, &nFormatSize);

    if (FAILED(hr))
    {
        LogError("CAVIFileReader::Initialize "
                 "Failed to read stream format");

        m_pAudioStream->Release();
        m_pAudioStream = NULL;

        FreeMemory(m_pWaveFormat);
        m_pWaveFormat = NULL;

        return hr;
    }


    //
    // log stream's format
    //

    LogMessage("CAVIFileReader::CAVIFileReader stream opened");
    LogFormat(m_pWaveFormat);


    LogMessage("CAVIFileReader::CAVIFileReader finished");

    return S_OK;
}
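The comments in Initialize state that AVIFileExit is called in the destructor. A minimal sketch of that cleanup (the real destructor is not shown; member handling is assumed from Initialize) could be:

// Hypothetical cleanup matching the comments in Initialize()
CAVIFileReader::~CAVIFileReader()
{
    if (NULL != m_pAudioStream)
    {
        m_pAudioStream->Release();      // release the audio stream
        m_pAudioStream = NULL;
    }

    if (NULL != m_pWaveFormat)
    {
        FreeMemory(m_pWaveFormat);      // free the wave format structure
        m_pWaveFormat = NULL;
    }

    // balance the AVIFileInit call made in Initialize; per the assumption
    // above, this runs on the same thread that called Initialize
    AVIFileExit();
}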