HRESULT CBSoundAVI::InitializeBuffer(PAVISTREAM Stream)
{
	// Prepare a streaming sound buffer from the audio stream of an AVI file.
	// Reads the stream's wave format, computes the total PCM payload size and
	// creates the sound buffer. Returns E_FAIL on any AVI/format error,
	// otherwise the result of CreateSoundBuffer().
	if (!Stream) return E_FAIL;

	SetStreaming(true);
	m_Type = SOUND_SFX;
	m_Looping = false;
	m_AudioStream = Stream;

	// First query the required format-buffer size, then read the format itself.
	LONG FormatSize;
	if (AVIStreamReadFormat(m_AudioStream, 0, NULL, &FormatSize) != 0)
		return E_FAIL;

	// Guard: a format block smaller than PCMWAVEFORMAT would make the
	// memcpy below read past the end of the allocation.
	if (FormatSize < (LONG)sizeof(PCMWAVEFORMAT))
		return E_FAIL;

	LPWAVEFORMAT Format = (LPWAVEFORMAT)new BYTE[FormatSize];
	if (AVIStreamReadFormat(m_AudioStream, 0, Format, &FormatSize) != 0) {
		delete [] (BYTE*)Format;
		return E_FAIL;
	}

	// Stream length is in samples (blocks); one block is nBlockAlign bytes.
	m_TotalDataLength = AVIStreamLength(m_AudioStream) * Format->nBlockAlign;

	memcpy(&m_Format, Format, sizeof(PCMWAVEFORMAT));
	m_Format.wf.wFormatTag = WAVE_FORMAT_PCM; // buffer is always created as PCM

	// create buffer
	HRESULT ret = CreateSoundBuffer(m_TotalDataLength, (PCMWAVEFORMAT*)Format);
	delete [] (BYTE*)Format;
	return ret;
}
static BOOL AVIFILE_FormatsEqual(PAVISTREAM avi1, PAVISTREAM avi2)
{
  /* Compare the stream formats of two AVI streams at their respective start
   * positions. Returns TRUE only when both format sizes and format bytes
   * match exactly. */
  LONG    cb1, cb2, first1, first2;
  LPVOID  buf1 = NULL, buf2 = NULL;
  BOOL    equal = FALSE;

  assert(avi1 != NULL && avi2 != NULL);

  /* stream starts, then format sizes — different sizes can never be equal */
  first1 = AVIStreamStart(avi1);
  first2 = AVIStreamStart(avi2);

  if (FAILED(AVIStreamFormatSize(avi1, first1, &cb1)) ||
      FAILED(AVIStreamFormatSize(avi2, first2, &cb2)) ||
      cb1 != cb2)
    return FALSE;

  /* sizes match — fetch both formats and compare the raw bytes */
  buf1 = HeapAlloc(GetProcessHeap(), 0, cb1);
  if (buf1 == NULL)
    return FALSE;

  if (SUCCEEDED(AVIStreamReadFormat(avi1, first1, buf1, &cb1))) {
    buf2 = HeapAlloc(GetProcessHeap(), 0, cb1);
    if (buf2 != NULL &&
        SUCCEEDED(AVIStreamReadFormat(avi2, first2, buf2, &cb1)))
      equal = (memcmp(buf1, buf2, cb1) == 0);
  }

  HeapFree(GetProcessHeap(), 0, buf2);
  HeapFree(GetProcessHeap(), 0, buf1);

  return equal;
}
sint32 AVIReadTunnelStream::ReadFormat(VDPosition lFrame, void *pFormat, long *plSize) {
	// Forward the format query to the wrapped VFW stream. The VFW API takes a
	// 32-bit frame position, so a 64-bit VDPosition that does not survive the
	// round trip through LONG is saturated toward the matching extreme.
	LONG pos;
	if (lFrame != (VDPosition)(LONG)lFrame)
		pos = (lFrame < 0) ? (LONG)0x80000000 : (LONG)0x7FFFFFFF;
	else
		pos = (LONG)lFrame;

	return AVIStreamReadFormat(pas, pos, pFormat, plSize);
}
bool CAviToBmp::Open(LPCTSTR Path)
{
	// Open an AVI file, grab its first video stream, read the stream's
	// BITMAPINFO format and create a GetFrame handle for decoding frames.
	// Returns FALSE on any failure, with the failing HRESULT kept in m_hr.
	// NOTE(review): on mid-function failure the partially acquired members
	// (m_pFile, m_pStream, m_pBmpInfo) are presumably released by Close() —
	// confirm against the class implementation.
	Close();	// release any previously opened file/stream first
	if (FAILED(m_hr = AVIFileOpen(&m_pFile, Path, OF_READ, NULL)))
		return(FALSE);
	if (FAILED(m_hr = AVIFileGetStream(m_pFile, &m_pStream, streamtypeVIDEO, 0)))
		return(FALSE);
	m_FrameCount = AVIStreamLength(m_pStream);
	long Start = AVIStreamStart(m_pStream);
	if (Start < 0)
		return(FALSE);
	// Two-phase format read: first query the required size with a NULL
	// buffer, then allocate and read the format for real.
	long FmtSize;
	if (FAILED(m_hr = AVIStreamReadFormat(m_pStream, Start, NULL, &FmtSize)))
		return(FALSE);
	m_pBmpInfo = (LPBITMAPINFO)new BYTE[FmtSize];
	if (FAILED(m_hr = AVIStreamReadFormat(m_pStream, Start, m_pBmpInfo, &FmtSize)))
		return(FALSE);
	// Ask VFW to decode to the best display format rather than a caller-
	// supplied BITMAPINFOHEADER (the flag value is passed in place of the
	// header pointer, as the API allows).
	m_pGetFrame = AVIStreamGetFrameOpen(m_pStream, (LPBITMAPINFOHEADER)AVIGETFRAMEF_BESTDISPLAYFMT);
	if (m_pGetFrame == NULL)
		return(FALSE);
	return(TRUE);
}
bool CCamAvi::Initialize (char* filename) { long hr; AVISTREAMINFO info; m_init=false; m_pos=0; AVIFileInit (); hr=AVIFileOpen (&m_paf, filename, OF_READ, NULL); if (hr!=0) return false; hr=AVIFileGetStream (m_paf, &m_pas, streamtypeVIDEO, 0); if (hr!=0) return false; hr= AVIStreamInfo (m_pas, &info, sizeof (info)); if (hr!=0) return false; hr=sizeof (m_bi); hr= AVIStreamReadFormat (m_pas, 0, &m_bi, &hr); if (hr!=0) return false; if (m_bi.bmiHeader.biCompression!=0) { // find decompressor m_hic=ICOpen (ICTYPE_VIDEO, info.fccHandler ,ICMODE_DECOMPRESS); if (m_hic==0) return false; } m_data=new unsigned char[m_bi.bmiHeader.biSizeImage]; AVISTREAMINFO a; AVIStreamInfo (m_pas, &a, sizeof (a)); m_length=a.dwLength; hr=AVIStreamBeginStreaming (m_pas, 0, 100, 1000); m_init=true; m_FrameRate = (double)a.dwRate / a.dwScale; m_pos=AVIStreamStart (m_pas); m_data=new BYTE[m_bi.bmiHeader.biSizeImage]; if (m_bi.bmiHeader.biCompression!=0) { hr=ICDecompressGetFormat (m_hic, &m_bi, &m_bi_out); } else m_bi_out=m_bi; m_frame.Create (m_bi_out.bmiHeader.biWidth, m_bi_out.bmiHeader.biHeight, m_bi_out.bmiHeader.biBitCount); return true; }
void VideoReader::Open(CString strFilePath)
{
	// Open the uncompressed video stream of strFilePath for reading, read its
	// BITMAPINFOHEADER into m_bi, create the ".Processed.avi" output file and
	// allocate the frame buffer. Silently returns on failure after the
	// initial open (matching the original sample-derived control flow).
	AVIFileInit();

	LONG hr;
	hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);

	if (hr != 0)
	{
		// Handle failure.
		AfxMessageBox(L"Failed to open file, file must be an uncompressed video.");
	}
	else
	{
		// Note: the original shadowed the outer 'hr' with a second HRESULT
		// here; a single variable is used now.
		AVISTREAMINFO strhdr;
		LONG lStreamSize;

		// Determine the size of the format data using AVIStreamFormatSize,
		// and bail out if the query itself fails (previously unchecked).
		if (AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize) != 0)
			return;
		if (lStreamSize > (LONG)sizeof(m_bi))	// Format too large?
			return;

		lStreamSize = sizeof(m_bi);
		hr = AVIStreamReadFormat(m_pAviStream, 0, &m_bi, &lStreamSize);	// Read format
		if (hr != 0)	// previously unchecked
			return;
		if (m_bi.biCompression != BI_RGB)	// Wrong compression format?
			return;

		hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr));

		// Create new AVI file using AVIFileOpen.
		hr = AVIFileOpen(&m_pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL);
		if (hr != 0)
			return;

		m_currentSize = AVIStreamStart(m_pAviStream);

		// Allocate memory for the bitmaps.
		// NOTE(review): malloc result is not checked here — callers appear to
		// rely on m_lpBuffer being valid; confirm and add handling if needed.
		m_lpBuffer = (BYTE *)malloc(m_bi.biSizeImage);
	}
}
///////////////////////////////////////////////////////// //get bitmpainfoheader information ///////////////////////////////////////////////////////// CAviMan::GetBmpHeader(BITMAPINFO *pBmpHeader) { LONG bmp_size = sizeof( BITMAPINFO ); AVIStreamReadFormat(m_aviVideoStream, 0, &(pBmpHeader->bmiHeader), &bmp_size); // Read format memcpy(&m_bmpHeader,pBmpHeader,bmp_size); m_dwStartFrame = AVIStreamStart(m_aviVideoStream); m_dwTotalFrame = AVIStreamEnd(m_aviVideoStream); //get number of frames m_dwImageSize = m_bmpHeader.bmiHeader.biWidth * m_bmpHeader.bmiHeader.biHeight; switch(m_bmpHeader.bmiHeader.biBitCount) { case(24):m_dwImageSize*=3;break; case(16):m_dwImageSize*=2;break; } }
BOOL FMPlayerDShow::GetAVIInfo(const tchar* pFileName)
{
	// Inspect an AVI file and record its video FourCC, frame rate and stream
	// format (biCompression FourCC) in the corresponding members.
	// Returns TRUE when a video stream was found, FALSE otherwise.
	AVIFileInit();

	BOOL gotInfo = FALSE;
	PAVIFILE pFile;

	if (AVIFileOpen(&pFile, pFileName, OF_SHARE_DENY_NONE, 0L) == 0)
	{
		AVIFILEINFO fileInfo;
		memset(&fileInfo, 0, sizeof(fileInfo));
		AVIFileInfo(pFile, &fileInfo, sizeof(AVIFILEINFO));

		// CComPtr releases the stream automatically on scope exit.
		CComPtr<IAVIStream> spStream;
		if (AVIFileGetStream(pFile, &spStream, streamtypeVIDEO, 0) == AVIERR_OK)
		{
			AVISTREAMINFO streamInfo;
			AVIStreamInfo(spStream, &streamInfo, sizeof(streamInfo));

			m_FourCC = FormatFourCC(streamInfo.fccHandler);
			m_FrameRate = (double)streamInfo.dwRate / (double)streamInfo.dwScale;

			// Pull the BITMAPINFOHEADER out of the stream format block to get
			// the compression FourCC.
			LONG cbFormat;
			if (0 == AVIStreamFormatSize(spStream, 0, &cbFormat))
			{
				char* pFormat = new char[cbFormat];
				if (0 == AVIStreamReadFormat(spStream, 0, pFormat, &cbFormat))
				{
					BITMAPINFOHEADER* pBih = (BITMAPINFOHEADER*)pFormat;
					m_StreamFormat = FormatFourCC(pBih->biCompression);
				}
				delete[] pFormat;
			}

			gotInfo = TRUE;
		}

		AVIFileRelease(pFile);
	}

	AVIFileExit();
	return gotInfo;
}
void VideoHelper::OpenVideo(CString strFilePath, FrameData& data)
{
	// Opens the uncompressed (BI_RGB) video stream of strFilePath, creates a
	// parallel output file "<path>.Processed.avi" with the same stream
	// header/format, then pushes every frame through m_pVideoAgent->
	// ProcessFrame() and writes the processed frames to the output stream.
	// Silently returns on any failure after the initial open.
	// NOTE(review): several early returns leave m_pAviStream open and the
	// AVIFileInit/AVIFileExit pair unbalanced; the malloc result is unused
	// unchecked — sample-derived control flow kept as-is.
	AVIFileInit();

	LONG hr;
	hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);

	if (hr != 0){
		// Handle failure.
		AfxMessageBox(L"Failed to open file.");
	}
	else
	{
		PAVIFILE pf;
		PAVISTREAM psSmall;
		HRESULT hr;	// NOTE(review): shadows the outer LONG hr
		AVISTREAMINFO strhdr;
		BITMAPINFOHEADER bi;
		BITMAPINFOHEADER biNew;
		LONG lStreamSize;
		LPVOID lpOld;
		LPVOID lpNew;

		// Determine the size of the format data using
		// AVIStreamFormatSize.
		AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize);
		if (lStreamSize > sizeof(bi)) // Format too large?
			return;

		lStreamSize = sizeof(bi);
		hr = AVIStreamReadFormat(m_pAviStream, 0, &bi, &lStreamSize); // Read format

		if (bi.biCompression != BI_RGB) // Wrong compression format?
			return;

		hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr));

		// Create new AVI file using AVIFileOpen.
		hr = AVIFileOpen(&pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL);
		if (hr != 0)
			return;

		// Set parameters for the new stream (same format, frame rect sized
		// from the source bitmap).
		biNew = bi;
		SetRect(&strhdr.rcFrame, 0, 0, (int) biNew.biWidth, (int) biNew.biHeight);

		// Create a stream using AVIFileCreateStream.
		hr = AVIFileCreateStream(pf, &psSmall, &strhdr);
		if (hr != 0) {	//Stream created OK? If not, close file.
			AVIFileRelease(pf);
			return;
		}

		// Set format of new stream using AVIStreamSetFormat.
		hr = AVIStreamSetFormat(psSmall, 0, &biNew, sizeof(biNew));
		if (hr != 0) {
			AVIStreamRelease(psSmall);
			AVIFileRelease(pf);
			return;
		}

		// Allocate memory for the bitmaps.
		lpOld = malloc(bi.biSizeImage);

		// Read the stream data using AVIStreamRead.
		for (lStreamSize = AVIStreamStart(m_pAviStream);
		     lStreamSize < AVIStreamEnd(m_pAviStream)/*1500*/; lStreamSize++) {
			//Context::Oversubscribe(true);
			hr = AVIStreamRead(m_pAviStream, lStreamSize, 1, lpOld, bi.biSizeImage, NULL, NULL);
			//Context::Oversubscribe(false);
			//memcpy_s(lpNew, bi.biSizeImage, lpOld, bi.biSizeImage);

			// Describe the raw frame for the processing agent; the agent
			// returns the processed buffer (or NULL to skip the frame).
			data.m_BBP = bi.biBitCount;
			data.m_ColorPlanes = bi.biPlanes;
			data.m_EndHeight = bi.biHeight;
			data.m_EndWidth = bi.biWidth;
			data.m_pFrame = (BYTE*)lpOld;
			data.m_Pitch = bi.biWidth * (bi.biBitCount / 8);
			data.m_Size = bi.biSizeImage;
			data.m_StartHeight = 0;
			data.m_StartWidth = 0;

			lpNew = m_pVideoAgent->ProcessFrame(data);

			if(NULL != lpNew)
			{
				// Save the compressed data using AVIStreamWrite.
				hr = AVIStreamWrite(psSmall, lStreamSize, 1, lpNew,
				                    biNew.biSizeImage, AVIIF_KEYFRAME, NULL, NULL);
			}
		}
		free(lpOld);

		// Close the stream and file.
		AVIStreamRelease(psSmall);
		AVIFileRelease(pf);
	}
	AVIFileExit();
}
bool CMusic::loadFromVideo(const CString& fileName) { m_loaded = false; m_fileName = fileName; m_file = nullptr; m_fromVideo = true; #ifdef T_SYSTEM_WINDOWS m_sampleCount = 0; ALenum error = alGetError(); CApplication::getApp()->log(CString::fromUTF8("Chargement de la musique de la vidéo %1").arg(m_fileName)); // Ouverture du flux audio if (AVIStreamOpenFromFile(&m_aviStream, m_fileName.toCharArray(), streamtypeAUDIO, 0, OF_READ, nullptr)) { CApplication::getApp()->log("AVIStreamOpenFromFile : impossible de lire le flux audio", ILogger::Error); return false; } LONG buffer_size; AVIStreamRead(m_aviStream, AVIStreamStart(m_aviStream), (-1L), nullptr, 0, &buffer_size, nullptr); PBYTE tmp_format = new BYTE[buffer_size]; AVIStreamReadFormat(m_aviStream, AVIStreamStart(m_aviStream), tmp_format, &buffer_size); LPWAVEFORMATEX wave_format = reinterpret_cast<LPWAVEFORMATEX>(tmp_format); // Lecture du nombre d'échantillons et du taux d'échantillonnage m_nbrSamples = AVIStreamLength(m_aviStream); m_sampleRate = wave_format->nSamplesPerSec; // Détermination du format en fonction du nombre de canaux switch (wave_format->nChannels) { case 1: m_format = AL_FORMAT_MONO16; break; case 2: m_format = AL_FORMAT_STEREO16; break; case 4: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_QUAD16"); break; case 6: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_51CHN16"); break; case 7: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_61CHN16"); break; case 8: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_71CHN16"); break; default: return false; } // Création des buffers OpenAL if (m_buffer[0] == 0 || m_buffer[1] == 0) { alGenBuffers(2, m_buffer); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { 
CSoundEngine::displayOpenALError(error, "alGenBuffers", __LINE__); return false; } // Les buffers sont invalides if (m_buffer[0] == 0 || m_buffer[1] == 0) { CApplication::getApp()->log("Les buffers audio sont invalides", ILogger::Error); return false; } } // Création d'une source if (m_source == 0) { alGenSources(1, &m_source); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alGenSources", __LINE__); return false; } // La source est invalide if (m_source == 0) { CApplication::getApp()->log("La source audio est invalide", ILogger::Error); return false; } } // On remplit les deux buffers readData(m_buffer[0], 44100); readData(m_buffer[1], 44100); // Remplissage avec les échantillons lus alSourceQueueBuffers(m_source, 2, m_buffer); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSourceQueueBuffers", __LINE__); return false; } // Paramètres de la source alSourcei(m_source, AL_LOOPING, false); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alGetSourcei", __LINE__); } alSourcef(m_source, AL_PITCH, 1.0f); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__); } alSourcef(m_source, AL_GAIN, 1.0f); // Traitement des erreurs if ((error = alGetError() ) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__); } alSource3f(m_source, AL_POSITION, 0.0f, 0.0f, 0.0f); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSource3f", __LINE__); } m_loaded = true; #endif return true; }
PAVISTREAM GetAudioStream(TSTR name,TCHAR *dir)
	{
	// Open the AVI file 'name' (searching the sound dir and then the scene
	// dir as fallbacks), extract its first audio stream and verify it is PCM.
	// Returns the stream (caller releases) or NULL on any failure.
	HRESULT hr;
	PAVIFILE pfile;
	PAVISTREAM pstream = NULL;
	BOOL res = TRUE;
#ifndef INTERIM_64_BIT	// CCJ
	// RB 5/10/99: Reworked this a bit. Added the current scene dir as a possible search location.
	// Also now using SplitPathFile() instead of doing it by hand.
	hr = AVIFileOpen(&pfile,name,OF_READ,NULL);
	if (hr) {
		// Direct open failed — retry with just the file name in the given
		// sound directory, then in the current scene's directory.
		TSTR fileName, tryName;
		SplitPathFile(name, NULL, &fileName);

		// Try the given directory (which is the sound dir)
		tryName = TSTR(dir) + TSTR(_T("\\")) + fileName;
		hr = AVIFileOpen(&pfile,tryName,OF_READ,NULL);

		if (hr) {
			// Try the scene directory
			TSTR sceneName = GetCOREInterface()->GetCurFilePath();
			TSTR scenePath;
			SplitPathFile(sceneName, &scenePath, NULL);
			tryName = scenePath + TSTR(_T("\\")) + fileName;
			hr = AVIFileOpen(&pfile,tryName,OF_READ,NULL);
			}

#if 0
		// Try the file in the given directory
		int i = name.Length()-1;
		while (i>0) {
			if (name[i]=='\\' || name[i]==':' || name[i]=='/') {
				i++;
				break;
				}
			i--;
			}
		if (name.Length()-i>0) {
			TSTR newname = TSTR(dir) + TSTR(_T("\\")) + name.Substr(i,name.Length()-i);
			hr = AVIFileOpen(&pfile,newname,OF_READ,NULL);
			}
#endif
		}
	if (hr) return NULL;

	// File open succeeded; the file handle can be released once the stream
	// has been obtained (the stream keeps its own reference).
	AVIFileGetStream(pfile,&pstream,streamtypeAUDIO,0);
	AVIFileRelease(pfile);
	if (!pstream) return NULL;

	// Verify it's PCM: read the wave format (l==0 means no format data).
	PCMWAVEFORMAT wf;
	LONG l = sizeof(wf);
	AVIStreamReadFormat(pstream,0,&wf,&l);
	if (!l) {
		AVIStreamRelease(pstream);
		return NULL;
		}
	if (wf.wf.wFormatTag != WAVE_FORMAT_PCM) {
		AVIStreamRelease(pstream);
		return NULL;
		}
#endif	// INTERIM_64_BIT
	return pstream;
	}
int load_avi_file() { bitm.valid=false; const char *avifilename = filename->get().c_str(); HRESULT res = AVIFileOpen(&m_aviFile, avifilename, OF_READ, NULL); if (res!=AVIERR_OK) { printf("Couldn't open avi file %s\n",filename->get().c_str()); return 0; } res = AVIFileGetStream(m_aviFile, &streamVid, streamtypeVIDEO, 0); if (res!=AVIERR_OK) { AVIFileRelease(m_aviFile); m_aviFile = NULL; streamVid = NULL; printf("Couldn't get stream"); return 0; } LONG format_length = 0; AVIStreamReadFormat(streamVid,0,NULL,&format_length); //if format_data is not a reasonable size, fail if (format_length>128) { printf("Format data too big"); return 0; } //make room for at least 128 bytes, sizeof(int) aligned int format_data[(128/sizeof(int)) + 1]; AVIStreamReadFormat(streamVid,0,format_data,&format_length); BITMAPINFOHEADER *bi = (BITMAPINFOHEADER *)format_data; //only 24 bit output is supported if (bi->biBitCount!=24) { printf("Bitcount %d not supported",bi->biBitCount); return 0; } // Create the PGETFRAME getFrame = AVIStreamGetFrameOpen(streamVid,NULL); //unable to decode the .avi? if (getFrame==NULL) { printf("AVIStreamGetFrameOpen returned NULL"); return 0; } // Define the length of the video (necessary for loop reading) // and its size. num_frames = AVIStreamLength(streamVid); if (num_frames<1) { printf("Zero frames"); return 0; } AVISTREAMINFO psi; AVIStreamInfo(streamVid, &psi, sizeof(AVISTREAMINFO)); width = psi.rcFrame.right - psi.rcFrame.left; height = psi.rcFrame.bottom - psi.rcFrame.top; dwRate = psi.dwRate; dwScale = psi.dwScale; bitm.bpp=(int)(bi->biBitCount/8); bitm.bformat=GL_BGR; bitm.size_x=width; bitm.size_y=height; bitm.valid=true; return 1; }
void CAviHelper::AVItoBmp(const wstring& strAVIFileName, const wstring& strBmpDir) { AVIFileInit(); PAVIFILE avi; int res = AVIFileOpen(&avi, WS2S(strAVIFileName).c_str(), OF_READ, NULL); int n = GetLastError(); if (res!=AVIERR_OK) { //an error occures if (avi!=NULL) AVIFileRelease(avi); return ; } AVIFILEINFO avi_info; AVIFileInfo(avi, &avi_info, sizeof(AVIFILEINFO)); PAVISTREAM pStream; res=AVIFileGetStream(avi, &pStream, streamtypeVIDEO /*video stream*/, 0 /*first stream*/); if (res!=AVIERR_OK) { if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); return ; } //do some task with the stream int iNumFrames; int iFirstFrame; iFirstFrame = AVIStreamStart(pStream); if (iFirstFrame==-1) { //Error getteing the frame inside the stream if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); return ; } iNumFrames = AVIStreamLength(pStream); if (iNumFrames==-1) { //Error getteing the number of frames inside the stream if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); return ; } //getting bitmap from frame BITMAPINFOHEADER bih; ZeroMemory(&bih, sizeof(BITMAPINFOHEADER)); bih.biBitCount=24; //24 bit per pixel bih.biClrImportant=0; bih.biClrUsed = 0; bih.biCompression = BI_RGB; bih.biPlanes = 1; bih.biSize = 40; bih.biXPelsPerMeter = 0; bih.biYPelsPerMeter = 0; //calculate total size of RGBQUAD scanlines (DWORD aligned) bih.biSizeImage = (((bih.biWidth * 3) + 3) & 0xFFFC) * bih.biHeight ; PGETFRAME pFrame; pFrame=AVIStreamGetFrameOpen(pStream, NULL ); AVISTREAMINFO streaminfo; AVIStreamInfo(pStream,&streaminfo,sizeof(AVISTREAMINFO)); //Get the first frame BITMAPINFOHEADER bih2; long lsize = sizeof(bih2); int index= 0; for (int i = iFirstFrame; i < iNumFrames; i++) { index= i-iFirstFrame; BYTE* pDIB = (BYTE*) AVIStreamGetFrame(pFrame, index); // AVIStreamReadFormat(pStream,index,&bih2,&lsize); BITMAPFILEHEADER stFileHdr; BYTE* Bits=new BYTE[bih2.biSizeImage]; AVIStreamRead(pStream,index,1,Bits,bih2.biSizeImage,NULL,NULL); //RtlMoveMemory(Bits, pDIB + 
sizeof(BITMAPINFOHEADER), bih2.biSizeImage); bih2.biClrUsed =0; stFileHdr.bfOffBits=sizeof(BITMAPFILEHEADER)+sizeof(BITMAPINFOHEADER); stFileHdr.bfSize=sizeof(BITMAPFILEHEADER); stFileHdr.bfType=0x4d42; CString FileName; FileName.Format(_T("Frame-%05d.bmp"), index); CString strtemp; strtemp.Format(_T("%s\\%s"), strBmpDir.c_str(), FileName); FILE* fp=_tfopen(strtemp ,_T("wb")); fwrite(&stFileHdr,1,sizeof(BITMAPFILEHEADER),fp); fwrite(&bih2,1,sizeof(BITMAPINFOHEADER),fp); int ff = fwrite(Bits,1,bih2.biSizeImage,fp); int e = GetLastError(); fclose(fp); ///// delete Bits; //CreateFromPackedDIBPointer(pDIB, index); } AVIStreamGetFrameClose(pFrame); //close the stream after finishing the task if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); }
// AVI_stream_open() will open the AVI file and prepare it for reading, but will not
// store any of the frame data.
//
//	returns:   0 ==> success
//            !0 ==> could not open the AVI stream
//
// The filename is expected to be an absolute pathname (or file in the current working directory)
//
int AVI_stream_open(char* filename)
{
	if ( !AVI_stream_inited )
		AVI_stream_init();

	int				hr;
	PAVIFILE			pfile;
	PAVISTREAM		pstream;
	AVISTREAMINFO	avi_stream_info;

	// Only one AVI stream may be processed at a time.
	Assert( !(AVI_stream.flags & AVI_STREAM_F_USED) );

	// Open the AVI file
	// NOTE(review): failure paths below return -1 without releasing pfile —
	// kept as-is to preserve the module's existing control flow.
	hr = AVIFileOpen(&pfile, filename, OF_SHARE_DENY_WRITE, 0);
	if (hr != 0){
//		nprintf(("Warning", "AVI ==> Unable to open %s", filename));
		return -1;
	}

	strcpy(AVI_stream.filename, filename);

	// Get a handle to the video stream within the AVI file
	hr = AVIFileGetStream(pfile, &pstream, streamtypeVIDEO, 0);
	if (hr != 0){
		//nprintf(("Warning", "AVI ==> Unable to open video stream in %s", filename));
		return -1;
	}

	// Store the pointer to stream, since we'll need it later to read from disk
	AVI_stream.pstream = pstream;
	AVI_stream.pfile = pfile;

	// Get information on the stream
	hr = AVIStreamInfo( pstream, &avi_stream_info, sizeof(AVISTREAMINFO) );
	if (hr != 0){
		//nprintf(("Warning", "AVI ==> Unable to retreive stream info in %s", filename));
		return -1;
	}

	int buffer_size;

	int start_sample = AVIStreamStart(pstream);
	Assert( start_sample == 0 );

	int end_sample = AVIStreamEnd(pstream);
	Assert( end_sample >= start_sample );

	// store the number of frames in the AVI_info[] structure
	AVI_stream.num_frames = end_sample;		// start sample must be 0
	Assert(AVI_stream.num_frames == AVIStreamLength(pstream) );

	// Get information on the stream
	// NOTE(review): duplicate of the AVIStreamInfo call above — harmless but
	// redundant.
	hr = AVIStreamInfo( pstream, &avi_stream_info, sizeof(AVISTREAMINFO) );
	if (hr != 0){
		//nprintf(("Warning", "AVI ==> Unable to retreive stream info in %s", filename));
		return -1;
	}

	buffer_size = avi_stream_info.dwSuggestedBufferSize;
	Assert( buffer_size > 0 );
	AVI_stream.min_compressed_buffer_size = buffer_size;

	// determine the format of the AVI image data
	ubyte* format_buffer;
	long format_buffer_size;
	BITMAPINFO* bitmap_info;

	// Two-phase read: query the format size, allocate, then read it.
	hr = AVIStreamFormatSize(pstream, 0, &format_buffer_size);
	Assert( format_buffer_size > 0 );

	format_buffer = (ubyte*) malloc(format_buffer_size);
	Assert(format_buffer != NULL);
	// format_buffer is free'ed when AVI is free'ed, since memory is used by b_info member in AVI_info[] structure
	// NOTE(review): that comment is stale — format_buffer is freed at the end
	// of this function after the palette has been copied out.

	hr = AVIStreamReadFormat(pstream, 0, format_buffer, &format_buffer_size);
	bitmap_info = (BITMAPINFO*)format_buffer;

	// Only 8-bit RLE compressed streams are supported.
	switch ( bitmap_info->bmiHeader.biCompression ) {
		case BI_RLE8:
			break;
		default:
			Assert(0);
			break;
	}

	AVI_stream.w = bitmap_info->bmiHeader.biWidth;
	AVI_stream.h = bitmap_info->bmiHeader.biHeight;
	AVI_stream.bpp = bitmap_info->bmiHeader.biBitCount;

	// create the palette translation look-up table
	//
	// Transparency:  If the palette color is full green, then treat as transparent
	//
	RGBQUAD* pal;
	pal = (RGBQUAD*)(bitmap_info->bmiColors);

	// Store the palette in the AVI stream structure (RGBQUAD -> packed RGB).
	for ( int i = 0; i < 256; i++ ) {
		AVI_stream.palette[i*3] = pal[i].rgbRed;
		AVI_stream.palette[i*3+1] = pal[i].rgbGreen;
		AVI_stream.palette[i*3+2] = pal[i].rgbBlue;
	}

//	memcpy(AVI_stream.palette, pal, 256*3);

/*
	int transparent_found = 0;
	for ( i = 0; i < 256; i++ ) {

		//nprintf(("AVI", "AVI ==> R: %d  G: %d  B: %d\n", pal[i].rgbRed, pal[i].rgbGreen, pal[i].rgbBlue));

		if ( pal[i].rgbRed < 5 && pal[i].rgbGreen > 250 && pal[i].rgbBlue < 5 ) {
			avi_stream->pal_translation[i]	= TRANSPARENT_INDEX;
			break;	// found transparent, continue in j for loop, since don't need check any more
		}
		else
			avi_stream->pal_translation[i] = palette_find(	pal[i].rgbRed, pal[i].rgbGreen, pal[i].rgbBlue );
	}

	for ( j = i+1; j < 256; j++ ) {
		avi_stream->pal_translation[j] = palette_find(	pal[j].rgbRed, pal[j].rgbGreen, pal[j].rgbBlue );
	}
*/

	free(format_buffer);

	// set the flag to used, so to make sure we only process one AVI stream at a time
	AVI_stream.flags |= AVI_STREAM_F_USED;

	return 0;
}
HRESULT CAviBitmap::Init()
{
	// Open m_szFileName, grab its first video stream, read the stream format
	// and set up a 32-bit RGB GETFRAME decoder plus the first-sample index
	// and total sample count. On any failure, m_szLastErrorMsg is set, all
	// partially acquired resources are released via ReleaseMemory(), and the
	// failing HRESULT (or E_FAIL) is returned.

	//Open file
	HRESULT hr = AVIFileOpen(&m_pAviFile, m_szFileName, OF_READ, NULL);
	if(hr != S_OK)
	{
		m_szLastErrorMsg.Format(_T("Unable to Open the Movie File"));
		ReleaseMemory();
		return hr;
	}

	//Get video stream
	hr = AVIFileGetStream(m_pAviFile, &m_pAviStream, streamtypeVIDEO /*video stream*/, 0 /*first stream*/);
	if(hr != S_OK)
	{
		m_szLastErrorMsg.Format(_T("Unable to Get the video stream"));
		ReleaseMemory();
		return hr;
	}

	hr = AVIStreamInfo(m_pAviStream, &m_aviInfo, sizeof(AVISTREAMINFO));
	if(hr != S_OK)
	{
		m_szLastErrorMsg.Format(_T("Unable to Get the video stream info"));
		ReleaseMemory();
		return hr;
	}

	// Trace the stream's codec handler for diagnostics.
	CString szFourCC;
	FourCC2Str(m_aviInfo.fccHandler, szFourCC);
	AfxTrace(_T("fccHandler=%s, 0x%08X\n"), szFourCC, m_aviInfo.fccHandler);

	ZeroMemory(&m_biWanted, sizeof(m_biWanted));
	LONG lFormat = sizeof(m_biWanted);
	hr = AVIStreamReadFormat(m_pAviStream, 0, &m_biWanted, &lFormat);
	if(hr != S_OK)
	{
		m_szLastErrorMsg.Format(_T("Unable to Get the foramt of the 1st frame"));
		ReleaseMemory();
		return hr;
	}

	// Request decoded frames as uncompressed 32-bit RGB.
	m_biWanted.biCompression = BI_RGB;
	m_biWanted.biBitCount = 32;
	m_biWanted.biSizeImage = m_biWanted.biWidth * 4 * m_biWanted.biHeight;

	//Get the GETFRAME handle
	m_pGetFrame = AVIStreamGetFrameOpen(m_pAviStream, &m_biWanted);
	if(m_pGetFrame == NULL)
	{
		m_szLastErrorMsg.Format(_T("Unable to Get the GETFRAME handle"));
		ReleaseMemory();
		return E_FAIL;
	}

	//Get the 1st sample
	m_lFirstSample = AVIStreamStart(m_pAviStream);
	if(m_lFirstSample == -1)
	{
		m_szLastErrorMsg.Format(_T("Unable to Get the first sample"));
		ReleaseMemory();
		return E_FAIL;
	}

	//Get the total sample count
	m_lSampleCount = AVIStreamLength(m_pAviStream);
	if(m_lSampleCount == -1)
	{
		m_szLastErrorMsg.Format(_T("Unable to Get the sample count"));
		ReleaseMemory();
		return E_FAIL;
	}

	//Done
	return S_OK;
}
HRESULT CAviBitmap::GetAllFrames(LPCTSTR lpszFolderName)
{
	// Decode up to 101 samples of the open stream and save each as
	// "Frame###.bmp" in lpszFolderName (or the current directory when NULL).
	// For raw-RGB frames the data is additionally read with AVIStreamRead and
	// compared against the decoded DIB as a consistency check.
	// Requires a prior successful Init(); always ends with ReleaseMemory().
	if(m_pGetFrame == NULL)
	{
		m_szLastErrorMsg.Format(_T("Not initialized yet"));
		return E_FAIL;
	}

	HRESULT hr = S_OK;

	// Format buffer large enough for a BITMAPINFO plus a full 256-entry
	// palette.
	int nBmpInfoHdrSize = sizeof(BITMAPINFO) + sizeof(RGBQUAD) * 256;
	BITMAPINFOHEADER* lpBmpInfoHdr = (BITMAPINFOHEADER*)(new BYTE[nBmpInfoHdrSize]);
	LONG lpcbFormat = nBmpInfoHdrSize;	// in/out size for AVIStreamReadFormat

	BYTE* lpDib = NULL;
	BYTE* lpBuffer = NULL;
	LONG lBytes = 0, lSamples = 0;
	BOOL bReadRaw = FALSE;
	int nPos = 0;
	int nSampleCount = min(m_lSampleCount, 101);	// cap at 101 frames
	for(nPos = 0; nPos < nSampleCount; nPos++)
	{
		//Get the frame format
		hr = AVIStreamReadFormat(m_pAviStream, nPos, lpBmpInfoHdr, &lpcbFormat);
		if(hr != S_OK)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the sample format: %d"), nPos);
			break;
		}

		lpBuffer = NULL;
		//Try to read raw data when the bitmap is BI_RGB
		if(lpBmpInfoHdr->biCompression == BI_RGB
			&& (lpBmpInfoHdr->biBitCount == 24 || lpBmpInfoHdr->biBitCount == 32))
		{
			//Get the frame data
			// NOTE(review): buffer is sized from m_biWanted.biSizeImage but the
			// memcmp below uses lpBmpInfoHdr->biSizeImage — confirm the two
			// sizes always agree for raw-RGB streams.
			lpBuffer = new BYTE[m_biWanted.biSizeImage];
			hr = AVIStreamRead(m_pAviStream, nPos, 1, lpBuffer, m_biWanted.biSizeImage, &lBytes, &lSamples);
			if(hr != S_OK)
			{
				m_szLastErrorMsg.Format(_T("Unable to Get the sample data: %d"), nPos);
				break;
			}
		}
		else
		{
			CString szFourCC;
			FourCC2Str(m_aviInfo.fccHandler, szFourCC);
			AfxTrace(_T("Non-RGB format at frame(%03d)=%s, 0x%08X\n"), nPos, szFourCC, lpBmpInfoHdr->biCompression);
		}

		//Get the frame at nPos (decoded DIB from the GETFRAME handle)
		lpDib = (BYTE*)AVIStreamGetFrame(m_pGetFrame, nPos);
		if(lpDib == NULL)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the sample: %d"), nPos);
			hr = E_FAIL;
			break;
		}

		//compare the data retrieved in 2 ways if needed
		if(lpBuffer != NULL)
		{
			if(memcmp(lpBuffer, lpDib + sizeof(BITMAPINFOHEADER), lpBmpInfoHdr->biSizeImage) != 0)
			{
				m_szLastErrorMsg.Format(_T("not equals: %d"), nPos);
				hr = E_FAIL;
				break;
			}
		}

		CString szFileName;
		if(lpszFolderName == NULL)
		{
			szFileName.Format(_T(".\\Frame%03d.bmp"), nPos);
		}
		else
		{
			szFileName.Format(_T("%s\\Frame%03d.bmp"), lpszFolderName, nPos);
		}
		BITMAPINFOHEADER* pTemp = (BITMAPINFOHEADER*)lpDib;
//		hr = SaveBitmap(lpBmpInfoHdr, lpBuffer, lpBmpInfoHdr->biSizeImage, szFileName);
		// Save the decoded DIB (header skipped) using the requested format.
		hr = SaveBitmap(&m_biWanted, lpDib + sizeof(BITMAPINFOHEADER), m_biWanted.biSizeImage, szFileName);

		if(lpBuffer != NULL)
		{
			delete [] lpBuffer;
			lpBuffer = NULL;
		}
		//Done
	}

	// Loop may break with lpBuffer still allocated — free it here.
	if(lpBuffer != NULL)
	{
		delete [] lpBuffer;
		lpBuffer = NULL;
	}
	if(lpBmpInfoHdr != NULL)
	{
		delete [] lpBmpInfoHdr;
		lpBmpInfoHdr = NULL;
	}

	ReleaseMemory();

	return hr;
}
// Paint a PCM waveform (mono or stereo, 8- or 16-bit) for the time range
// [lStart, lStart+lLen) of the given audio stream into *prc on 'hdc'.
// Out-of-stream portions of the range are painted with a dark background;
// non-PCM streams get only the plain background.
void PaintAudio( HDC hdc, PRECT prc, PAVISTREAM pavi, LONG lStart, LONG lLen)
{
#ifndef INTERIM_64_BIT	// CCJ
    LPVOID lpAudio=NULL;
    PCMWAVEFORMAT wf;
    int i;
    int x,y;
    int w,h;
    BYTE b;
    HBRUSH hbr;
    RECT rc = *prc;
    LONG lBytes;
    LONG l, lLenOrig = lLen;
    LONG lWaveBeginTime = AVIStreamStartTime(pavi);
    LONG lWaveEndTime = AVIStreamEndTime(pavi);

    //
    // We can't draw before the beginning of the stream - adjust
    //
    if (lStart < lWaveBeginTime) {
	lLen -= lWaveBeginTime - lStart;
	lStart = lWaveBeginTime;
	// right justify the legal samples in the rectangle - don't stretch
	rc.left = rc.right - (int)muldiv32(rc.right - rc.left, lLen, lLenOrig);
    }

    //
    // We can't draw past the end of the stream
    //
    if (lStart + lLen > lWaveEndTime) {
	lLenOrig = lLen;
	lLen = max(0, lWaveEndTime - lStart);	// maybe nothing to draw!
	// left justify the legal samples in the rectangle - don't stretch
	rc.right = rc.left + (int)muldiv32(rc.right - rc.left, lLen, lLenOrig);
    }

    // Now start working with samples, not time
    l = lStart;
    lStart = AVIStreamTimeToSample(pavi, lStart);
    lLen   = AVIStreamTimeToSample(pavi, l + lLen) - lStart;

    //
    // Get the format of the wave data (l==0 on return means no format).
    //
    l = sizeof(wf);
    AVIStreamReadFormat(pavi, lStart, &wf, &l);
    if (!l)
	return;

    w = rc.right - rc.left;
    h = rc.bottom - rc.top;

    //
    // We were starting before the beginning or continuing past the end.
    // We're not painting in the whole original rect --- use a dark background
    //
    if (rc.left > prc->left) {
	SelectObject(hdc, GetStockObject(DKGRAY_BRUSH));
	PatBlt(hdc, prc->left, rc.top, rc.left - prc->left, rc.bottom - rc.top, PATCOPY);
    }
    if (rc.right < prc->right) {
	SelectObject(hdc, GetStockObject(DKGRAY_BRUSH));
	PatBlt(hdc, rc.right, rc.top, prc->right - rc.right, rc.bottom - rc.top, PATCOPY);
    }

#define BACKBRUSH  (GetSysColor(COLOR_BTNFACE))		// background
#define MONOBRUSH  (GetSysColor(COLOR_BTNSHADOW))	// for mono audio
#define LEFTBRUSH  (RGB(0,0,255))			// left channel
#define RIGHTBRUSH (RGB(0,255,0))			// right channel
#define HPOSBRUSH  (RGB(255,0,0))			// current position

    //
    // Paint the background
    //
    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(BACKBRUSH));
    PatBlt(hdc, rc.left, rc.top, w, h, PATCOPY);
    DeleteObject(SelectObject(hdc, hbr));

    //
    // !!! we can only paint PCM data right now.  Sorry!
    //
    if (wf.wf.wFormatTag != WAVE_FORMAT_PCM)
	return;

    //
    // How many bytes are we painting? Alloc some space for them
    //
    lBytes = lLen * wf.wf.nChannels * wf.wBitsPerSample / 8;
    if (!lpAudio)
	lpAudio = GlobalAllocPtr (GHND, lBytes);
    else if ((LONG)GlobalSizePtr(lpAudio) < lBytes)
	lpAudio = GlobalReAllocPtr(lpAudio, lBytes, GMEM_MOVEABLE);
    if (!lpAudio)
	return;

    //
    // Read in the wave data (l returns the count of samples actually read).
    //
    AVIStreamRead(pavi, lStart, lLen, lpAudio, lBytes, NULL, &l);
    if (l != lLen)
	return;

// Local integer-only MulDiv used by the scaling below.
#define MulDiv(a,b,c) (UINT)((DWORD)(UINT)(a) * (DWORD)(UINT)(b) / (UINT)(c))

    //
    // !!! Flickers less painting it NOW or LATER?
    // First show the current position as a bar
    //
    //hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(HPOSBRUSH));
    //PatBlt(hdc, prc->right / 2, prc->top, 1, prc->bottom - prc->top, PATCOPY);
    //DeleteObject(SelectObject(hdc, hbr));

    //
    // Paint monochrome wave data
    //
    if (wf.wf.nChannels == 1) {

	//
	// Draw the x-axis
	//
	hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(MONOBRUSH));
	y = rc.top + h/2;
	PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

	//
	// 8 bit data is centred around 0x80
	//
	if (wf.wBitsPerSample == 8) {
	    for (x=0; x<w; x++) {

		// which byte of audio data belongs at this pixel?
		b = *((HPBYTE)lpAudio + muldiv32(x, lLen, w));

		if (b > 0x80) {
		    i = y - MulDiv(b - 0x80, (h / 2), 128);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = y + MulDiv(0x80 - b, (h / 2), 128);
		    PatBlt(hdc, rc.left + x, y, 1, i - y, PATCOPY);
		}
	    }
	}

	//
	// 16 bit data is centred around 0x00
	//
	else if (wf.wBitsPerSample == 16) {
	    for (x=0; x<w; x++) {

		// which byte of audio data belongs at this pixel?
		i = *((HPINT)lpAudio + muldiv32(x,lLen,w));

		if (i > 0) {
		    i = y - (int) ((LONG)i * (h/2) / 32768);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = (int) ((LONG)i * (h/2) / 32768);
		    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
		}
	    }
	}
	DeleteObject(SelectObject(hdc, hbr));

    } // endif mono

    //
    // Draw stereo waveform data — left channel in the top half of the rect,
    // right channel in the bottom half (samples are interleaved L,R).
    //
    else if (wf.wf.nChannels == 2) {

	//
	// 8 bit data is centred around 0x80
	//
	if (wf.wBitsPerSample == 8) {

	    // Left channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(LEFTBRUSH));
	    y = rc.top + h/4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		b = *((HPBYTE)lpAudio + muldiv32(x,lLen,w) * 2);
		if (b > 0x80) {
		    i = y - MulDiv(b-0x80,(h/4),128);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = y + MulDiv(0x80-b,(h/4),128);
		    PatBlt(hdc, rc.left+x, y, 1, i-y, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));

	    // Right channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(RIGHTBRUSH));
	    y = rc.top + h * 3 / 4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		b = *((HPBYTE)lpAudio + muldiv32(x,lLen,w) * 2 + 1);
		if (b > 0x80) {
		    i = y - MulDiv(b-0x80,(h/4),128);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = y + MulDiv(0x80-b,(h/4),128);
		    PatBlt(hdc, rc.left+x, y, 1, i-y, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));
	}

	//
	// 16 bit data is centred around 0x00
	//
	else if (wf.wBitsPerSample == 16) {

	    // Left channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(LEFTBRUSH));
	    y = rc.top + h/4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		i = *((HPINT)lpAudio + muldiv32(x,lLen,w) * 2);
		if (i > 0) {
		    i = y - (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));

	    // Right channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(RIGHTBRUSH));
	    y = rc.top + h * 3 / 4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		i = *((HPINT)lpAudio + muldiv32(x,lLen,w) * 2 + 1);
		if (i > 0) {
		    i = y - (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));
	}
    } // endif stereo

    if (lpAudio) {
	GlobalFreePtr(lpAudio);
	lpAudio = NULL;
    }
#endif	// INTERIM_64_BIT
}
// Build a per-channel summed "envelope" table (SAT) of the PCM samples in
// 'pavi', at roughly 'resolution' entries per second, for fast waveform
// drawing. Also records the per-channel maximum bucket delta in max[].
// Non-PCM streams, read failures and allocation failures leave the object
// with sat == NULL / satLen == 0.
FilteredWave::FilteredWave(PAVISTREAM pavi,int resolution)
	{
#ifndef INTERIM_64_BIT	// CCJ
	PCMWAVEFORMAT wf;
	LPVOID lpAudio=NULL;
	LONG l, lBytes;
	DWORD *ptr, sum[2], pos, v;
	LONG s = AVIStreamStartTime(pavi);
	LONG e = AVIStreamEndTime(pavi);
	LONG len = e-s;
	LONG sstart = AVIStreamTimeToSample(pavi,s);
	LONG send = AVIStreamTimeToSample(pavi,e);
	LONG slen = send-sstart;
	HPBYTE bptr;
	HPINT iptr;
	int in, b, av[2];
	int j,k;
	DWORD i;

	sat    = NULL;
	satLen = 0;
	// Stream start/end in internal time ticks.
	start  = (s*TIME_TICKSPERSEC)/1000;
	end    = (e*TIME_TICKSPERSEC)/1000;

	// Read the wave format; l==0 on return means the stream has no format.
	l = sizeof(wf);
	AVIStreamReadFormat(pavi,0,&wf,&l);
	if (!l) {
		return;
		}
	// Only PCM data is supported.
	if (wf.wf.wFormatTag != WAVE_FORMAT_PCM) {
		return;
		}

	// Read the entire sample range into one buffer.
	lBytes = slen * wf.wf.nChannels * wf.wBitsPerSample/8;
	lpAudio = GlobalAllocPtr(GHND,lBytes);
	if (!lpAudio) {
		return;
		}
	AVIStreamRead(pavi,sstart,slen,lpAudio,lBytes,NULL,&l);
	if (l != slen) {
		GlobalFreePtr(lpAudio);
		return;
		}

	// Bucket geometry: satLen buckets of numSum samples each.
	// NOTE(review): numSum = slen/satLen divides by zero when
	// (len*resolution)/1000 evaluates to 0 (very short stream or
	// resolution 0) — confirm callers guarantee otherwise.
	satLen = (len*resolution)/(1000);
	numSum = slen/satLen;
	satLen = slen/numSum;
	sat    = new DWORD[satLen * wf.wf.nChannels];
	if (!sat) {
		GlobalFreePtr(lpAudio);
		return;
		}
	channels = wf.wf.nChannels;
	ptr = sat;
	pos = 0;

	//
	// First find the average value (per-channel DC offset)
	//
	av[0] = av[1] = 0;
	iptr = (HPINT)lpAudio;
	bptr = (HPBYTE)lpAudio;
	for (i=0; i<(DWORD)slen; i++) {
		if (wf.wBitsPerSample==8) {
			// 8-bit samples are unsigned, centred on 0x80
			for (j=0;j<channels;j++) {
				av[j] += *bptr++ - 0x80;
				}
		} else {
			for (j=0;j<channels;j++) {
				av[j] += *iptr++;
				}
			}
		}
	for (j=0;j<channels;j++) {
		av[j] /= slen;
		}

	//
	// Now build the SAT moving the average to 0.
	//
	sum[0] = sum[1] = 0;
	iptr = (HPINT)lpAudio;
	bptr = (HPBYTE)lpAudio;
	// NOTE(review): 'i' is DWORD while satLen/numSum are LONG — mixed
	// signed/unsigned comparisons; kept as-is.
	for (i=0; i<satLen; i++) {
		// Accumulate the absolute (offset-corrected) amplitude of one bucket.
		for (k=0; k<numSum; k++,pos++) {
			if (wf.wBitsPerSample==8) {
				for (j=0;j<channels;j++) {
					b = *bptr++ - av[j];
					if (b > 0x80) {
						v = (b - 0x80) * 256;
					} else {
						v = (0x80 - b) * 256;
						}
					sum[j] += v;
					}
			} else {
				for (j=0;j<channels;j++) {
					in = *iptr++ - av[j];
					if (in<0) in = -in;
					sum[j] += in;
					}
				}
			}
		// Store the running sum — entries are cumulative, so a bucket's
		// amplitude is the difference of consecutive entries.
		for (j=0;j<channels;j++) {
			*ptr++ = sum[j];
			}
		}

	// Find the max per-bucket average amplitude for each channel.
	max[0] = max[1] = 0;
	for (i=1; i<satLen; i++) {
		for (j=0;j<channels;j++) {
			v = (sat[i*channels+j]-sat[(i-1)*channels+j])/numSum;
			if (v>max[j]) max[j] = v;
			}
		}

	GlobalFreePtr(lpAudio);
#endif	// INTERIM_64_BIT
	}
bool VideoComponent::openAvi(const std::string& filename) { // Stop any currently loaded avi closeAvi(); AVIFileInit(); // Opens The AVIFile Library // Opens The AVI Stream if (AVIStreamOpenFromFile(&m_pavi, filename.c_str(), streamtypeVIDEO, 0, OF_READ, NULL) !=0) { GameLog::errorMessage("Error opening avi: %s", filename.c_str()); // An Error Occurred Opening The Stream AVIFileExit(); // Release The File return false; } AVIStreamInfo(m_pavi, &m_psi, sizeof(m_psi)); // Reads Information About The Stream Into psi m_width = m_psi.rcFrame.right-m_psi.rcFrame.left; // Width Is Right Side Of Frame Minus Left m_height = m_psi.rcFrame.bottom-m_psi.rcFrame.top; // Height Is Bottom Of Frame Minus Top if (!m_resize) { // Size should be kept m_resizeWidth = m_width; m_resizeHeight = m_height; } m_lastframe = AVIStreamLength(m_pavi); // The Last Frame Of The Stream m_timePerFrame = ((float)AVIStreamSampleToTime(m_pavi, m_lastframe) / (float) m_lastframe) / 1000.0f; // Calculate Rough Seconds Per Frame m_bmih.biSize = sizeof (BITMAPINFOHEADER); // Size Of The BitmapInfoHeader m_bmih.biPlanes = 1; // Bitplanes m_bmih.biBitCount = 24; // Bits Format We Want 24 / 8 = 3 bytes m_bmih.biWidth = m_resizeWidth; // Width We Want m_bmih.biHeight = m_resizeHeight; // Height We Want m_bmih.biCompression= BI_RGB; // Requested Mode = RGB m_hBitmap = CreateDIBSection (m_hdc, (BITMAPINFO*)(&m_bmih), DIB_RGB_COLORS, (void**)(&m_data), NULL, NULL); SelectObject (m_hdc, m_hBitmap); // Select hBitmap Into Our Device Context (hdc) // Bitmapinfo header for decoding (needed for xvid) m_bmiavih.biSize = sizeof(BITMAPINFOHEADER); m_bmiavih.biPlanes = 1; // Bitplanes m_bmiavih.biBitCount = 24; // Bits Format We Want 24 / 8 = 3 bytes m_bmiavih.biWidth = m_width; // Width We Want m_bmiavih.biHeight = m_height; // Height We Want m_bmiavih.biCompression = BI_RGB; // Requested Mode = RGB // And some more infos m_bmiavih.biClrImportant = 0; m_bmiavih.biClrUsed = 0; m_bmiavih.biXPelsPerMeter = 0; 
m_bmiavih.biYPelsPerMeter = 0; m_bmiavih.biSizeImage = (((m_bmiavih.biWidth * 3) + 3) & 0xFFFC) * m_bmiavih.biHeight; m_pgf=AVIStreamGetFrameOpen(m_pavi, &m_bmiavih);// Create The PGETFRAME Using Our Request Mode if (m_pgf==0x0) { GameLog::errorMessage("Error opening first frame of avi: %s", filename.c_str()); // An Error Occurred Opening The Frame DeleteObject(m_hBitmap); // Delete The Device Dependant Bitmap Object AVIStreamRelease(m_pavi); // Release The Stream AVIFileExit(); // Release The File return false; } m_fileName = filename; // Create buffer for converted data // width*height = count pixel; each pixel has 4 channels for rgba with each one byte int dataSize = 4*m_resizeWidth*m_resizeHeight; m_bgraData = new unsigned char[dataSize]; // Initialize with 255 (black screen with full alpha) memset(m_bgraData, 255, dataSize); // Prepare horde texture stream named like the video file name, to get a unique name m_videoTexture = h3dCreateTexture(filename.c_str(), m_resizeWidth, m_resizeHeight, H3DFormats::TEX_BGRA8, H3DResFlags::NoTexMipmaps); if (m_videoTexture == 0) { GameLog::errorMessage("Error creating texture for playing avi: %s", filename.c_str()); // Failure creating the dynamic texture closeAvi(); return false; } // Find the sampler index within the material m_samplerIndex = h3dFindResElem(m_material, H3DMatRes::SamplerElem, H3DMatRes::SampNameStr, "albedoMap"); if (m_samplerIndex == -1) { GameLog::errorMessage("Error preparing material with resID %d for playing avi: %s", m_material, filename.c_str()); // No sampler found in material closeAvi(); return false; } // Store old sampler m_originalSampler = h3dGetResParamI(m_material, H3DMatRes::SamplerElem, m_samplerIndex, H3DMatRes::SampTexResI); // Now open the audio stream PAVISTREAM audioStream; if (AVIStreamOpenFromFile(&audioStream, filename.c_str(), streamtypeAUDIO, 0, OF_READ, NULL) == 0) { // Audio stream found // Get format info PCMWAVEFORMAT audioFormat; long formatSize = sizeof(audioFormat); int 
start = AVIStreamStart(audioStream); // TODO get channelsmask and use it AVIStreamReadFormat(audioStream, start, &audioFormat, &formatSize); long numSamples = AVIStreamLength(audioStream); int bitsPerSample = (audioFormat.wf.nAvgBytesPerSec * 8) / (audioFormat.wf.nSamplesPerSec * audioFormat.wf.nChannels); /*if (audioFormat.wf.wFormatTag == WAVE_FORMAT_MPEGLAYER3) { // TODO MPEGLAYER3WAVEFORMAT mp3Format; formatSize = sizeof(mp3Format); AVIStreamReadFormat(audioStream, start, &mp3Format, &formatSize); }*/ // Create buffer with appropriate size long bufferSize = (bitsPerSample * numSamples) / 8; char* buffer = new char[bufferSize]; // Read the audio data long bytesWritten = 0; AVIStreamRead(audioStream, start, numSamples, buffer, bufferSize, &bytesWritten, 0x0); if (bytesWritten > 0) { // Send the audio data to the sound component SoundResourceData eventData(buffer, bytesWritten, audioFormat.wf.nSamplesPerSec, bitsPerSample, audioFormat.wf.nChannels); GameEvent event(GameEvent::E_SET_SOUND_WITH_USER_DATA, &eventData, this); m_owner->executeEvent(&event); m_hasAudio = true; } // Delete the buffer data delete[] buffer; } if (m_autoStart) // Play video directly playAvi(); return true; }
BOOL CALLBACK aviaudioOpenDevice(HWND hwnd, PAVISTREAM pavi) { MMRESULT mmResult; LPVOID lpFormat; LONG cbFormat; AVISTREAMINFO strhdr; if (!pavi) // no wave data to play return FALSE; if (shWaveOut) // already something playing return TRUE; spavi = pavi; recalc = 1; AVIStreamInfo(pavi, &strhdr, sizeof(strhdr)); slSampleSize = (LONG) strhdr.dwSampleSize; if (slSampleSize <= 0 || slSampleSize > AUDIO_BUFFER_SIZE) return FALSE; //AVIStreamFormatSize(pavi, 0, &cbFormat); AVIStreamFormatSize(pavi, AVIStreamStart(pavi), &cbFormat); lpFormat = GlobalAllocPtr(GHND, cbFormat); if (!lpFormat) return FALSE; //AVIStreamReadFormat(pavi, 0, lpFormat, &cbFormat); AVIStreamReadFormat(pavi, AVIStreamStart(pavi), lpFormat, &cbFormat); sdwSamplesPerSec = ((LPWAVEFORMAT) lpFormat)->nSamplesPerSec; sdwBytesPerSec = ((LPWAVEFORMAT) lpFormat)->nAvgBytesPerSec; mmResult = waveOutOpen(&shWaveOut, (UINT)WAVE_MAPPER, (WAVEFORMATEX *) lpFormat, (DWORD) (UINT) hwnd, 0L, CALLBACK_WINDOW); // Maybe we failed because someone is playing sound already. // Shut any sound off, and try once more before giving up. if (mmResult) { sndPlaySound(NULL, 0); mmResult = waveOutOpen(&shWaveOut, (UINT)WAVE_MAPPER, (WAVEFORMATEX *) lpFormat, (DWORD) (UINT)hwnd, 0L, CALLBACK_WINDOW); } if (mmResult != 0) { return FALSE; } for (swBuffers = 0; swBuffers < MAX_AUDIO_BUFFERS; swBuffers++) { if (!(salpAudioBuf[swBuffers] = (LPWAVEHDR)GlobalAllocPtr(GMEM_MOVEABLE | GMEM_SHARE, (DWORD)(sizeof(WAVEHDR) + AUDIO_BUFFER_SIZE)))) break; salpAudioBuf[swBuffers]->dwFlags = WHDR_DONE; salpAudioBuf[swBuffers]->lpData = (LPSTR) salpAudioBuf[swBuffers] + sizeof(WAVEHDR); salpAudioBuf[swBuffers]->dwBufferLength = AUDIO_BUFFER_SIZE; if (!waveOutPrepareHeader(shWaveOut, salpAudioBuf[swBuffers], sizeof(WAVEHDR))) continue; GlobalFreePtr((LPSTR) salpAudioBuf[swBuffers]); break; } if (swBuffers < MIN_AUDIO_BUFFERS) { aviaudioCloseDevice(); return FALSE; } swBuffersOut = 0; swNextBuffer = 0; sfPlaying = FALSE; return TRUE; }
/* Create a known-content AVI file in the temp directory and verify that the
 * AVIFile API reports the expected default stream info and audio format.
 * Stream 0 is video (I420, 30000/1001 fps, 1 frame, 8x6); stream 1 is audio
 * (PCM, stereo, 11025 Hz, 1637 samples, 2-byte blocks). */
static void test_default_data(void)
{
    COMMON_AVI_HEADERS cah;
    char filename[MAX_PATH];
    PAVIFILE pFile;
    int res;
    LONG lSize;
    PAVISTREAM pStream0;
    PAVISTREAM pStream1;
    AVISTREAMINFOA asi0, asi1;
    WAVEFORMATEX wfx;

    /* Build the test file path and write the fixture AVI. */
    GetTempPathA(MAX_PATH, filename);
    strcpy(filename+strlen(filename), testfilename);

    init_test_struct(&cah);
    create_avi_file(&cah, filename);

    res = AVIFileOpenA(&pFile, filename, OF_SHARE_DENY_WRITE, 0L);
    /* Distinguish the individual failure codes before the blanket check. */
    ok(res != AVIERR_BADFORMAT, "Unable to open file: error1=%u\n", AVIERR_BADFORMAT);
    ok(res != AVIERR_MEMORY, "Unable to open file: error2=%u\n", AVIERR_MEMORY);
    ok(res != AVIERR_FILEREAD, "Unable to open file: error3=%u\n", AVIERR_FILEREAD);
    ok(res != AVIERR_FILEOPEN, "Unable to open file: error4=%u\n", AVIERR_FILEOPEN);
    ok(res != REGDB_E_CLASSNOTREG, "Unable to open file: error5=%u\n", REGDB_E_CLASSNOTREG);
    ok(res == 0, "Unable to open file: error=%u\n", res);

    res = AVIFileGetStream(pFile, &pStream0, 0, 0);
    ok(res == 0, "Unable to open video stream: error=%u\n", res);
    res = AVIFileGetStream(pFile, &pStream1, 0, 1);
    ok(res == 0, "Unable to open audio stream: error=%u\n", res);

    res = AVIStreamInfoA(pStream0, &asi0, sizeof(asi0));
    ok(res == 0, "Unable to read stream info: error=%u\n", res);
    res = AVIStreamInfoA(pStream1, &asi1, sizeof(asi1));
    ok(res == 0, "Unable to read stream info: error=%u\n", res);

    /* NOTE(review): the first call passes AVIStreamStart(pStream1) (audio)
     * while querying pStream0 (video) — presumably a typo for pStream0's
     * start; both start at 0 here so the test still passes. Verify upstream. */
    res = AVIStreamReadFormat(pStream0, AVIStreamStart(pStream1), NULL, &lSize);
    ok(res == 0, "Unable to read format size: error=%u\n", res);
    res = AVIStreamReadFormat(pStream1, AVIStreamStart(pStream1), &wfx, &lSize);
    ok(res == 0, "Unable to read format: error=%u\n", res);

    /* Video stream info checks. */
    ok(asi0.fccType == streamtypeVIDEO, "got 0x%x (expected streamtypeVIDEO)\n", asi0.fccType);
    ok(asi0.fccHandler == 0x30323449, "got 0x%x (expected 0x30323449)\n", asi0.fccHandler);
    ok(asi0.dwFlags == 0, "got %u (expected 0)\n", asi0.dwFlags);
    ok(asi0.wPriority == 0, "got %u (expected 0)\n", asi0.wPriority);
    ok(asi0.wLanguage == 0, "got %u (expected 0)\n", asi0.wLanguage);
    ok(asi0.dwScale == 1001, "got %u (expected 1001)\n", asi0.dwScale);
    ok(asi0.dwRate == 30000, "got %u (expected 30000)\n", asi0.dwRate);
    ok(asi0.dwStart == 0, "got %u (expected 0)\n", asi0.dwStart);
    ok(asi0.dwLength == 1, "got %u (expected 1)\n", asi0.dwLength);
    ok(asi0.dwInitialFrames == 0, "got %u (expected 0)\n", asi0.dwInitialFrames);
    ok(asi0.dwSuggestedBufferSize == 0, "got %u (expected 0)\n", asi0.dwSuggestedBufferSize);
    ok(asi0.dwQuality == 0xffffffff, "got 0x%x (expected 0xffffffff)\n", asi0.dwQuality);
    ok(asi0.dwSampleSize == 0, "got %u (expected 0)\n", asi0.dwSampleSize);
    ok(asi0.rcFrame.left == 0, "got %u (expected 0)\n", asi0.rcFrame.left);
    ok(asi0.rcFrame.top == 0, "got %u (expected 0)\n", asi0.rcFrame.top);
    ok(asi0.rcFrame.right == 8, "got %u (expected 8)\n", asi0.rcFrame.right); /* these are based on the values in the mah and not */
    ok(asi0.rcFrame.bottom == 6, "got %u (expected 6)\n", asi0.rcFrame.bottom);/* on the ones in the ash which are 0 here */
    ok(asi0.dwEditCount == 0, "got %u (expected 0)\n", asi0.dwEditCount);
    ok(asi0.dwFormatChangeCount == 0, "got %u (expected 0)\n", asi0.dwFormatChangeCount);

    /* Audio stream info checks. */
    ok(asi1.fccType == streamtypeAUDIO, "got 0x%x (expected streamtypeVIDEO)\n", asi1.fccType);
    ok(asi1.fccHandler == 0x1, "got 0x%x (expected 0x1)\n", asi1.fccHandler);
    ok(asi1.dwFlags == 0, "got %u (expected 0)\n", asi1.dwFlags);
    ok(asi1.wPriority == 0, "got %u (expected 0)\n", asi1.wPriority);
    ok(asi1.wLanguage == 0, "got %u (expected 0)\n", asi1.wLanguage);
    ok(asi1.dwScale == 1, "got %u (expected 1)\n", asi1.dwScale);
    ok(asi1.dwRate == 11025, "got %u (expected 11025)\n", asi1.dwRate);
    ok(asi1.dwStart == 0, "got %u (expected 0)\n", asi1.dwStart);
    ok(asi1.dwLength == 1637, "got %u (expected 1637)\n", asi1.dwLength);
    ok(asi1.dwInitialFrames == 0, "got %u (expected 0)\n", asi1.dwInitialFrames);
    ok(asi1.dwSuggestedBufferSize == 0, "got %u (expected 0)\n", asi1.dwSuggestedBufferSize);
    ok(asi1.dwQuality == 0xffffffff, "got 0x%x (expected 0xffffffff)\n", asi1.dwQuality);
    ok(asi1.dwSampleSize == 2, "got %u (expected 2)\n", asi1.dwSampleSize);
    ok(asi1.rcFrame.left == 0, "got %u (expected 0)\n", asi1.rcFrame.left);
    ok(asi1.rcFrame.top == 0, "got %u (expected 0)\n", asi1.rcFrame.top);
    ok(asi1.rcFrame.right == 0, "got %u (expected 0)\n", asi1.rcFrame.right);
    ok(asi1.rcFrame.bottom == 0, "got %u (expected 0)\n", asi1.rcFrame.bottom);
    ok(asi1.dwEditCount == 0, "got %u (expected 0)\n", asi1.dwEditCount);
    ok(asi1.dwFormatChangeCount == 0, "got %u (expected 0)\n", asi1.dwFormatChangeCount);

    /* Audio format checks (PCM stereo 11025 Hz). */
    ok(wfx.wFormatTag == 1, "got %u (expected 1)\n",wfx.wFormatTag);
    ok(wfx.nChannels == 2, "got %u (expected 2)\n",wfx.nChannels);
    ok(wfx.wFormatTag == 1, "got %u (expected 1)\n",wfx.wFormatTag);
    ok(wfx.nSamplesPerSec == 11025, "got %u (expected 11025)\n",wfx.nSamplesPerSec);
    ok(wfx.nAvgBytesPerSec == 22050, "got %u (expected 22050)\n",wfx.nAvgBytesPerSec);
    ok(wfx.nBlockAlign == 2, "got %u (expected 2)\n",wfx.nBlockAlign);

    AVIStreamRelease(pStream0);
    AVIStreamRelease(pStream1);
    AVIFileRelease(pFile);
    ok(DeleteFileA(filename) !=0, "Deleting file %s failed\n", filename);
}
void CRotateAVIDlg::ProcessAVI(const TCHAR *source_filename, const TCHAR *dest_filename, eRotation rot) { TCHAR error_buf[1024]; PAVIFILE source_avi = 0; PAVIFILE dest_avi = 0; PAVISTREAM pSrcVidStream = 0; PAVISTREAM pSrcAudioStream = 0; PAVISTREAM pDestVidStream = 0; PAVISTREAM pDestAudioStream = 0; char *pSrcBuffer = 0; char *pJPGBuffer = 0; char *pDecompBuffer = 0; char *pRotateBuffer = 0; char *pDestBuffer = 0; AVIFileInit(); // source setup if (AVIFileOpen(&source_avi, source_filename, OF_READ, NULL) != AVIERR_OK) { _stprintf(error_buf, TEXT("Couldn't open file %s"), source_filename); MessageBox(error_buf); goto cleanup; } AVIFILEINFO src_avi_info; AVIFileInfo(source_avi, &src_avi_info, sizeof(AVIFILEINFO)); if (AVIFileGetStream(source_avi, &pSrcVidStream, streamtypeVIDEO, 0) != AVIERR_OK) { _stprintf(error_buf, TEXT("No video stream in %s"), source_filename); MessageBox(error_buf); goto cleanup; } BITMAPINFOHEADER srcBIH; long srcvidstreamsize; AVIStreamFormatSize(pSrcVidStream, 0, &srcvidstreamsize); if (srcvidstreamsize > sizeof(BITMAPINFOHEADER)) { _stprintf(error_buf, TEXT("Unable to handle video stream format in %s"), source_filename); MessageBox(error_buf); goto cleanup; } srcvidstreamsize = sizeof(BITMAPINFOHEADER); if (AVIStreamReadFormat(pSrcVidStream, 0, &srcBIH, &srcvidstreamsize) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error reading stream format in %s"), source_filename); MessageBox(error_buf); goto cleanup; } if (srcBIH.biCompression != MKFOURCC('M','J','P','G')) { _stprintf(error_buf, TEXT("%s is not motion JPEG format"), source_filename); MessageBox(error_buf); goto cleanup; } AVISTREAMINFO vidstream_info; if (AVIStreamInfo(pSrcVidStream, &vidstream_info, sizeof(AVISTREAMINFO)) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error reading stream info in %s"), source_filename); MessageBox(error_buf); goto cleanup; } int firstVidSrcFrame = AVIStreamStart(pSrcVidStream); if (firstVidSrcFrame == -1) { _stprintf(error_buf, TEXT("Video stream start 
error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } int numVidSrcFrames = AVIStreamLength(pSrcVidStream); if (numVidSrcFrames == -1) { _stprintf(error_buf, TEXT("Video stream length error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } AVIFileGetStream(source_avi, &pSrcAudioStream, streamtypeAUDIO, 0); int firstAudioSrcFrame = 0; int numAudioSrcFrames = 0; if (pSrcAudioStream) { firstAudioSrcFrame = AVIStreamStart(pSrcAudioStream); if (firstAudioSrcFrame == -1) { _stprintf(error_buf, TEXT("Audio stream start error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } numAudioSrcFrames = AVIStreamLength(pSrcAudioStream); if (numAudioSrcFrames == -1) { _stprintf(error_buf, TEXT("Audio stream length error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } } // dest setup BITMAPINFOHEADER destBIH; destBIH = srcBIH; if (rot != CW_180) { destBIH.biWidth = srcBIH.biHeight; destBIH.biHeight = srcBIH.biWidth; } if (AVIFileOpen(&dest_avi, dest_filename, OF_CREATE | OF_WRITE, NULL) != AVIERR_OK) { _stprintf(error_buf, TEXT("Couldn't open file %s"), dest_filename); MessageBox(error_buf); goto cleanup; } vidstream_info.rcFrame.left = vidstream_info.rcFrame.top = 0; vidstream_info.rcFrame.right = destBIH.biWidth; vidstream_info.rcFrame.bottom = destBIH.biHeight; if (AVIFileCreateStream(dest_avi, &pDestVidStream, &vidstream_info) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error creating video stream in %s"), dest_filename); MessageBox(error_buf); goto cleanup; } if (AVIStreamSetFormat(pDestVidStream, 0, &destBIH, sizeof(BITMAPINFOHEADER)) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error setting video stream format in %s"), dest_filename); MessageBox(error_buf); goto cleanup; } if (AVIStreamSetFormat(pDestVidStream, 0, &destBIH, sizeof(BITMAPINFOHEADER)) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error setting video stream format in %s"), dest_filename); MessageBox(error_buf); goto cleanup; } // video memory int 
img_rgb_size = srcBIH.biHeight * srcBIH.biWidth * 3; pSrcBuffer = new char[img_rgb_size]; pJPGBuffer = new char[img_rgb_size]; pDecompBuffer = new char[img_rgb_size]; pRotateBuffer = new char[img_rgb_size]; pDestBuffer = new char[img_rgb_size]; long bytes_read; long bytes_written; for (int i = firstVidSrcFrame; i < numVidSrcFrames; ++i) { if (AVIStreamRead(pSrcVidStream, i, 1, pSrcBuffer, img_rgb_size, &bytes_read, 0) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error reading video stream from %s"), source_filename); MessageBox(error_buf); goto cleanup; } // well-form the jpg int jpglen = ConstructWellFormedJPEG(pSrcBuffer, pJPGBuffer, bytes_read); // decompress JPEGHandler jpgh_decomp(pJPGBuffer, jpglen); jpgh_decomp.DecompressToRGB(pDecompBuffer, img_rgb_size); // rotate int destx, desty; char *pRotSrc; char *pRotDest; switch (rot) { case CW_90: for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy) { for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx) { destx = srcBIH.biHeight-1-srcy; desty = srcx; pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3]; pRotDest = &pRotateBuffer[(desty * srcBIH.biHeight + destx) * 3]; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; } } break; case CW_180: for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy) { for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx) { destx = srcBIH.biWidth-1-srcx; desty = srcBIH.biHeight-1-srcy; pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3]; pRotDest = &pRotateBuffer[(desty * srcBIH.biWidth + destx) * 3]; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; } } break; case ACW_90: for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy) { for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx) { destx = srcy; desty = srcBIH.biWidth-1-srcx; pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3]; pRotDest = &pRotateBuffer[(desty * srcBIH.biHeight + destx) * 3]; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; } } 
break; } // compress JPEGHandler jpgh_comp(pRotateBuffer, img_rgb_size); if (rot != CW_180) destBIH.biSizeImage = jpgh_comp.CompressFromRGB(pDestBuffer, img_rgb_size, srcBIH.biHeight, srcBIH.biWidth); else destBIH.biSizeImage = jpgh_comp.CompressFromRGB(pDestBuffer, img_rgb_size, srcBIH.biWidth, srcBIH.biHeight); if (AVIStreamWrite(pDestVidStream, i, 1, pDestBuffer, destBIH.biSizeImage, AVIIF_KEYFRAME, NULL, &bytes_written) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error writing video stream to %s"), dest_filename); MessageBox(error_buf); goto cleanup; } } cleanup: delete[] pSrcBuffer; delete[] pDestBuffer; delete[] pJPGBuffer; delete[] pDecompBuffer; delete[] pRotateBuffer; if (pDestAudioStream) AVIStreamRelease(pDestAudioStream); if (pDestVidStream) AVIStreamRelease(pDestVidStream); if (pSrcAudioStream) AVIStreamRelease(pSrcAudioStream); if (pSrcVidStream) AVIStreamRelease(pSrcVidStream); if (dest_avi) AVIFileRelease(dest_avi); if (source_avi) AVIFileRelease(source_avi); AVIFileExit(); }
// Prepare audio playback for the movie: read the (possibly compressed) audio
// stream's format, open an ACM conversion stream to 16-bit PCM at the source
// sample rate, size the decode/playback buffers, and (under SND_USE_FMOD)
// create and start the FMOD output stream. Returns false on any failure,
// releasing whatever was acquired up to that point.
// NOTE(review): uses member/global state (audioStream, audioChunkSize,
// wfxDecoded, acmStream, ...) — presumably this is a member function of the
// movie-player class; confirm against the enclosing declaration.
bool setupAudio()
{
	int ret;

	//read audio stream info; specifically, we need the encoded chunksize
	AVISTREAMINFO audioStreamInfo;
	AVIStreamInfo(audioStream,&audioStreamInfo,sizeof(AVISTREAMINFO));
	audioChunkSize = audioStreamInfo.dwSuggestedBufferSize;
	audioSampleCount = audioStreamInfo.dwLength;

	audioEncoded = new char[audioChunkSize];

	//read the audio streamformat info
	// First call (null buffer) yields the required size; second call fills it.
	LONG formatSize;
	AVIStreamReadFormat(audioStream,AVIStreamStart(audioStream),0,&formatSize);
	char *format = (char *)malloc(formatSize);
	AVIStreamReadFormat(audioStream,AVIStreamStart(audioStream),format,&formatSize);
	WAVEFORMATEX *wfxEncoded = (WAVEFORMATEX *)format;

	//construct a descriptor for the format we desire to get out of the decoder
	//note that we have to use the same samplerate as the encoded format indicates
	//since acm can't change the samplerate in one fell swoop
	wfxDecoded.cbSize = sizeof(WAVEFORMATEX);
	wfxDecoded.nChannels = wfxEncoded->nChannels;
	wfxDecoded.wFormatTag = WAVE_FORMAT_PCM;
	wfxDecoded.nSamplesPerSec = wfxEncoded->nSamplesPerSec;
	wfxDecoded.wBitsPerSample = 16;
	wfxDecoded.nBlockAlign = wfxDecoded.wBitsPerSample/8 * wfxEncoded->nChannels;
	wfxDecoded.nAvgBytesPerSec = wfxDecoded.nBlockAlign * wfxDecoded.nSamplesPerSec;
	sampleSize = wfxDecoded.nBlockAlign;

	//try to get a converter from the encoded data to the decoded data
	ret = acmStreamOpen(&acmStream,0,wfxEncoded,&wfxDecoded,0,0,0,0);

	//now we're done with wfxEncoded
	free(format);

	if(ret)
	{
		// no ACM codec could convert this format — undo the allocations above
		delete[] audioEncoded;
		AVIStreamClose(audioStream);
		return false;
	}

	//decide on a playback buffer size
	//make each buffer 1/2sec
	playBufferSamples = wfxDecoded.nSamplesPerSec / 2;
	playBufferSize = playBufferSamples * sampleSize;

	//hurry and try to create the output stream.
	//if we can't do that, then everything that follows is pointless.
	// Map the decoded format onto FMOD's mode flags.
	int mode = 0;
	if(wfxDecoded.wBitsPerSample == 8)
		mode |= FSOUND_8BITS;
	else if(wfxDecoded.wBitsPerSample == 16)
		mode |= FSOUND_16BITS;
	if(wfxDecoded.nChannels == 1)
		mode |= FSOUND_MONO;
	else
		mode |= FSOUND_STEREO;
#ifdef SND_USE_FMOD
	fmod_stream = FSOUND_Stream_Create(win_movie_fmod_streamCallback,playBufferSize,mode,wfxDecoded.nSamplesPerSec,(int)this);
	if(!fmod_stream)
	{
		// roll back the ACM stream and buffers created so far
		acmStreamClose(acmStream,0);
		delete[] audioEncoded;
		AVIStreamClose(audioStream);
		err("Error creating fmod stream for movieplayback. Please report this case so we can improve the robustness of the movie player!");
		return false;
	}
#endif

	//find out how large a decode buffer we need for the encode buffer chunksize
	acmStreamSize(acmStream,audioChunkSize,&decodeBufferSize,ACM_STREAMSIZEF_SOURCE);
	decodeBufferSamples = decodeBufferSize / sampleSize;

	//allocate the decode buffer
	audioDecoded = new char[decodeBufferSize];

	//prep the decode operation
	audioStreamHeader.cbStruct = sizeof(ACMSTREAMHEADER);
	audioStreamHeader.fdwStatus = 0;
	audioStreamHeader.pbSrc = (LPBYTE)audioEncoded;
	audioStreamHeader.cbSrcLength = audioChunkSize;
	audioStreamHeader.pbDst = (LPBYTE)audioDecoded;
	audioStreamHeader.cbDstLength = decodeBufferSize;
	ret = acmStreamPrepareHeader(acmStream,&audioStreamHeader,0);
	if(ret)
	{
		// header preparation failed — release everything acquired above
		delete[] audioDecoded;
		acmStreamClose(acmStream,0);
		delete[] audioEncoded;
		AVIStreamClose(audioStream);
		return false;
	}

#ifdef SND_USE_FMOD
	//finally we're ready to start the audio stream
	FSOUND_Stream_Play(FSOUND_FREE,fmod_stream);
#endif
	return true;
}
// Experimental/hacked variant of ffmpeg's avisynth demuxer header reader:
// opens the AVI(Synth) file named by read_config_filepath(), reads its single
// expected stream, and fills the AVISynthStream bookkeeping (chunck_size /
// chunck_samples). Audio streams are asserted away; most of the original
// ffmpeg stream-registration code is kept commented out for reference.
// NOTE(review): `avs`, `stream`, `wvfmt`, `savedVideoFormat` and the `s` used
// in av_log are not declared in this function — presumably file-scope
// globals in this hacked port; confirm before reusing.
int avisynth_read_header()
{
  avs = (AVISynthContext *) av_mallocz(sizeof(AVISynthContext));
  HRESULT res;
  AVIFILEINFO info;
  DWORD id;

  AVIFileInit();

  res = AVIFileOpen(&avs->file, read_config_filepath(), OF_READ|OF_SHARE_DENY_WRITE, NULL);
  if (res != S_OK)
    {
      av_log(s, AV_LOG_ERROR, "AVIFileOpen failed with error %ld", res);
      AVIFileExit();
      return -1;
    }

  res = AVIFileInfo(avs->file, &info, sizeof(info));
  if (res != S_OK)
    {
      av_log(s, AV_LOG_ERROR, "AVIFileInfo failed with error %ld", res);
      AVIFileExit();
      return -1;
    }

  avs->streams = (AVISynthStream *) av_mallocz(info.dwStreams * sizeof(AVISynthStream));
  // This port only supports a single (video) stream.
  assert(info.dwStreams == 1);
  for (id=0; id<info.dwStreams; id++)
    {
      stream = &avs->streams[id];
      stream->read = 0;
      if (AVIFileGetStream(avs->file, &stream->handle, 0, id) == S_OK)
        {
          if (AVIStreamInfo(stream->handle, &stream->info, sizeof(stream->info)) == S_OK)
            {
              if (stream->info.fccType == streamtypeAUDIO)
                {
                  assert(false); // don't do audio yet
                  LONG struct_size = sizeof(WAVEFORMATEX);
                  if (AVIStreamReadFormat(stream->handle, 0, &wvfmt, &struct_size) != S_OK)
                    continue;
                  /* audio:
                  st = avformat_new_stream(s, NULL);
                  st->id = id;
                  st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
                  st->codec->block_align = wvfmt.nBlockAlign;
                  st->codec->channels = wvfmt.nChannels;
                  st->codec->sample_rate = wvfmt.nSamplesPerSec;
                  st->codec->bit_rate = wvfmt.nAvgBytesPerSec * 8;
                  st->codec->bits_per_coded_sample = wvfmt.wBitsPerSample;
                  st->codec->codec_tag = wvfmt.wFormatTag;
                  st->codec->codec_id = ff_wav_codec_get_id(wvfmt.wFormatTag, st->codec->bits_per_coded_sample);
                  */
                  // Samples per chunk = sample rate scaled by the file's frame duration.
                  stream->chunck_samples = wvfmt.nSamplesPerSec * (__int64)info.dwScale / (__int64) info.dwRate;
                  stream->chunck_size = stream->chunck_samples * wvfmt.nChannels * wvfmt.wBitsPerSample / 8;
                }
              else if (stream->info.fccType == streamtypeVIDEO)
                {
                  LONG struct_size = sizeof(BITMAPINFO);
                  // Video is read one frame (one sample) at a time.
                  stream->chunck_size = stream->info.dwSampleSize;
                  stream->chunck_samples = 1;
                  if (AVIStreamReadFormat(stream->handle, 0, &savedVideoFormat, &struct_size) != S_OK)
                    continue;
                  /*
                  stream->info.dwRate is numerator
                  stream->info.dwScale is denominator [?]
                  savedVideoFormat.bmiHeader.biWidth
                  */
                  /*st = avformat_new_stream(s, NULL);
                  st->id = id;
                  st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
                  st->r_frame_rate.num = stream->info.dwRate;
                  st->r_frame_rate.den = stream->info.dwScale;
                  st->codec->width = savedVideoFormat.bmiHeader.biWidth;
                  st->codec->height = savedVideoFormat.bmiHeader.biHeight;
                  st->codec->bits_per_coded_sample = savedVideoFormat.bmiHeader.biBitCount;
                  st->codec->bit_rate = (uint64_t)stream->info.dwSampleSize * (uint64_t)stream->info.dwRate * 8 / (uint64_t)stream->info.dwScale;
                  st->codec->codec_tag = savedVideoFormat.bmiHeader.biCompression;
                  st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, savedVideoFormat.bmiHeader.biCompression);
                  if (st->codec->codec_id == CODEC_ID_RAWVIDEO && savedVideoFormat.bmiHeader.biCompression== BI_RGB) {
                    st->codec->extradata = av_malloc(9 + FF_INPUT_BUFFER_PADDING_SIZE);
                    if (st->codec->extradata) {
                      st->codec->extradata_size = 9;
                      memcpy(st->codec->extradata, "BottomUp", 9);
                    }
                  }
                  st->duration = stream->info.dwLength;
                  */
                }
              else
                {
                  // Not audio or video: drop the stream handle and skip it.
                  AVIStreamRelease(stream->handle);
                  continue;
                }
              avs->nb_streams++;
              // st->codec->stream_codec_tag = stream->info.fccHandler;
              //avpriv_set_pts_info(st, 64, info.dwScale, info.dwRate);
              //st->start_time = stream->info.dwStart;
              // wow what is the dshow equivalent? hmm...
            }
        }
    }
  return 0;
}
/* ffmpeg avisynth demuxer: open the script/AVI via the Windows AVIFile API
 * and register one AVStream per audio/video stream found, filling codec
 * parameters from the stream's WAVEFORMATEX / BITMAPINFO. Returns 0 on
 * success, -1 if the file cannot be opened or queried. */
static int avisynth_read_header(AVFormatContext *s)
{
    AVISynthContext *avs = s->priv_data;
    HRESULT res;
    AVIFILEINFO info;
    DWORD id;
    AVStream *st;
    AVISynthStream *stream;
    wchar_t filename_wchar[1024] = { 0 };
    char filename_char[1024] = { 0 };

    AVIFileInit();

    /* avisynth can't accept UTF-8 filename */
    /* round-trip UTF-8 -> wide -> thread ANSI codepage for AVIFileOpen */
    MultiByteToWideChar(CP_UTF8, 0, s->filename, -1, filename_wchar, 1024);
    WideCharToMultiByte(CP_THREAD_ACP, 0, filename_wchar, -1, filename_char, 1024, NULL, NULL);
    res = AVIFileOpen(&avs->file, filename_char, OF_READ|OF_SHARE_DENY_WRITE, NULL);
    if (res != S_OK)
        {
            av_log(s, AV_LOG_ERROR, "AVIFileOpen failed with error %ld", res);
            AVIFileExit();
            return -1;
        }

    res = AVIFileInfo(avs->file, &info, sizeof(info));
    if (res != S_OK)
        {
            av_log(s, AV_LOG_ERROR, "AVIFileInfo failed with error %ld", res);
            AVIFileExit();
            return -1;
        }

    avs->streams = av_mallocz(info.dwStreams * sizeof(AVISynthStream));

    for (id=0; id<info.dwStreams; id++)
        {
            stream = &avs->streams[id];
            stream->read = 0;
            if (AVIFileGetStream(avs->file, &stream->handle, 0, id) == S_OK)
                {
                    if (AVIStreamInfo(stream->handle, &stream->info, sizeof(stream->info)) == S_OK)
                        {
                            if (stream->info.fccType == streamtypeAUDIO)
                                {
                                    WAVEFORMATEX wvfmt;
                                    LONG struct_size = sizeof(WAVEFORMATEX);
                                    if (AVIStreamReadFormat(stream->handle, 0, &wvfmt, &struct_size) != S_OK)
                                        continue;

                                    st = avformat_new_stream(s, NULL);
                                    st->id = id;
                                    st->codec->codec_type = AVMEDIA_TYPE_AUDIO;

                                    st->codec->block_align = wvfmt.nBlockAlign;
                                    st->codec->channels = wvfmt.nChannels;
                                    st->codec->sample_rate = wvfmt.nSamplesPerSec;
                                    st->codec->bit_rate = wvfmt.nAvgBytesPerSec * 8;
                                    st->codec->bits_per_coded_sample = wvfmt.wBitsPerSample;

                                    /* one chunk = one video-frame-duration worth of samples */
                                    stream->chunck_samples = wvfmt.nSamplesPerSec * (uint64_t)info.dwScale / (uint64_t)info.dwRate;
                                    stream->chunck_size = stream->chunck_samples * wvfmt.nChannels * wvfmt.wBitsPerSample / 8;

                                    st->codec->codec_tag = wvfmt.wFormatTag;
                                    st->codec->codec_id = ff_wav_codec_get_id(wvfmt.wFormatTag, st->codec->bits_per_coded_sample);
                                }
                            else if (stream->info.fccType == streamtypeVIDEO)
                                {
                                    BITMAPINFO imgfmt;
                                    LONG struct_size = sizeof(BITMAPINFO);

                                    /* video is read one frame (one sample) at a time */
                                    stream->chunck_size = stream->info.dwSampleSize;
                                    stream->chunck_samples = 1;

                                    if (AVIStreamReadFormat(stream->handle, 0, &imgfmt, &struct_size) != S_OK)
                                        continue;

                                    st = avformat_new_stream(s, NULL);
                                    st->id = id;
                                    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
                                    st->r_frame_rate.num = stream->info.dwRate;
                                    st->r_frame_rate.den = stream->info.dwScale;

                                    st->codec->width = imgfmt.bmiHeader.biWidth;
                                    st->codec->height = imgfmt.bmiHeader.biHeight;

                                    st->codec->bits_per_coded_sample = imgfmt.bmiHeader.biBitCount;
                                    st->codec->bit_rate = (uint64_t)stream->info.dwSampleSize * (uint64_t)stream->info.dwRate * 8 / (uint64_t)stream->info.dwScale;
                                    st->codec->codec_tag = imgfmt.bmiHeader.biCompression;
                                    st->codec->codec_id = ff_codec_get_id(ff_codec_bmp_tags, imgfmt.bmiHeader.biCompression);
                                    /* raw RGB DIBs are stored bottom-up; flag it via extradata */
                                    if (st->codec->codec_id == CODEC_ID_RAWVIDEO && imgfmt.bmiHeader.biCompression== BI_RGB) {
                                        st->codec->extradata = av_malloc(9 + FF_INPUT_BUFFER_PADDING_SIZE);
                                        if (st->codec->extradata) {
                                            st->codec->extradata_size = 9;
                                            memcpy(st->codec->extradata, "BottomUp", 9);
                                        }
                                    }

                                    st->duration = stream->info.dwLength;
                                }
                            else
                                {
                                    /* neither audio nor video: drop the handle, skip */
                                    AVIStreamRelease(stream->handle);
                                    continue;
                                }

                            avs->nb_streams++;

                            st->codec->stream_codec_tag = stream->info.fccHandler;

                            avpriv_set_pts_info(st, 64, info.dwScale, info.dwRate);
                            st->start_time = stream->info.dwStart;
                        }
                }
        }

    return 0;
}
/**
 * Read the AviSynth file header via the Win32 AVIFile API and register one
 * AVStream per audio/video stream (legacy libavformat API variant).
 *
 * @param s  demuxer context; s->priv_data must be an AVISynthContext
 * @param ap unused format parameters (legacy signature)
 * @return 0 on success, a negative value on failure
 */
static int avisynth_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    AVISynthContext *avs = s->priv_data;
    HRESULT res;
    AVIFILEINFO info;
    DWORD id;
    AVStream *st;
    AVISynthStream *stream;

    AVIFileInit();

    res = AVIFileOpen(&avs->file, s->filename, OF_READ|OF_SHARE_DENY_WRITE, NULL);
    if (res != S_OK) {
        av_log(s, AV_LOG_ERROR, "AVIFileOpen failed with error %ld", res);
        AVIFileExit();
        return -1;
    }

    res = AVIFileInfo(avs->file, &info, sizeof(info));
    if (res != S_OK) {
        av_log(s, AV_LOG_ERROR, "AVIFileInfo failed with error %ld", res);
        AVIFileExit();
        return -1;
    }

    /* allocation result was previously unchecked: a failed av_mallocz
     * would have caused a NULL dereference in the loop below */
    avs->streams = av_mallocz(info.dwStreams * sizeof(AVISynthStream));
    if (!avs->streams) {
        AVIFileExit();
        return AVERROR(ENOMEM);
    }

    for (id = 0; id < info.dwStreams; id++) {
        stream = &avs->streams[id];
        stream->read = 0;

        if (AVIFileGetStream(avs->file, &stream->handle, 0, id) != S_OK)
            continue;
        if (AVIStreamInfo(stream->handle, &stream->info, sizeof(stream->info)) != S_OK)
            continue;

        if (stream->info.fccType == streamtypeAUDIO) {
            WAVEFORMATEX wvfmt;
            LONG struct_size = sizeof(WAVEFORMATEX);

            if (AVIStreamReadFormat(stream->handle, 0, &wvfmt, &struct_size) != S_OK)
                continue;

            /* av_new_stream result was previously unchecked */
            st = av_new_stream(s, id);
            if (!st)
                continue;
            st->codec->codec_type = CODEC_TYPE_AUDIO;

            st->codec->block_align = wvfmt.nBlockAlign;
            st->codec->channels = wvfmt.nChannels;
            st->codec->sample_rate = wvfmt.nSamplesPerSec;
            st->codec->bit_rate = wvfmt.nAvgBytesPerSec * 8;
            st->codec->bits_per_sample = wvfmt.wBitsPerSample;

            /* one chunk per container frame duration (dwScale/dwRate) */
            stream->chunck_samples = wvfmt.nSamplesPerSec * (uint64_t)info.dwScale / (uint64_t)info.dwRate;
            stream->chunck_size = stream->chunck_samples * wvfmt.nChannels * wvfmt.wBitsPerSample / 8;

            st->codec->codec_tag = wvfmt.wFormatTag;
            st->codec->codec_id = wav_codec_get_id(wvfmt.wFormatTag, st->codec->bits_per_sample);
        } else if (stream->info.fccType == streamtypeVIDEO) {
            BITMAPINFO imgfmt;
            LONG struct_size = sizeof(BITMAPINFO);

            stream->chunck_size = stream->info.dwSampleSize;
            stream->chunck_samples = 1;

            if (AVIStreamReadFormat(stream->handle, 0, &imgfmt, &struct_size) != S_OK)
                continue;

            /* av_new_stream result was previously unchecked */
            st = av_new_stream(s, id);
            if (!st)
                continue;
            st->codec->codec_type = CODEC_TYPE_VIDEO;
            st->r_frame_rate.num = stream->info.dwRate;
            st->r_frame_rate.den = stream->info.dwScale;

            st->codec->width = imgfmt.bmiHeader.biWidth;
            st->codec->height = imgfmt.bmiHeader.biHeight;

            st->codec->bits_per_sample = imgfmt.bmiHeader.biBitCount;
            st->codec->bit_rate = (uint64_t)stream->info.dwSampleSize * (uint64_t)stream->info.dwRate * 8 / (uint64_t)stream->info.dwScale;
            st->codec->codec_tag = imgfmt.bmiHeader.biCompression;
            st->codec->codec_id = codec_get_id(codec_bmp_tags, imgfmt.bmiHeader.biCompression);

            st->duration = stream->info.dwLength;
        } else {
            /* neither audio nor video: drop the handle */
            AVIStreamRelease(stream->handle);
            continue;
        }

        avs->nb_streams++;

        st->codec->stream_codec_tag = stream->info.fccHandler;

        av_set_pts_info(st, 64, info.dwScale, info.dwRate);
        st->start_time = stream->info.dwStart;
    }

    return 0;
}
int imFileFormatAVI::ReadImageInfo(int index) { this->current_frame = index; if (this->frame) // frame reading already prepared return IM_ERR_NONE; /* get stream format */ LONG formsize; AVIStreamReadFormat(stream, 0, NULL, &formsize); BITMAPINFO *bmpinfo = (BITMAPINFO*)malloc(formsize); HRESULT hr = AVIStreamReadFormat(stream, 0, bmpinfo, &formsize); if (hr != 0) { free(bmpinfo); return IM_ERR_ACCESS; } int top_down = 0; if (bmpinfo->bmiHeader.biHeight < 0) top_down = 1; this->width = bmpinfo->bmiHeader.biWidth; this->height = top_down? -bmpinfo->bmiHeader.biHeight: bmpinfo->bmiHeader.biHeight; int bpp = bmpinfo->bmiHeader.biBitCount; imAttribTable* attrib_table = AttribTable(); attrib_table->Set("FPS", IM_FLOAT, 1, &fps); this->file_data_type = IM_BYTE; if (bpp > 8) { this->file_color_mode = IM_RGB; this->file_color_mode |= IM_PACKED; } else { this->palette_count = 1 << bpp; this->file_color_mode = IM_MAP; } if (bpp < 8) this->convert_bpp = bpp; if (bpp == 32) this->file_color_mode |= IM_ALPHA; if (top_down) this->file_color_mode |= IM_TOPDOWN; if (bpp <= 8) { /* updates the palette_count based on the number of colors used */ if (bmpinfo->bmiHeader.biClrUsed != 0 && (int)bmpinfo->bmiHeader.biClrUsed < this->palette_count) this->palette_count = bmpinfo->bmiHeader.biClrUsed; ReadPalette((unsigned char*)bmpinfo->bmiColors); } free(bmpinfo); this->line_buffer_extra = 4; // room enough for padding /* prepares to read data from the stream */ if (bpp == 32 || bpp == 16) { BITMAPINFOHEADER info; memset(&info, 0, sizeof(BITMAPINFOHEADER)); info.biSize = sizeof(BITMAPINFOHEADER); info.biWidth = width; info.biHeight = height; info.biPlanes = 1; info.biBitCount = (WORD)bpp; frame = AVIStreamGetFrameOpen(stream, &info); } else frame = AVIStreamGetFrameOpen(stream, NULL); if (!frame) return IM_ERR_ACCESS; return IM_ERR_NONE; }
/* Open the AVI movie referenced by anim->name and fill in the anim fields
 * (duration, dimensions, framesize). On Windows, when the built-in reader
 * reports AVI_ERROR_COMPRESSION, fall back to the VFW AVIFile API to locate
 * a video stream that the system can decode.
 * Returns 0 on success, -1 on failure (anim->avi is freed on failure). */
static int startavi(struct anim *anim)
{
	AviError avierror;
#if defined(_WIN32) && !defined(FREE_WINDOWS)
	HRESULT hr;
	int i, firstvideo = -1;   /* index of first decodable video stream, -1 if none */
	int streamcount;
	BYTE abFormat[1024];      /* raw buffer for the stream's BITMAPINFOHEADER */
	LONG l;
	LPBITMAPINFOHEADER lpbi;
	AVISTREAMINFO avis;

	/* number of video streams to skip before picking one
	 * (presumably selects among multiple video tracks — confirm with callers) */
	streamcount = anim->streamindex;
#endif

	anim->avi = MEM_callocN(sizeof(AviMovie), "animavi");
	if (anim->avi == NULL) {
		printf("Can't open avi: %s\n", anim->name);
		return -1;
	}

	avierror = AVI_open_movie(anim->name, anim->avi);
#if defined(_WIN32) && !defined(FREE_WINDOWS)
	/* built-in reader can't decode the compression: try the VFW route */
	if (avierror == AVI_ERROR_COMPRESSION) {
		AVIFileInit();
		hr = AVIFileOpen(&anim->pfile, anim->name, OF_READ, 0L);
		if (hr == 0) {
			anim->pfileopen = 1;
			for (i = 0; i < MAXNUMSTREAMS; i++) {
				if (AVIFileGetStream(anim->pfile, &anim->pavi[i], 0L, i) != AVIERR_OK) {
					break;
				}
				AVIStreamInfo(anim->pavi[i], &avis, sizeof(avis));
				if ((avis.fccType == streamtypeVIDEO) && (firstvideo == -1)) {
					/* skip the first 'streamindex' video streams */
					if (streamcount > 0) {
						streamcount--;
						continue;
					}
					/* NULL format: let VFW pick the best display format */
					anim->pgf = AVIStreamGetFrameOpen(anim->pavi[i], NULL);
					if (anim->pgf) {
						firstvideo = i;

						/* get stream length */
						anim->avi->header->TotalFrames = AVIStreamLength(anim->pavi[i]);

						/* get information about images inside the stream
						 * NOTE(review): AVIStreamReadFormat's return value and
						 * required size are not checked against abFormat's
						 * 1024 bytes — assumed sufficient for a video format */
						l = sizeof(abFormat);
						AVIStreamReadFormat(anim->pavi[i], 0, &abFormat, &l);
						lpbi = (LPBITMAPINFOHEADER)abFormat;
						anim->avi->header->Height = lpbi->biHeight;
						anim->avi->header->Width = lpbi->biWidth;
					}
					else {
						/* no decoder for this stream: report its fourccs */
						FIXCC(avis.fccHandler);
						FIXCC(avis.fccType);
						printf("Can't find AVI decoder for type : %4.4hs/%4.4hs\n",
						       (LPSTR)&avis.fccType, (LPSTR)&avis.fccHandler);
					}
				}
			}

			/* register number of opened avistreams */
			anim->avistreams = i;

			/*
			 * Couldn't get any video streams out of this file
			 */
			if ((anim->avistreams == 0) || (firstvideo == -1)) {
				avierror = AVI_ERROR_FORMAT;
			}
			else {
				avierror = AVI_ERROR_NONE;
				anim->firstvideo = firstvideo;
			}
		}
		else {
			/* open failed: balance the AVIFileInit above */
			AVIFileExit();
		}
	}
#endif

	if (avierror != AVI_ERROR_NONE) {
		AVI_print_error(avierror);
		printf("Error loading avi: %s\n", anim->name);
		free_anim_avi(anim);
		return -1;
	}

	/* publish movie properties on the anim handle; output is RGBA (x*y*4) */
	anim->duration = anim->avi->header->TotalFrames;
	anim->params = NULL;

	anim->x = anim->avi->header->Width;
	anim->y = anim->avi->header->Height;
	anim->interlacing = 0;
	anim->orientation = 0;
	anim->framesize = anim->x * anim->y * 4;

	anim->curposition = 0;
	anim->preseek = 0;

	/* printf("x:%d y:%d size:%d interl:%d dur:%d\n", anim->x, anim->y, anim->framesize, anim->interlacing, anim->duration);*/

	return 0;
}
/* Open the first audio stream of the given AVI file and read its wave
 * format into m_pWaveFormat. On any failure the stream is released and
 * m_pAudioStream/m_pWaveFormat are reset to NULL.
 * Returns S_OK on success, a failure HRESULT otherwise. */
HRESULT CAVIFileReader::Initialize(IN char *pszFileName)
{
    // a real-life application would validate this argument more thoroughly
    _ASSERTE(pszFileName);

    LogMessage("CAVIFileReader::Initialize started. file [%s]", pszFileName);

    // Initialize the AVI file library. The matching AVIFileExit must run on
    // the same thread once the library is no longer needed; it is called
    // from the destructor, so instances of CAVIFileReader are assumed to be
    // created and destroyed on the same thread.
    AVIFileInit();

    // locate and open the first audio stream in the file
    HRESULT result = AVIStreamOpenFromFile(&m_pAudioStream,
                                           pszFileName,
                                           streamtypeAUDIO,
                                           0,
                                           OF_READ | OF_SHARE_DENY_WRITE,
                                           NULL);
    if (FAILED(result))
    {
        LogError("CAVIFileReader::Initialize "
                 "Failed to open stream from the file");
        m_pAudioStream = NULL;
        return result;
    }

    // ask the stream how large its format description is
    LONG formatSize = 0;
    result = AVIStreamReadFormat(m_pAudioStream, 0, NULL, &formatSize);
    if (FAILED(result) || (0 == formatSize))
    {
        LogError("CAVIFileReader::Initialize"
                 "Failed to get stream format's size");
        m_pAudioStream->Release();
        m_pAudioStream = NULL;
        return E_FAIL;
    }

    // reserve room for a full WAVEFORMATEX even when the stream only
    // carries the smaller WAVEFORMAT structure
    if (formatSize < (LONG)sizeof(WAVEFORMATEX))
    {
        formatSize = (LONG)sizeof(WAVEFORMATEX);
    }

    m_pWaveFormat = (WAVEFORMATEX*)AllocateMemory(formatSize);
    if (NULL == m_pWaveFormat)
    {
        LogError("CAVIFileReader::Initialize "
                 "Failed to allocate memory for wave format, size %ld",
                 formatSize);
        m_pAudioStream->Release();
        m_pAudioStream = NULL;
        return E_OUTOFMEMORY;
    }

    // fill the allocated structure with the stream's actual format
    result = AVIStreamReadFormat(m_pAudioStream, 0, m_pWaveFormat, &formatSize);
    if (FAILED(result))
    {
        LogError("CAVIFileReader::Initialize "
                 "Failed to read stream format");
        m_pAudioStream->Release();
        m_pAudioStream = NULL;
        FreeMemory(m_pWaveFormat);
        m_pWaveFormat = NULL;
        return result;
    }

    // record the stream's format for diagnostics
    LogMessage("CAVIFileReader::CAVIFileReader stream opened");
    LogFormat(m_pWaveFormat);

    LogMessage("CAVIFileReader::CAVIFileReader finished");

    return S_OK;
}