/**
 * Read one chunk of the next active stream into @pkt, round-robining
 * across streams to emulate interleaving.
 *
 * Returns the number of bytes read, AVERROR(EIO) on EOF/alloc failure,
 * or -1 if AVIStreamRead itself failed.
 */
static int avisynth_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVISynthContext *avs = s->priv_data;
    HRESULT res;
    AVISynthStream *stream;
    int stream_id = avs->next_stream;
    LONG read_size;

    // handle interleaving manually...
    stream = &avs->streams[stream_id];

    if (stream->read >= stream->info.dwLength)
        return AVERROR(EIO);

    if (av_new_packet(pkt, stream->chunck_size))
        return AVERROR(EIO);

    pkt->stream_index = stream_id;
    // pts is expressed in chunks, since each read advances by
    // chunck_samples samples. (The original code immediately overwrote
    // this with the raw sample index `stream->read`, which disagreed with
    // the chunk-based value computed here — that dead store is removed.)
    pkt->pts = stream->read / stream->chunck_samples;

    res = AVIStreamRead(stream->handle, stream->read, stream->chunck_samples,
                        pkt->data, stream->chunck_size, &read_size, NULL);

    pkt->size = read_size;
    stream->read += stream->chunck_samples;

    // prepare for the next stream to read, skipping discarded streams
    do {
        avs->next_stream = (avs->next_stream + 1) % avs->nb_streams;
    } while (avs->next_stream != stream_id &&
             s->streams[avs->next_stream]->discard >= AVDISCARD_ALL);

    // NOTE(review): on failure the allocated packet is returned to the
    // caller with error code -1; callers are expected to free it.
    return (res == S_OK) ? pkt->size : -1;
}
// Returns the frame at the current position as a CImage and advances the
// position, wrapping back to the first frame at the end of the stream.
// Compressed frames are decoded with ICImageDecompress; uncompressed
// frames are converted straight from the DIB bits.
CImage& CCamAvi::GetFrame ()
{
    // (Re)create the output image using the negotiated output format.
    m_frame.Create (m_bi_out.bmiHeader.biWidth, m_bi_out.bmiHeader.biHeight, m_bi_out.bmiHeader.biBitCount);
    HANDLE h = 0;
    IplImage* img=m_frame.GetImage();
    BITMAPINFOHEADER *b;
    long written, s_written;
    // Read exactly one sample (frame) of raw stream data into m_data.
    // NOTE(review): err is never checked — a failed read silently reuses
    // whatever bytes were left in m_data from the previous frame.
    long err=AVIStreamRead (m_pas, m_pos, 1, m_data, m_bi.bmiHeader.biSizeImage, &written, &s_written);
    m_pos=(m_pos+1)%m_length; // advance; loops playback
    if (m_bi.bmiHeader.biCompression!=0)
    {
        // decompress data; ICImageDecompress returns a movable global
        // memory handle holding a full DIB (header + bits).
        h=ICImageDecompress (m_hic, 0, &m_bi, m_data, &m_bi_out);
        if( h )
        {
            b=(BITMAPINFOHEADER*)GlobalLock(h);
            iplConvertFromDIB (b, img);
            GlobalUnlock(h);
            GlobalFree(h); // we own the handle returned by ICImageDecompress
        }
    }
    else
    {
        // Already raw RGB: convert directly from the separate header + bits.
        iplConvertFromDIBSep (&m_bi.bmiHeader, (const char*) m_data, img);
    }
    return m_frame;
}
void decodeFrame() { LONG a,b; HRESULT hr; hr = AVIStreamRead(stream,lastSample,1,indata,bihEncoded.biSizeImage,&a,&b); if(hr != 0) err("error in AVIStreamRead: hresult: %d",hr); if(b != 1) err("error in AVIStreamRead: did not read 1 frame, as instructed"); hr = ICDecompress(hIC,0,&bihEncoded,indata,&bihDecoded,outdata); if(hr != 0) err("error in ICDecompress: hresult: %d",hr); bDirty = true; }
// Tunnel a read request straight through to the wrapped AVIFile stream.
// The call is bracketed so crashes inside third-party driver code can be
// attributed; afterwards any pending Avisynth error is surfaced as a
// MyError exception. Returns the HRESULT from AVIStreamRead.
sint32 AVIReadTunnelStream::Read(VDPosition lStart, long lSamples, void *lpBuffer, long cbBuffer, long *plBytes, long *plSamples)
{
    HRESULT hr;

    {
        VDExternalCodeBracket(mpAvisynthClipInfo ? L"Avisynth" : L"An AVIFile input stream driver", __FILE__, __LINE__);
        hr = AVIStreamRead(pas, (LONG)lStart, lSamples, lpBuffer, cbBuffer, plBytes, plSamples);
    }

    if (mpAvisynthClipInfo) {
        const char *errorText;

        if (mpAvisynthClipInfo->GetError(&errorText))
            throw MyError("Avisynth read error:\n%s", errorText);
    }

    return hr;
}
// Experimental port of FFmpeg's avisynth_read_packet to a raw-buffer API:
// reads the next chunk of the (single) stream into pData.
// Returns the number of bytes read, or -1 on read failure.
// NOTE(review): this is clearly work-in-progress — error paths are asserts,
// and the computed `pts`/`size` locals are never used or returned.
int avisynth_read_packet(BYTE *pData, LONG pDataSize)
{
    HRESULT res;
    AVISynthStream *stream;
    int stream_id = avs->next_stream;
    assert(stream_id == 0); // only a single stream is supported here
    LONG read_size;

    // handle interleaving manually...
    stream = &avs->streams[stream_id];

    // read is the pts? that always increases...
    //if (stream->read >= stream->info.dwLength)
    //  return -1;

    // chunck_size is an avisynth thing. whoa!
    // Caller must supply a buffer at least one chunk large.
    if(pDataSize < stream->chunck_size)
        assert(false);

    // guess we have our own concept of stream_index, maybe?
    // pkt->stream_index = stream_id;

    // is this like a pts integer or something?
    // NOTE(review): computed but never used.
    __int64 pts = avs->streams[stream_id].read / avs->streams[stream_id].chunck_samples;

    // I think this will block until data is available, or 0 for EOF [?]
    res = AVIStreamRead(stream->handle, stream->read, stream->chunck_samples, pData, stream->chunck_size, &read_size, NULL);
    // NOTE(review): `size` is computed but never used.
    __int64 size = read_size;
    assert(pDataSize >= read_size);
    stream->read += stream->chunck_samples;

    // I guess with avi you're just supposed to read one stream, then the next, forever? huh?
    /*
    // prepare for the next stream to read
    do {
        avs->next_stream = (avs->next_stream+1) % avs->nb_streams;
    } while (avs->next_stream != stream_id && s->streams[avs->next_stream]->discard >= AVDISCARD_ALL);
    */
    return (res == S_OK) ? read_size : -1;
}
// Fill one streaming segment of the DirectSound buffer with audio pulled
// from the AVI audio stream. When the stream runs out mid-segment the
// remainder is zero-filled (silence) and the segment is remembered as the
// last one so playback can stop after it.
//
// @param Segment  zero-based index of the buffer segment to fill.
// @return the Lock() failure HRESULT, or DS_OK.
HRESULT CBSoundAVI::LoadSegment(int Segment)
{
    LPVOID Data;
    DWORD DataSize;

    HRESULT res = m_SoundBuffer->Lock(Segment*m_StreamBlockSize, m_StreamBlockSize, &Data, &DataSize, 0, 0, 0);
    if(FAILED(res)) return res;

    DWORD DataLeft = m_TotalDataLength - m_TotalDataRead;
    // FIX: min() (typically a macro that re-evaluates its arguments) was
    // expanded three times below; evaluate it once.
    DWORD BytesToRead = min(DataLeft, DataSize);

    // Positions and counts passed to AVIStreamRead are in block-aligned
    // sample units, hence the division by nBlockAlign.
    // NOTE(review): the read result is still ignored, as before — a failed
    // read plays whatever was in the locked buffer.
    AVIStreamRead(m_AudioStream, m_TotalDataRead / m_Format.wf.nBlockAlign, BytesToRead/m_Format.wf.nBlockAlign, Data, BytesToRead, NULL, NULL);
    m_TotalDataRead += BytesToRead;

    if(DataLeft<DataSize){
        // End of stream inside this segment: pad with silence and mark
        // this as the final segment (only once).
        memset((BYTE*)Data+DataLeft, 0, DataSize-DataLeft);
        if(m_LastStreamBlock==-1){
            m_LastStreamBlock = Segment;
            m_StopNow = false;
        }
    }

    m_SoundBuffer->Unlock(Data, DataSize, 0, 0);

    return DS_OK;
}
// AVI_stream_get_next_frame() will take the next RLE'd AVI frame and return the // uncompressed data in the buffer pointer supplied as a parameter. The caller is // responsible for allocating the memory before-hand (the memory required is easily // calculated by looking at the w and h members in AVI_stream). // // returns: 0 ==> success // !0 ==> error // int AVI_stream_get_frame(ubyte* buffer, int frame_number) { if ( frame_number > AVI_stream.num_frames ) { buffer = NULL; return -1; } Assert( (frame_number - 1) >= 0 ); ubyte* compressed_frame = (ubyte*)malloc(AVI_stream.min_compressed_buffer_size); Assert( compressed_frame != NULL ); long num_bytes_used; long num_samples_used; AVIStreamRead( AVI_stream.pstream, frame_number-1, 1, compressed_frame, AVI_stream.min_compressed_buffer_size, &num_bytes_used, &num_samples_used); Assert(num_samples_used == 1); AVI_decompress_RLE8(compressed_frame, buffer, AVI_stream.w, AVI_stream.h); free( compressed_frame ); return 0; }
// Read the frame at the current stream position into m_lpBuffer, describe
// it in `data`, hand it to the video agent, and advance the position.
// Does nothing once the position reaches the end of the stream.
void VideoReader::ReadNextFrame(FrameData& data)
{
    // Past the last sample? Then there is nothing left to read.
    if (m_currentSize >= AVIStreamEnd(m_pAviStream))
        return;

    HRESULT hr = AVIStreamRead(m_pAviStream, m_currentSize, 1,
                               (LPVOID)m_lpBuffer, m_bi.biSizeImage, NULL, NULL);

    // Describe the decoded frame for the agent.
    data.m_pFrame      = m_lpBuffer;
    data.m_Size        = m_bi.biSizeImage;
    data.m_BBP         = m_bi.biBitCount;
    data.m_ColorPlanes = m_bi.biPlanes;
    data.m_StartWidth  = 0;
    data.m_StartHeight = 0;
    data.m_EndWidth    = m_bi.biWidth;
    data.m_EndHeight   = m_bi.biHeight;
    data.m_Pitch       = m_bi.biWidth * (m_bi.biBitCount / 8);

    m_pVideoAgent->ProcessFrame(data);

    ++m_currentSize;
}
/*
 * Return a pointer to the decoded DIB for frame lPos, decoding forward
 * from the nearest preceding keyframe as needed.
 *
 * Returns lpInFormat (raw frame) when no decompressor is installed,
 * lpOutFormat (decoded frame) otherwise, or NULL on any failure.
 */
static LPVOID WINAPI IGetFrame_fnGetFrame(IGetFrame *iface, LONG lPos)
{
  IGetFrameImpl *This = impl_from_IGetFrame(iface);

  LONG readBytes;
  LONG readSamples;

  TRACE("(%p,%d)\n", iface, lPos);

  /* We don't want negative start values! -- marks invalid buffer content */
  if (lPos < 0)
    return NULL;

  /* check state */
  if (This->pStream == NULL)
    return NULL;
  if (This->lpInFormat == NULL)
    return NULL;

  /* Could stream have changed? */
  if (! This->bFixedStream) {
    AVISTREAMINFOW sInfo;

    IAVIStream_Info(This->pStream, &sInfo, sizeof(sInfo));

    /* Edits invalidate the cached frame position. */
    if (sInfo.dwEditCount != This->dwEditCount) {
      This->dwEditCount   = sInfo.dwEditCount;
      This->lCurrentFrame = -1;
    }

    if (sInfo.dwFormatChangeCount != This->dwFormatChangeCount) {
      /* stream has changed; renegotiate output format, falling back to
       * the stream's own format if the old output format is rejected */
      if (This->lpOutFormat != NULL) {
	BITMAPINFOHEADER bi;

	bi = *This->lpOutFormat;
	AVIFILE_CloseCompressor(This);

	if (FAILED(IGetFrame_SetFormat(iface, &bi, NULL, 0, 0, -1, -1))) {
	  if (FAILED(IGetFrame_SetFormat(iface, NULL, NULL, 0, 0, -1, -1)))
	    return NULL;
	}
      } else if (FAILED(IGetFrame_SetFormat(iface, NULL, NULL, 0, 0, -1, -1)))
	return NULL;
    }
  }

  if (lPos != This->lCurrentFrame) {
    /* Start decoding at the nearest key frame at or before lPos... */
    LONG lNext = IAVIStream_FindSample(This->pStream,lPos,FIND_KEY|FIND_PREV);

    if (lNext == -1)
      return NULL; /* frame doesn't exist */
    /* ...unless we already hold a closer, earlier decoded frame. */
    if (lNext <= This->lCurrentFrame && This->lCurrentFrame < lPos)
      lNext = This->lCurrentFrame + 1;

    for (; lNext <= lPos; lNext++) {
      /* new format for this frame? */
      if (This->bFormatChanges) {
	IAVIStream_ReadFormat(This->pStream, lNext,
			      This->lpInFormat, &This->cbInFormat);
	if (This->lpOutFormat != NULL) {
	  if (This->lpOutFormat->biBitCount <= 8)
	    ICDecompressGetPalette(This->hic, This->lpInFormat,
				   This->lpOutFormat);
	}
      }

      /* read input frame; on failure, retry with a grown buffer */
      while (FAILED(AVIStreamRead(This->pStream, lNext, 1, This->lpInBuffer,
				  This->cbInBuffer, &readBytes, &readSamples))) {
	/* not enough memory for input buffer? */
	readBytes = 0;
	if (FAILED(AVIStreamSampleSize(This->pStream, lNext, &readBytes)))
	  return NULL; /* bad thing, but bad things will happen */
	if (readBytes <= 0) {
	  ERR(": IAVIStream::Read doesn't return needed bytes!\n");
	  return NULL;
	}

	/* IAVIStream::Read failed because of other reasons not buffersize? */
	if (This->cbInBuffer >= readBytes)
	  break;
	/* lpInFormat and lpInBuffer share one allocation: header first,
	 * frame bits right behind it — so realloc the whole thing. */
	This->cbInBuffer = This->cbInFormat + readBytes;
	This->lpInFormat = HeapReAlloc(GetProcessHeap(), 0, This->lpInFormat, This->cbInBuffer);
	if (This->lpInFormat == NULL)
	  return NULL; /* out of memory */
	This->lpInBuffer = (BYTE*)This->lpInFormat + This->cbInFormat;
      }

      if (readSamples != 1) {
	ERR(": no frames read\n");
	return NULL;
      }
      if (readBytes != 0) {
	This->lpInFormat->biSizeImage = readBytes;

	/* nothing to decompress? */
	if (This->hic == NULL) {
	  This->lCurrentFrame = lPos;
	  return This->lpInFormat;
	}

	if (This->bResize) {
	  ICDecompressEx(This->hic,0,This->lpInFormat,This->lpInBuffer,0,0,
			 This->lpInFormat->biWidth,This->lpInFormat->biHeight,
			 This->lpOutFormat,This->lpOutBuffer,This->x,This->y,
			 This->dx,This->dy);
	} else {
	  ICDecompress(This->hic, 0, This->lpInFormat, This->lpInBuffer,
		       This->lpOutFormat, This->lpOutBuffer);
	}
      }
    } /* for (lNext < lPos) */
  } /* if (This->lCurrentFrame != lPos) */

  return (This->hic == NULL ? This->lpInFormat : This->lpOutFormat);
}
// Fill up any empty audio buffers and ship them out to the device. BOOL aviaudioiFillBuffers(void) { LONG lRead; MMRESULT mmResult; LONG lSamplesToPlay; if (!sfPlaying) return TRUE; while (swBuffersOut < swBuffers) { if (slCurrent >= slEnd) { if (sfLooping) { slCurrent = slBegin; } else { break; } } lSamplesToPlay = slEnd - slCurrent; if (lSamplesToPlay > AUDIO_BUFFER_SIZE / slSampleSize ) lSamplesToPlay = AUDIO_BUFFER_SIZE / slSampleSize ; //ErrMsg("slCurrent %ld, lSamplesToPlay %ld, toplay %ld",slCurrent, lSamplesToPlay, slEnd - slCurrent); long retval = AVIStreamRead(spavi, slCurrent, lSamplesToPlay, salpAudioBuf[swNextBuffer]->lpData, AUDIO_BUFFER_SIZE, (long *)&salpAudioBuf[swNextBuffer]->dwBufferLength, &lRead); //ErrMsg("slCurrent %ld, lSamplesToPlay %ld, toplay %ld",slCurrent, lSamplesToPlay, slEnd - slCurrent); //over here //This seems to be the condition related to the non-stopping at end of movie if ((lRead <= 0) && (lSamplesToPlay>0)) { //retry retval = AVIStreamRead(spavi, slCurrent, lSamplesToPlay, salpAudioBuf[swNextBuffer]->lpData, AUDIO_BUFFER_SIZE, (long *)&salpAudioBuf[swNextBuffer]->dwBufferLength, &lRead); if ((lRead <= 0) && (lSamplesToPlay>0)) { slCurrent += lSamplesToPlay; break; } } if (lRead != lSamplesToPlay) { if (lRead == lSamplesToPlay-1) { //do nothing...allow it to pass on } else { return FALSE; } } slCurrent += lRead; mmResult = waveOutWrite(shWaveOut, salpAudioBuf[swNextBuffer],sizeof(WAVEHDR)); if (mmResult != 0) { //::MessageBox(NULL,"Waveoutwrite problem","note",MB_OK); return FALSE; } ++swBuffersOut; ++swNextBuffer; if (swNextBuffer >= swBuffers) swNextBuffer = 0; }//while if ((swBuffersOut == 0) && (slCurrent >= slEnd)) { aviaudioStop(); } // All buffers Filled return TRUE; }
// Open the video stream of strFilePath, run every frame through the video
// agent, and write the processed frames to "<source>.Processed.avi".
// Only uncompressed (BI_RGB) streams whose format fits in a plain
// BITMAPINFOHEADER are handled.
// NOTE(review): the early `return`s below skip AVIFileExit() and never
// release m_pAviStream, and the malloc result is unchecked — flagged but
// left as-is here.
void VideoHelper::OpenVideo(CString strFilePath, FrameData& data)
{
    AVIFileInit();
    LONG hr;

    hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);

    if (hr != 0){
        // Handle failure.
        AfxMessageBox(L"Failed to open file.");
    }
    else
    {
        PAVIFILE pf;
        PAVISTREAM psSmall;
        HRESULT hr;
        AVISTREAMINFO strhdr;
        BITMAPINFOHEADER bi;
        BITMAPINFOHEADER biNew;
        LONG lStreamSize;
        LPVOID lpOld;
        LPVOID lpNew;

        // Determine the size of the format data using
        // AVIStreamFormatSize.
        AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize);
        if (lStreamSize > sizeof(bi)) // Format too large?
            return;

        lStreamSize = sizeof(bi);
        hr = AVIStreamReadFormat(m_pAviStream, 0, &bi, &lStreamSize); // Read format

        if (bi.biCompression != BI_RGB) // Wrong compression format?
            return;

        hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr));

        // Create new AVI file using AVIFileOpen.
        hr = AVIFileOpen(&pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL);
        if (hr != 0)
            return;

        // Set parameters for the new stream.
        biNew = bi;
        SetRect(&strhdr.rcFrame, 0, 0, (int) biNew.biWidth, (int) biNew.biHeight);

        // Create a stream using AVIFileCreateStream.
        hr = AVIFileCreateStream(pf, &psSmall, &strhdr);
        if (hr != 0) {            //Stream created OK? If not, close file.
            AVIFileRelease(pf);
            return;
        }

        // Set format of new stream using AVIStreamSetFormat.
        hr = AVIStreamSetFormat(psSmall, 0, &biNew, sizeof(biNew));
        if (hr != 0) {
            AVIStreamRelease(psSmall);
            AVIFileRelease(pf);
            return;
        }

        // Allocate memory for the bitmaps.
        // NOTE(review): result unchecked; a failed allocation would crash
        // in AVIStreamRead below.
        lpOld = malloc(bi.biSizeImage);

        // Read the stream data using AVIStreamRead.
        for (lStreamSize = AVIStreamStart(m_pAviStream);
             lStreamSize < AVIStreamEnd(m_pAviStream)/*1500*/; lStreamSize++) {
            //Context::Oversubscribe(true);
            hr = AVIStreamRead(m_pAviStream, lStreamSize, 1, lpOld, bi.biSizeImage, NULL, NULL);
            //Context::Oversubscribe(false);
            //memcpy_s(lpNew, bi.biSizeImage, lpOld, bi.biSizeImage);

            // Describe the raw frame for the agent.
            data.m_BBP = bi.biBitCount;
            data.m_ColorPlanes = bi.biPlanes;
            data.m_EndHeight = bi.biHeight;
            data.m_EndWidth = bi.biWidth;
            data.m_pFrame = (BYTE*)lpOld;
            data.m_Pitch = bi.biWidth * (bi.biBitCount / 8);
            data.m_Size = bi.biSizeImage;
            data.m_StartHeight = 0;
            data.m_StartWidth = 0;

            lpNew = m_pVideoAgent->ProcessFrame(data);

            if(NULL != lpNew)
            {
                // Save the compressed data using AVIStreamWrite.
                hr = AVIStreamWrite(psSmall, lStreamSize, 1, lpNew,
                                    biNew.biSizeImage, AVIIF_KEYFRAME, NULL, NULL);
            }
        }

        free(lpOld);

        // Close the stream and file.
        AVIStreamRelease(psSmall);
        AVIFileRelease(pf);
    }
    AVIFileExit();
}
bool CMusic::loadFromVideo(const CString& fileName) { m_loaded = false; m_fileName = fileName; m_file = nullptr; m_fromVideo = true; #ifdef T_SYSTEM_WINDOWS m_sampleCount = 0; ALenum error = alGetError(); CApplication::getApp()->log(CString::fromUTF8("Chargement de la musique de la vidéo %1").arg(m_fileName)); // Ouverture du flux audio if (AVIStreamOpenFromFile(&m_aviStream, m_fileName.toCharArray(), streamtypeAUDIO, 0, OF_READ, nullptr)) { CApplication::getApp()->log("AVIStreamOpenFromFile : impossible de lire le flux audio", ILogger::Error); return false; } LONG buffer_size; AVIStreamRead(m_aviStream, AVIStreamStart(m_aviStream), (-1L), nullptr, 0, &buffer_size, nullptr); PBYTE tmp_format = new BYTE[buffer_size]; AVIStreamReadFormat(m_aviStream, AVIStreamStart(m_aviStream), tmp_format, &buffer_size); LPWAVEFORMATEX wave_format = reinterpret_cast<LPWAVEFORMATEX>(tmp_format); // Lecture du nombre d'échantillons et du taux d'échantillonnage m_nbrSamples = AVIStreamLength(m_aviStream); m_sampleRate = wave_format->nSamplesPerSec; // Détermination du format en fonction du nombre de canaux switch (wave_format->nChannels) { case 1: m_format = AL_FORMAT_MONO16; break; case 2: m_format = AL_FORMAT_STEREO16; break; case 4: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_QUAD16"); break; case 6: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_51CHN16"); break; case 7: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_61CHN16"); break; case 8: if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS")) { return false; } m_format = alGetEnumValue("AL_FORMAT_71CHN16"); break; default: return false; } // Création des buffers OpenAL if (m_buffer[0] == 0 || m_buffer[1] == 0) { alGenBuffers(2, m_buffer); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { 
CSoundEngine::displayOpenALError(error, "alGenBuffers", __LINE__); return false; } // Les buffers sont invalides if (m_buffer[0] == 0 || m_buffer[1] == 0) { CApplication::getApp()->log("Les buffers audio sont invalides", ILogger::Error); return false; } } // Création d'une source if (m_source == 0) { alGenSources(1, &m_source); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alGenSources", __LINE__); return false; } // La source est invalide if (m_source == 0) { CApplication::getApp()->log("La source audio est invalide", ILogger::Error); return false; } } // On remplit les deux buffers readData(m_buffer[0], 44100); readData(m_buffer[1], 44100); // Remplissage avec les échantillons lus alSourceQueueBuffers(m_source, 2, m_buffer); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSourceQueueBuffers", __LINE__); return false; } // Paramètres de la source alSourcei(m_source, AL_LOOPING, false); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alGetSourcei", __LINE__); } alSourcef(m_source, AL_PITCH, 1.0f); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__); } alSourcef(m_source, AL_GAIN, 1.0f); // Traitement des erreurs if ((error = alGetError() ) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__); } alSource3f(m_source, AL_POSITION, 0.0f, 0.0f, 0.0f); // Traitement des erreurs if ((error = alGetError()) != AL_NO_ERROR) { CSoundEngine::displayOpenALError(error, "alSource3f", __LINE__); } m_loaded = true; #endif return true; }
void CRotateAVIDlg::ProcessAVI(const TCHAR *source_filename, const TCHAR *dest_filename, eRotation rot) { TCHAR error_buf[1024]; PAVIFILE source_avi = 0; PAVIFILE dest_avi = 0; PAVISTREAM pSrcVidStream = 0; PAVISTREAM pSrcAudioStream = 0; PAVISTREAM pDestVidStream = 0; PAVISTREAM pDestAudioStream = 0; char *pSrcBuffer = 0; char *pJPGBuffer = 0; char *pDecompBuffer = 0; char *pRotateBuffer = 0; char *pDestBuffer = 0; AVIFileInit(); // source setup if (AVIFileOpen(&source_avi, source_filename, OF_READ, NULL) != AVIERR_OK) { _stprintf(error_buf, TEXT("Couldn't open file %s"), source_filename); MessageBox(error_buf); goto cleanup; } AVIFILEINFO src_avi_info; AVIFileInfo(source_avi, &src_avi_info, sizeof(AVIFILEINFO)); if (AVIFileGetStream(source_avi, &pSrcVidStream, streamtypeVIDEO, 0) != AVIERR_OK) { _stprintf(error_buf, TEXT("No video stream in %s"), source_filename); MessageBox(error_buf); goto cleanup; } BITMAPINFOHEADER srcBIH; long srcvidstreamsize; AVIStreamFormatSize(pSrcVidStream, 0, &srcvidstreamsize); if (srcvidstreamsize > sizeof(BITMAPINFOHEADER)) { _stprintf(error_buf, TEXT("Unable to handle video stream format in %s"), source_filename); MessageBox(error_buf); goto cleanup; } srcvidstreamsize = sizeof(BITMAPINFOHEADER); if (AVIStreamReadFormat(pSrcVidStream, 0, &srcBIH, &srcvidstreamsize) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error reading stream format in %s"), source_filename); MessageBox(error_buf); goto cleanup; } if (srcBIH.biCompression != MKFOURCC('M','J','P','G')) { _stprintf(error_buf, TEXT("%s is not motion JPEG format"), source_filename); MessageBox(error_buf); goto cleanup; } AVISTREAMINFO vidstream_info; if (AVIStreamInfo(pSrcVidStream, &vidstream_info, sizeof(AVISTREAMINFO)) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error reading stream info in %s"), source_filename); MessageBox(error_buf); goto cleanup; } int firstVidSrcFrame = AVIStreamStart(pSrcVidStream); if (firstVidSrcFrame == -1) { _stprintf(error_buf, TEXT("Video stream start 
error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } int numVidSrcFrames = AVIStreamLength(pSrcVidStream); if (numVidSrcFrames == -1) { _stprintf(error_buf, TEXT("Video stream length error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } AVIFileGetStream(source_avi, &pSrcAudioStream, streamtypeAUDIO, 0); int firstAudioSrcFrame = 0; int numAudioSrcFrames = 0; if (pSrcAudioStream) { firstAudioSrcFrame = AVIStreamStart(pSrcAudioStream); if (firstAudioSrcFrame == -1) { _stprintf(error_buf, TEXT("Audio stream start error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } numAudioSrcFrames = AVIStreamLength(pSrcAudioStream); if (numAudioSrcFrames == -1) { _stprintf(error_buf, TEXT("Audio stream length error in %s"), source_filename); MessageBox(error_buf); goto cleanup; } } // dest setup BITMAPINFOHEADER destBIH; destBIH = srcBIH; if (rot != CW_180) { destBIH.biWidth = srcBIH.biHeight; destBIH.biHeight = srcBIH.biWidth; } if (AVIFileOpen(&dest_avi, dest_filename, OF_CREATE | OF_WRITE, NULL) != AVIERR_OK) { _stprintf(error_buf, TEXT("Couldn't open file %s"), dest_filename); MessageBox(error_buf); goto cleanup; } vidstream_info.rcFrame.left = vidstream_info.rcFrame.top = 0; vidstream_info.rcFrame.right = destBIH.biWidth; vidstream_info.rcFrame.bottom = destBIH.biHeight; if (AVIFileCreateStream(dest_avi, &pDestVidStream, &vidstream_info) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error creating video stream in %s"), dest_filename); MessageBox(error_buf); goto cleanup; } if (AVIStreamSetFormat(pDestVidStream, 0, &destBIH, sizeof(BITMAPINFOHEADER)) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error setting video stream format in %s"), dest_filename); MessageBox(error_buf); goto cleanup; } if (AVIStreamSetFormat(pDestVidStream, 0, &destBIH, sizeof(BITMAPINFOHEADER)) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error setting video stream format in %s"), dest_filename); MessageBox(error_buf); goto cleanup; } // video memory int 
img_rgb_size = srcBIH.biHeight * srcBIH.biWidth * 3; pSrcBuffer = new char[img_rgb_size]; pJPGBuffer = new char[img_rgb_size]; pDecompBuffer = new char[img_rgb_size]; pRotateBuffer = new char[img_rgb_size]; pDestBuffer = new char[img_rgb_size]; long bytes_read; long bytes_written; for (int i = firstVidSrcFrame; i < numVidSrcFrames; ++i) { if (AVIStreamRead(pSrcVidStream, i, 1, pSrcBuffer, img_rgb_size, &bytes_read, 0) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error reading video stream from %s"), source_filename); MessageBox(error_buf); goto cleanup; } // well-form the jpg int jpglen = ConstructWellFormedJPEG(pSrcBuffer, pJPGBuffer, bytes_read); // decompress JPEGHandler jpgh_decomp(pJPGBuffer, jpglen); jpgh_decomp.DecompressToRGB(pDecompBuffer, img_rgb_size); // rotate int destx, desty; char *pRotSrc; char *pRotDest; switch (rot) { case CW_90: for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy) { for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx) { destx = srcBIH.biHeight-1-srcy; desty = srcx; pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3]; pRotDest = &pRotateBuffer[(desty * srcBIH.biHeight + destx) * 3]; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; } } break; case CW_180: for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy) { for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx) { destx = srcBIH.biWidth-1-srcx; desty = srcBIH.biHeight-1-srcy; pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3]; pRotDest = &pRotateBuffer[(desty * srcBIH.biWidth + destx) * 3]; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; } } break; case ACW_90: for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy) { for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx) { destx = srcy; desty = srcBIH.biWidth-1-srcx; pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3]; pRotDest = &pRotateBuffer[(desty * srcBIH.biHeight + destx) * 3]; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; *pRotDest++ = *pRotSrc++; } } 
break; } // compress JPEGHandler jpgh_comp(pRotateBuffer, img_rgb_size); if (rot != CW_180) destBIH.biSizeImage = jpgh_comp.CompressFromRGB(pDestBuffer, img_rgb_size, srcBIH.biHeight, srcBIH.biWidth); else destBIH.biSizeImage = jpgh_comp.CompressFromRGB(pDestBuffer, img_rgb_size, srcBIH.biWidth, srcBIH.biHeight); if (AVIStreamWrite(pDestVidStream, i, 1, pDestBuffer, destBIH.biSizeImage, AVIIF_KEYFRAME, NULL, &bytes_written) != AVIERR_OK) { _stprintf(error_buf, TEXT("Error writing video stream to %s"), dest_filename); MessageBox(error_buf); goto cleanup; } } cleanup: delete[] pSrcBuffer; delete[] pDestBuffer; delete[] pJPGBuffer; delete[] pDecompBuffer; delete[] pRotateBuffer; if (pDestAudioStream) AVIStreamRelease(pDestAudioStream); if (pDestVidStream) AVIStreamRelease(pDestVidStream); if (pSrcAudioStream) AVIStreamRelease(pSrcAudioStream); if (pSrcVidStream) AVIStreamRelease(pSrcVidStream); if (dest_avi) AVIFileRelease(dest_avi); if (source_avi) AVIFileRelease(source_avi); AVIFileExit(); }
// Dump up to 101 frames of the opened AVI to Frame%03d.bmp files in
// lpszFolderName (or the current directory). For BI_RGB 24/32-bit frames
// the raw stream bytes are also read directly and cross-checked against
// the decoded frame from AVIStreamGetFrame; any mismatch aborts.
// Returns S_OK, or E_FAIL / the failing HRESULT on error.
HRESULT CAviBitmap::GetAllFrames(LPCTSTR lpszFolderName)
{
    if(m_pGetFrame == NULL)
    {
        m_szLastErrorMsg.Format(_T("Not initialized yet"));
        return E_FAIL;
    }

    HRESULT hr = S_OK;

    // Header buffer sized for a BITMAPINFO plus a full 256-entry palette.
    int nBmpInfoHdrSize = sizeof(BITMAPINFO) + sizeof(RGBQUAD) * 256;
    BITMAPINFOHEADER* lpBmpInfoHdr = (BITMAPINFOHEADER*)(new BYTE[nBmpInfoHdrSize]);
    LONG lpcbFormat = nBmpInfoHdrSize;
    BYTE* lpDib = NULL;
    BYTE* lpBuffer = NULL;
    LONG lBytes = 0, lSamples = 0;
    BOOL bReadRaw = FALSE;

    int nPos = 0;
    int nSampleCount = min(m_lSampleCount, 101); // cap the number of dumped frames
    for(nPos = 0; nPos < nSampleCount; nPos++)
    {
        //Get the frame format
        hr = AVIStreamReadFormat(m_pAviStream, nPos, lpBmpInfoHdr, &lpcbFormat);
        if(hr != S_OK)
        {
            m_szLastErrorMsg.Format(_T("Unable to Get the sample format: %d"), nPos);
            break;
        }

        lpBuffer = NULL;
        //Try to read raw data when the bitmap is BI_RGB
        if(lpBmpInfoHdr->biCompression == BI_RGB
            && (lpBmpInfoHdr->biBitCount == 24 || lpBmpInfoHdr->biBitCount == 32))
        {
            //Get the frame data
            lpBuffer = new BYTE[m_biWanted.biSizeImage];
            hr = AVIStreamRead(m_pAviStream, nPos, 1, lpBuffer, m_biWanted.biSizeImage, &lBytes, &lSamples);
            if(hr != S_OK)
            {
                m_szLastErrorMsg.Format(_T("Unable to Get the sample data: %d"), nPos);
                break;
            }
        }
        else
        {
            CString szFourCC;
            FourCC2Str(m_aviInfo.fccHandler, szFourCC);
            AfxTrace(_T("Non-RGB format at frame(%03d)=%s, 0x%08X\n"), nPos, szFourCC, lpBmpInfoHdr->biCompression);
        }

        //Get the frame at nPos
        // NOTE(review): the returned DIB is assumed to be a packed
        // BITMAPINFOHEADER immediately followed by the bits (no palette) —
        // true for the 24/32-bit formats compared here.
        lpDib = (BYTE*)AVIStreamGetFrame(m_pGetFrame, nPos);
        if(lpDib == NULL)
        {
            m_szLastErrorMsg.Format(_T("Unable to Get the sample: %d"), nPos);
            hr = E_FAIL;
            break;
        }

        //compare the data retrieved in 2 ways if needed
        if(lpBuffer != NULL)
        {
            if(memcmp(lpBuffer, lpDib + sizeof(BITMAPINFOHEADER), lpBmpInfoHdr->biSizeImage) != 0)
            {
                m_szLastErrorMsg.Format(_T("not equals: %d"), nPos);
                hr = E_FAIL;
                break;
            }
        }

        CString szFileName;
        if(lpszFolderName == NULL)
        {
            szFileName.Format(_T(".\\Frame%03d.bmp"), nPos);
        }
        else
        {
            szFileName.Format(_T("%s\\Frame%03d.bmp"), lpszFolderName, nPos);
        }

        // NOTE(review): pTemp is unused.
        BITMAPINFOHEADER* pTemp = (BITMAPINFOHEADER*)lpDib;
        // hr = SaveBitmap(lpBmpInfoHdr, lpBuffer, lpBmpInfoHdr->biSizeImage, szFileName);
        hr = SaveBitmap(&m_biWanted, lpDib + sizeof(BITMAPINFOHEADER), m_biWanted.biSizeImage, szFileName);

        if(lpBuffer != NULL)
        {
            delete [] lpBuffer;
            lpBuffer = NULL;
        }
        //Done
    }

    // lpBuffer is non-null here only when the loop broke early.
    if(lpBuffer != NULL)
    {
        delete [] lpBuffer;
        lpBuffer = NULL;
    }
    if(lpBmpInfoHdr != NULL)
    {
        delete [] lpBmpInfoHdr;
        lpBmpInfoHdr = NULL;
    }
    ReleaseMemory();

    return hr;
}
// Fill one OpenAL buffer with up to nbr_samples 16-bit samples, either
// from an audio file (libsndfile) or from the audio stream of a video
// (AVIStreamRead, Windows only). Loops or stops at end of stream
// depending on m_loop. Returns false on a read/OpenAL error or when no
// source is available.
bool CMusic::readData(ALuint buffer, ALsizei nbr_samples)
{
    ALenum error = alGetError();

    // Samples are read as signed 16-bit integers.
    std::vector<ALshort> samples(nbr_samples);

    // Reading from a file
    if (m_file)
    {
        // Fill the buffer from the file.
        if (sf_read_short(m_file, &samples[0], nbr_samples) == 0)
        {
            // End of file: rewind to the beginning if looping.
            if (m_loop)
            {
                sf_seek(m_file, 0, SEEK_SET);
#ifdef T_SYSTEM_WINDOWS
                m_sampleCount = 0;
#endif
            }
            else
            {
                m_play = false;
            }

            return true;
        }

        // Hand the samples to OpenAL.
        alBufferData(buffer, m_format, &samples[0], nbr_samples * sizeof(ALushort), m_sampleRate);

        // Error handling.
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alBufferData", __LINE__);
            return false;
        }

        return true;
    }
#ifdef T_SYSTEM_WINDOWS
    // Reading from a video
    else if (m_fromVideo && m_aviStream)
    {
        LONG samples_read;

        switch (AVIStreamRead(m_aviStream, AVIStreamStart(m_aviStream) + m_sampleCount, nbr_samples, &samples[0], nbr_samples * sizeof(ALshort), nullptr, &samples_read))
        {
            // No error
            case 0:
                break;

            case AVIERR_BUFFERTOOSMALL:
                CApplication::getApp()->log("AVIStreamRead : The buffer size was smaller than a single sample of data.", ILogger::Error);
                return false;

            case AVIERR_MEMORY:
                CApplication::getApp()->log("AVIStreamRead : There was not enough memory to complete the read operation.", ILogger::Error);
                return false;

            case AVIERR_FILEREAD:
                CApplication::getApp()->log("AVIStreamRead : A disk error occurred while reading the file.", ILogger::Error);
                return false;

            default:
                CApplication::getApp()->log("AVIStreamRead : Unknown error", ILogger::Error);
                return false;
        }

        // Divide by the sample size.
        // NOTE(review): samples_read is AVIStreamRead's sample count, and
        // the /2 plus the per-format end-of-stream tests below mix sample
        // and short-count units — verify against the stream's block size.
        m_sampleCount += samples_read / 2;

        // Fewer samples than requested: end of stream reached, so loop
        // back to the start or stop playing.
        //TODO: check for each possible format
        if ((m_format == AL_FORMAT_MONO16 && samples_read < nbr_samples) ||
            (m_format == AL_FORMAT_STEREO16 && samples_read * 2 < nbr_samples))
        {
            if (m_loop)
            {
                m_sampleCount = 0;
            }
            else
            {
                m_play = false;
            }
        }

        // Hand the samples to OpenAL.
        alBufferData(buffer, m_format, &samples[0], samples_read * sizeof(ALushort), m_sampleRate);

        // Error handling.
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alBufferData", __LINE__);
            return false;
        }

        return true;
    }
#endif

    return false;
}
// Paint a waveform view of the PCM audio in [lStart, lStart+lLen) (given
// in stream TIME units) into *prc. Mono streams get one centered trace;
// stereo streams get left (blue) over right (green). Regions outside the
// stream are painted dark gray; non-PCM formats are silently skipped.
void PaintAudio( HDC hdc, PRECT prc, PAVISTREAM pavi, LONG lStart, LONG lLen)
{
#ifndef INTERIM_64_BIT	// CCJ
    // NOTE(review): lpAudio is a local that is always NULL here, so the
    // GlobalReAllocPtr branch below is dead code, and the early `return`s
    // after allocation leak the buffer (it is only freed at the end).
    LPVOID lpAudio=NULL;
    PCMWAVEFORMAT wf;
    int i;
    int x,y;
    int w,h;
    BYTE b;
    HBRUSH hbr;
    RECT rc = *prc;
    LONG lBytes;
    LONG l, lLenOrig = lLen;
    LONG lWaveBeginTime = AVIStreamStartTime(pavi);
    LONG lWaveEndTime = AVIStreamEndTime(pavi);

    //
    // We can't draw before the beginning of the stream - adjust
    //
    if (lStart < lWaveBeginTime) {
	lLen -= lWaveBeginTime - lStart;
	lStart = lWaveBeginTime;
	// right justify the legal samples in the rectangle - don't stretch
	rc.left = rc.right - (int)muldiv32(rc.right - rc.left, lLen, lLenOrig);
    }

    //
    // We can't draw past the end of the stream
    //
    if (lStart + lLen > lWaveEndTime) {
	lLenOrig = lLen;
	lLen = max(0, lWaveEndTime - lStart);	// maybe nothing to draw!
	// left justify the legal samples in the rectangle - don't stretch
	rc.right = rc.left + (int)muldiv32(rc.right - rc.left, lLen, lLenOrig);
    }

    // Now start working with samples, not time
    l = lStart;
    lStart = AVIStreamTimeToSample(pavi, lStart);
    lLen   = AVIStreamTimeToSample(pavi, l + lLen) - lStart;

    //
    // Get the format of the wave data
    //
    l = sizeof(wf);
    AVIStreamReadFormat(pavi, lStart, &wf, &l);
    if (!l)
	return;

    w = rc.right - rc.left;
    h = rc.bottom - rc.top;

    //
    // We were starting before the beginning or continuing past the end.
    // We're not painting in the whole original rect --- use a dark background
    //
    if (rc.left > prc->left) {
	SelectObject(hdc, GetStockObject(DKGRAY_BRUSH));
	PatBlt(hdc, prc->left, rc.top, rc.left - prc->left, rc.bottom - rc.top, PATCOPY);
    }
    if (rc.right < prc->right) {
	SelectObject(hdc, GetStockObject(DKGRAY_BRUSH));
	PatBlt(hdc, rc.right, rc.top, prc->right - rc.right, rc.bottom - rc.top, PATCOPY);
    }

#define BACKBRUSH  (GetSysColor(COLOR_BTNFACE))	   // background
#define MONOBRUSH  (GetSysColor(COLOR_BTNSHADOW))  // for mono audio
#define LEFTBRUSH  (RGB(0,0,255))		   // left channel
#define RIGHTBRUSH (RGB(0,255,0))		   // right channel
#define HPOSBRUSH  (RGB(255,0,0))		   // current position

    //
    // Paint the background
    //
    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(BACKBRUSH));
    PatBlt(hdc, rc.left, rc.top, w, h, PATCOPY);
    DeleteObject(SelectObject(hdc, hbr));

    //
    // !!! we can only paint PCM data right now. Sorry!
    //
    if (wf.wf.wFormatTag != WAVE_FORMAT_PCM)
	return;

    //
    // How many bytes are we painting? Alloc some space for them
    //
    lBytes = lLen * wf.wf.nChannels * wf.wBitsPerSample / 8;
    if (!lpAudio)
	lpAudio = GlobalAllocPtr (GHND, lBytes);
    else if ((LONG)GlobalSizePtr(lpAudio) < lBytes)
	lpAudio = GlobalReAllocPtr(lpAudio, lBytes, GMEM_MOVEABLE);
    if (!lpAudio)
	return;

    //
    // Read in the wave data
    //
    AVIStreamRead(pavi, lStart, lLen, lpAudio, lBytes, NULL, &l);
    if (l != lLen)
	return;

#define MulDiv(a,b,c) (UINT)((DWORD)(UINT)(a) * (DWORD)(UINT)(b) / (UINT)(c))

    //
    // !!! Flickers less painting it NOW or LATER?
    // First show the current position as a bar
    //
    //hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(HPOSBRUSH));
    //PatBlt(hdc, prc->right / 2, prc->top, 1, prc->bottom - prc->top, PATCOPY);
    //DeleteObject(SelectObject(hdc, hbr));

    //
    // Paint monochrome wave data
    //
    if (wf.wf.nChannels == 1) {

	//
	// Draw the x-axis
	//
	hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(MONOBRUSH));
	y = rc.top + h/2;
	PatBlt(hdc, rc.left, y, w, 1, PATCOPY);

	//
	// 8 bit data is centred around 0x80
	//
	if (wf.wBitsPerSample == 8) {
	    for (x=0; x<w; x++) {
		// which byte of audio data belongs at this pixel?
		b = *((HPBYTE)lpAudio + muldiv32(x, lLen, w));

		if (b > 0x80) {
		    i = y - MulDiv(b - 0x80, (h / 2), 128);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = y + MulDiv(0x80 - b, (h / 2), 128);
		    PatBlt(hdc, rc.left + x, y, 1, i - y, PATCOPY);
		}
	    }
	}

	//
	// 16 bit data is centred around 0x00
	//
	else if (wf.wBitsPerSample == 16) {
	    for (x=0; x<w; x++) {
		// which byte of audio data belongs at this pixel?
		i = *((HPINT)lpAudio + muldiv32(x,lLen,w));

		if (i > 0) {
		    i = y - (int) ((LONG)i * (h/2) / 32768);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = (int) ((LONG)i * (h/2) / 32768);
		    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
		}
	    }
	}
	DeleteObject(SelectObject(hdc, hbr));
    } // endif mono

    //
    // Draw stereo waveform data
    //
    else if (wf.wf.nChannels == 2) {

	//
	// 8 bit data is centred around 0x80
	//
	if (wf.wBitsPerSample == 8) {

	    // Left channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(LEFTBRUSH));
	    y = rc.top + h/4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		b = *((HPBYTE)lpAudio + muldiv32(x,lLen,w) * 2);

		if (b > 0x80) {
		    i = y - MulDiv(b-0x80,(h/4),128);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = y + MulDiv(0x80-b,(h/4),128);
		    PatBlt(hdc, rc.left+x, y, 1, i-y, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));

	    // Right channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(RIGHTBRUSH));
	    y = rc.top + h * 3 / 4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		b = *((HPBYTE)lpAudio + muldiv32(x,lLen,w) * 2 + 1);

		if (b > 0x80) {
		    i = y - MulDiv(b-0x80,(h/4),128);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = y + MulDiv(0x80-b,(h/4),128);
		    PatBlt(hdc, rc.left+x, y, 1, i-y, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));
	}

	//
	// 16 bit data is centred around 0x00
	//
	else if (wf.wBitsPerSample == 16) {

	    // Left channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(LEFTBRUSH));
	    y = rc.top + h/4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		i = *((HPINT)lpAudio + muldiv32(x,lLen,w) * 2);

		if (i > 0) {
		    i = y - (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));

	    // Right channel
	    hbr = (HBRUSH)SelectObject(hdc, CreateSolidBrush(RIGHTBRUSH));
	    y = rc.top + h * 3 / 4;
	    PatBlt(hdc, rc.left, y, w, 1, PATCOPY);
	    for (x=0; x<w; x++) {
		i = *((HPINT)lpAudio + muldiv32(x,lLen,w) * 2 + 1);

		if (i > 0) {
		    i = y - (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, i, 1, y-i, PATCOPY);
		} else {
		    i = (int) ((LONG)i * (h/4) / 32768);
		    PatBlt(hdc, rc.left+x, y, 1, -i, PATCOPY);
		}
	    }
	    DeleteObject(SelectObject(hdc, hbr));
	}
    } // endif stereo

    if (lpAudio) {
	GlobalFreePtr(lpAudio);
	lpAudio = NULL;
    }
#endif	// INTERIM_64_BIT
}
// Build a per-channel summed-area table (SAT) of audio amplitude for the
// stream, at roughly <resolution> buckets per second, so callers can cheaply
// query the average level over any span.  Only PCM data is supported.  On
// any failure the object is left empty (sat == NULL, satLen == 0).
FilteredWave::FilteredWave(PAVISTREAM pavi,int resolution)
{
#ifndef INTERIM_64_BIT // CCJ
    PCMWAVEFORMAT wf;
    LPVOID lpAudio=NULL;
    LONG l, lBytes;
    DWORD *ptr, sum[2], pos, v;
    LONG s = AVIStreamStartTime(pavi);
    LONG e = AVIStreamEndTime(pavi);
    LONG len = e-s;
    LONG sstart = AVIStreamTimeToSample(pavi,s);
    LONG send = AVIStreamTimeToSample(pavi,e);
    LONG slen = send-sstart;
    HPBYTE bptr;
    HPINT iptr;
    int in, b, av[2];
    int j,k;
    DWORD i;

    // Start out empty so every early return leaves a consistent object.
    sat = NULL;
    satLen = 0;
    start = (s*TIME_TICKSPERSEC)/1000;
    end = (e*TIME_TICKSPERSEC)/1000;

    // Grab the wave format; only PCM can be analyzed.
    l = sizeof(wf);
    AVIStreamReadFormat(pavi,0,&wf,&l);
    if (!l) {
        return;
    }
    if (wf.wf.wFormatTag != WAVE_FORMAT_PCM) {
        return;
    }

    // Read the whole sample range into one buffer.
    lBytes = slen * wf.wf.nChannels * wf.wBitsPerSample/8;
    lpAudio = GlobalAllocPtr(GHND,lBytes);
    if (!lpAudio) {
        return;
    }
    AVIStreamRead(pavi,sstart,slen,lpAudio,lBytes,NULL,&l);
    if (l != slen) {
        GlobalFreePtr(lpAudio);
        return;
    }

    // Bucket count: ~<resolution> SAT entries per second of audio.
    satLen = (len*resolution)/(1000);
    // BUGFIX: guard the divisions below.  A very short stream (or tiny
    // resolution) made satLen zero -> numSum = slen/0, and slen < satLen
    // made numSum zero -> satLen = slen/0; both crashed with a div-by-zero.
    if (satLen <= 0 || slen < satLen) {
        satLen = 0;
        GlobalFreePtr(lpAudio);
        return;
    }
    numSum = slen/satLen;       // samples folded into each SAT bucket
    satLen = slen/numSum;       // re-derive so satLen*numSum covers slen
    sat = new DWORD[satLen * wf.wf.nChannels];
    if (!sat) {
        GlobalFreePtr(lpAudio);
        return;
    }
    channels = wf.wf.nChannels;
    ptr = sat;
    pos = 0;

    //
    // First find the average (DC) value per channel.
    //
    av[0] = av[1] = 0;
    iptr = (HPINT)lpAudio;
    bptr = (HPBYTE)lpAudio;
    for (i=0; i<(DWORD)slen; i++) {
        if (wf.wBitsPerSample==8) {
            for (j=0;j<channels;j++) {
                av[j] += *bptr++ - 0x80;   // 8-bit PCM is centred on 0x80
            }
        } else {
            for (j=0;j<channels;j++) {
                av[j] += *iptr++;          // 16-bit PCM is centred on 0
            }
        }
    }
    for (j=0;j<channels;j++) {
        av[j] /= slen;
    }

    //
    // Now build the SAT moving the average to 0.  sum[] is deliberately
    // cumulative across buckets - each entry holds the running total.
    //
    sum[0] = sum[1] = 0;
    iptr = (HPINT)lpAudio;
    bptr = (HPBYTE)lpAudio;
    for (i=0; i<satLen; i++) {
        for (k=0; k<numSum; k++,pos++) {
            if (wf.wBitsPerSample==8) {
                for (j=0;j<channels;j++) {
                    b = *bptr++ - av[j];
                    // accumulate |amplitude|, scaled up to 16-bit range
                    if (b > 0x80) {
                        v = (b - 0x80) * 256;
                    } else {
                        v = (0x80 - b) * 256;
                    }
                    sum[j] += v;
                }
            } else {
                for (j=0;j<channels;j++) {
                    in = *iptr++ - av[j];
                    if (in<0) in = -in;    // accumulate |amplitude|
                    sum[j] += in;
                }
            }
        }
        for (j=0;j<channels;j++) {
            *ptr++ = sum[j];
        }
    }

    // Find the max per-bucket average, used for display normalization.
    max[0] = max[1] = 0;
    for (i=1; i<satLen; i++) {
        for (j=0;j<channels;j++) {
            v = (sat[i*channels+j]-sat[(i-1)*channels+j])/numSum;
            if (v>max[j]) max[j] = v;
        }
    }
    GlobalFreePtr(lpAudio);
#endif // INTERIM_64_BIT
}
void audioReadChunk() { LONG a,b; AVIStreamRead(audioStream,encodeCursor,audioChunkSize,audioEncoded,audioChunkSize,&a,&b); encodeCursor += b; }
// Fill up any empty audio buffers and ship them out to the device. BOOL aviaudioiFillBuffers(void) { LONG lRead; MMRESULT mmResult; LONG lSamplesToPlay; if (!sfPlaying) return TRUE; while (swBuffersOut < swBuffers) { if (slCurrent >= slEnd) { if (sfLooping) { slCurrent = slBegin; } else break; } lSamplesToPlay = slEnd - slCurrent; if (lSamplesToPlay > AUDIO_BUFFER_SIZE / slSampleSize ) lSamplesToPlay = AUDIO_BUFFER_SIZE / slSampleSize ; //ErrMsg("slCurrent %ld, lSamplesToPlay %ld, toplay %ld swBuffers %ld, swBuffersOut %d,swNextBuffer %d",slCurrent, lSamplesToPlay, slEnd - slCurrent,swBuffers,swBuffersOut,swNextBuffer); /* //this line cause the sond playing to crash... { void* buffer = malloc(AUDIO_BUFFER_SIZE); long retlen = 0; long retval = AVIStreamRead(spavi, slCurrent, lSamplesToPlay, buffer, AUDIO_BUFFER_SIZE, &retlen, &lRead); if (buffer) free(buffer); } ErrMsg("before"); */ //long retval; //if ((swNextBuffer>=0) && (swNextBuffer<MAX_AUDIO_BUFFERS)) //{ // if (salpAudioBuf[swNextBuffer]->lpData) { //if (sizeof(salpAudioBuf[swNextBuffer]->lpData) >= AUDIO_BUFFER_SIZE) //{ //ErrMsg("Correct Size %ld",sizeof(*salpAudioBuf[swNextBuffer]->lpData)); long retval = AVIStreamRead(spavi, slCurrent, lSamplesToPlay, salpAudioBuf[swNextBuffer]->lpData, AUDIO_BUFFER_SIZE, (long *)&salpAudioBuf[swNextBuffer]->dwBufferLength, &lRead); //} //else // ErrMsg("Size %ld",sizeof(*salpAudioBuf[swNextBuffer]->lpData)); //} //else // ErrMsg("2"); //} //else // ErrMsg("3"); //ErrMsg("after..."); //ErrMsg("slCurrent %ld, lSamplesToPlay %ld, toplay %ld",slCurrent, lSamplesToPlay, slEnd - slCurrent); //over here //This seems to be the condition related to the non-stopping at end of movie if ((lRead <= 0) && (lSamplesToPlay>0)) { //retry retval = AVIStreamRead(spavi, slCurrent, lSamplesToPlay, salpAudioBuf[swNextBuffer]->lpData, AUDIO_BUFFER_SIZE, (long *)&salpAudioBuf[swNextBuffer]->dwBufferLength, &lRead); if ((lRead <= 0) && (lSamplesToPlay>0)) { //if (retval == AVIERR_FILEREAD) { 
slCurrent += lSamplesToPlay; break; //} //else return FALSE; } } if (lRead != lSamplesToPlay) { if (lRead == lSamplesToPlay-1) { //do nothing...allow it to pass on } else { return FALSE; } } slCurrent += lRead; mmResult = waveOutWrite(shWaveOut, salpAudioBuf[swNextBuffer],sizeof(WAVEHDR)); if (mmResult != 0) { //::MessageBox(NULL,"Waveoutwrite problem","note",MB_OK); return FALSE; } ++swBuffersOut; ++swNextBuffer; if (swNextBuffer >= swBuffers) swNextBuffer = 0; }//while if ((swBuffersOut == 0) && (slCurrent >= slEnd)) { aviaudioStop(); } // All buffers Filled return TRUE; }
// Open an AVI file for playback: sets up the video stream, a DIB section
// for decoded frames, a Horde3D texture to stream frames into, and (if
// present) pushes the file's entire audio track to the sound component.
// Returns false (after cleaning up) if the video stream, first frame,
// texture, or material sampler cannot be set up.
bool VideoComponent::openAvi(const std::string& filename)
{
    // Stop any currently loaded avi
    closeAvi();
    AVIFileInit();                            // Opens The AVIFile Library
    // Opens The AVI Stream
    if (AVIStreamOpenFromFile(&m_pavi, filename.c_str(), streamtypeVIDEO, 0, OF_READ, NULL) !=0)
    {
        GameLog::errorMessage("Error opening avi: %s", filename.c_str());
        // An Error Occurred Opening The Stream
        AVIFileExit();                        // Release The File
        return false;
    }
    AVIStreamInfo(m_pavi, &m_psi, sizeof(m_psi)); // Reads Information About The Stream Into psi
    m_width = m_psi.rcFrame.right-m_psi.rcFrame.left;   // Width Is Right Side Of Frame Minus Left
    m_height = m_psi.rcFrame.bottom-m_psi.rcFrame.top;  // Height Is Bottom Of Frame Minus Top
    if (!m_resize)
    {
        // Size should be kept
        m_resizeWidth = m_width;
        m_resizeHeight = m_height;
    }
    m_lastframe = AVIStreamLength(m_pavi);    // The Last Frame Of The Stream
    // Rough seconds per frame = total stream time / frame count / 1000
    m_timePerFrame = ((float)AVIStreamSampleToTime(m_pavi, m_lastframe) / (float) m_lastframe) / 1000.0f;
    // Bitmap header describing the (possibly resized) output DIB we render into
    m_bmih.biSize = sizeof (BITMAPINFOHEADER); // Size Of The BitmapInfoHeader
    m_bmih.biPlanes = 1;                       // Bitplanes
    m_bmih.biBitCount = 24;                    // Bits Format We Want 24 / 8 = 3 bytes
    m_bmih.biWidth = m_resizeWidth;            // Width We Want
    m_bmih.biHeight = m_resizeHeight;          // Height We Want
    m_bmih.biCompression= BI_RGB;              // Requested Mode = RGB
    m_hBitmap = CreateDIBSection (m_hdc, (BITMAPINFO*)(&m_bmih), DIB_RGB_COLORS, (void**)(&m_data), NULL, NULL);
    SelectObject (m_hdc, m_hBitmap);           // Select hBitmap Into Our Device Context (hdc)
    // Bitmapinfo header for decoding (needed for xvid) - native AVI size
    m_bmiavih.biSize = sizeof(BITMAPINFOHEADER);
    m_bmiavih.biPlanes = 1;                    // Bitplanes
    m_bmiavih.biBitCount = 24;                 // Bits Format We Want 24 / 8 = 3 bytes
    m_bmiavih.biWidth = m_width;               // Width We Want
    m_bmiavih.biHeight = m_height;             // Height We Want
    m_bmiavih.biCompression = BI_RGB;          // Requested Mode = RGB
    // And some more infos
    m_bmiavih.biClrImportant = 0;
    m_bmiavih.biClrUsed = 0;
    m_bmiavih.biXPelsPerMeter = 0;
    m_bmiavih.biYPelsPerMeter = 0;
    // DWORD-aligned size of one 24bpp scanline times the height
    m_bmiavih.biSizeImage = (((m_bmiavih.biWidth * 3) + 3) & 0xFFFC) * m_bmiavih.biHeight;
    m_pgf=AVIStreamGetFrameOpen(m_pavi, &m_bmiavih);// Create The PGETFRAME Using Our Request Mode
    if (m_pgf==0x0)
    {
        GameLog::errorMessage("Error opening first frame of avi: %s", filename.c_str());
        // An Error Occurred Opening The Frame
        DeleteObject(m_hBitmap);               // Delete The Device Dependant Bitmap Object
        AVIStreamRelease(m_pavi);              // Release The Stream
        AVIFileExit();                         // Release The File
        return false;
    }
    m_fileName = filename;
    // Create buffer for converted data
    // width*height = count pixel; each pixel has 4 channels for rgba with each one byte
    int dataSize = 4*m_resizeWidth*m_resizeHeight;
    m_bgraData = new unsigned char[dataSize];
    // Initialize with 255 (black screen with full alpha)
    memset(m_bgraData, 255, dataSize);
    // Prepare horde texture stream named like the video file name, to get a unique name
    m_videoTexture = h3dCreateTexture(filename.c_str(), m_resizeWidth, m_resizeHeight, H3DFormats::TEX_BGRA8, H3DResFlags::NoTexMipmaps);
    if (m_videoTexture == 0)
    {
        GameLog::errorMessage("Error creating texture for playing avi: %s", filename.c_str());
        // Failure creating the dynamic texture
        closeAvi();
        return false;
    }
    // Find the sampler index within the material
    m_samplerIndex = h3dFindResElem(m_material, H3DMatRes::SamplerElem, H3DMatRes::SampNameStr, "albedoMap");
    if (m_samplerIndex == -1)
    {
        GameLog::errorMessage("Error preparing material with resID %d for playing avi: %s", m_material, filename.c_str());
        // No sampler found in material
        closeAvi();
        return false;
    }
    // Store old sampler so it can be restored when playback ends
    m_originalSampler = h3dGetResParamI(m_material, H3DMatRes::SamplerElem, m_samplerIndex, H3DMatRes::SampTexResI);

    // Now open the audio stream
    // NOTE(review): audioStream is never AVIStreamRelease'd on this path -
    // looks like a handle leak; confirm and release after the read.
    PAVISTREAM audioStream;
    if (AVIStreamOpenFromFile(&audioStream, filename.c_str(), streamtypeAUDIO, 0, OF_READ, NULL) == 0)
    {
        // Audio stream found
        // Get format info
        PCMWAVEFORMAT audioFormat;
        long formatSize = sizeof(audioFormat);
        int start = AVIStreamStart(audioStream);
        // TODO get channelsmask and use it
        AVIStreamReadFormat(audioStream, start, &audioFormat, &formatSize);
        long numSamples = AVIStreamLength(audioStream);
        // Per-channel bits per sample derived from the byte rate
        int bitsPerSample = (audioFormat.wf.nAvgBytesPerSec * 8) / (audioFormat.wf.nSamplesPerSec * audioFormat.wf.nChannels);
        /*if (audioFormat.wf.wFormatTag == WAVE_FORMAT_MPEGLAYER3)
        {
            // TODO
            MPEGLAYER3WAVEFORMAT mp3Format;
            formatSize = sizeof(mp3Format);
            AVIStreamReadFormat(audioStream, start, &mp3Format, &formatSize);
        }*/
        // Create buffer with appropriate size
        // NOTE(review): this size omits the channel count - for multi-channel
        // PCM the buffer looks undersized (read is clamped to bufferSize, so
        // no overflow, but audio may be truncated); confirm intent.
        long bufferSize = (bitsPerSample * numSamples) / 8;
        char* buffer = new char[bufferSize];
        // Read the audio data
        long bytesWritten = 0;
        AVIStreamRead(audioStream, start, numSamples, buffer, bufferSize, &bytesWritten, 0x0);
        if (bytesWritten > 0)
        {
            // Send the audio data to the sound component
            SoundResourceData eventData(buffer, bytesWritten, audioFormat.wf.nSamplesPerSec, bitsPerSample, audioFormat.wf.nChannels);
            GameEvent event(GameEvent::E_SET_SOUND_WITH_USER_DATA, &eventData, this);
            m_owner->executeEvent(&event);
            m_hasAudio = true;
        }
        // Delete the buffer data
        delete[] buffer;
    }
    if (m_autoStart)
        // Play video directly
        playAvi();
    return true;
}
void CAviHelper::AVItoBmp(const wstring& strAVIFileName, const wstring& strBmpDir) { AVIFileInit(); PAVIFILE avi; int res = AVIFileOpen(&avi, WS2S(strAVIFileName).c_str(), OF_READ, NULL); int n = GetLastError(); if (res!=AVIERR_OK) { //an error occures if (avi!=NULL) AVIFileRelease(avi); return ; } AVIFILEINFO avi_info; AVIFileInfo(avi, &avi_info, sizeof(AVIFILEINFO)); PAVISTREAM pStream; res=AVIFileGetStream(avi, &pStream, streamtypeVIDEO /*video stream*/, 0 /*first stream*/); if (res!=AVIERR_OK) { if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); return ; } //do some task with the stream int iNumFrames; int iFirstFrame; iFirstFrame = AVIStreamStart(pStream); if (iFirstFrame==-1) { //Error getteing the frame inside the stream if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); return ; } iNumFrames = AVIStreamLength(pStream); if (iNumFrames==-1) { //Error getteing the number of frames inside the stream if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); return ; } //getting bitmap from frame BITMAPINFOHEADER bih; ZeroMemory(&bih, sizeof(BITMAPINFOHEADER)); bih.biBitCount=24; //24 bit per pixel bih.biClrImportant=0; bih.biClrUsed = 0; bih.biCompression = BI_RGB; bih.biPlanes = 1; bih.biSize = 40; bih.biXPelsPerMeter = 0; bih.biYPelsPerMeter = 0; //calculate total size of RGBQUAD scanlines (DWORD aligned) bih.biSizeImage = (((bih.biWidth * 3) + 3) & 0xFFFC) * bih.biHeight ; PGETFRAME pFrame; pFrame=AVIStreamGetFrameOpen(pStream, NULL ); AVISTREAMINFO streaminfo; AVIStreamInfo(pStream,&streaminfo,sizeof(AVISTREAMINFO)); //Get the first frame BITMAPINFOHEADER bih2; long lsize = sizeof(bih2); int index= 0; for (int i = iFirstFrame; i < iNumFrames; i++) { index= i-iFirstFrame; BYTE* pDIB = (BYTE*) AVIStreamGetFrame(pFrame, index); // AVIStreamReadFormat(pStream,index,&bih2,&lsize); BITMAPFILEHEADER stFileHdr; BYTE* Bits=new BYTE[bih2.biSizeImage]; AVIStreamRead(pStream,index,1,Bits,bih2.biSizeImage,NULL,NULL); //RtlMoveMemory(Bits, pDIB + 
sizeof(BITMAPINFOHEADER), bih2.biSizeImage); bih2.biClrUsed =0; stFileHdr.bfOffBits=sizeof(BITMAPFILEHEADER)+sizeof(BITMAPINFOHEADER); stFileHdr.bfSize=sizeof(BITMAPFILEHEADER); stFileHdr.bfType=0x4d42; CString FileName; FileName.Format(_T("Frame-%05d.bmp"), index); CString strtemp; strtemp.Format(_T("%s\\%s"), strBmpDir.c_str(), FileName); FILE* fp=_tfopen(strtemp ,_T("wb")); fwrite(&stFileHdr,1,sizeof(BITMAPFILEHEADER),fp); fwrite(&bih2,1,sizeof(BITMAPINFOHEADER),fp); int ff = fwrite(Bits,1,bih2.biSizeImage,fp); int e = GetLastError(); fclose(fp); ///// delete Bits; //CreateFromPackedDIBPointer(pDIB, index); } AVIStreamGetFrameClose(pFrame); //close the stream after finishing the task if (pStream!=NULL) AVIStreamRelease(pStream); AVIFileExit(); }
HRESULT CAVIFileReader::Read( IN BYTE *pBuffer, IN LONG nBufferSize, IN OUT LONG *pBytesRead) { // // don't return garbage // *pBytesRead = 0; // // fail if the file is not open // if (NULL == m_pWaveFormat) { // // file is not open // LogError("CAVIFileReader::Read file is not open"); return E_FAIL; } HRESULT hr = E_FAIL; // // read data into the user-supplied buffer, starting with the current // stream position // LONG nSamplesRead = 0; hr = AVIStreamRead(m_pAudioStream, m_nSamplesReadSoFar, AVISTREAMREAD_CONVENIENT, pBuffer, nBufferSize, pBytesRead, &nSamplesRead); if (FAILED(hr)) { LogError("CAVIFileReader::Read AVIStreamRead failed"); *pBytesRead = 0; return hr; } // // keep track of how many samples we have read, so we know where // to start next time // m_nSamplesReadSoFar += nSamplesRead; if (*pBytesRead == 0) { LogMessage("CAVIFileReader::Read no more data in the file"); return S_FALSE; } LogMessage("CAVIFileReader::Read read %ld bytes (%ld samples)", *pBytesRead, nSamplesRead); return S_OK; }