Beispiel #1
0
/* Returns TRUE when the two streams expose byte-identical format blocks
 * at their respective start positions, FALSE otherwise (including on any
 * API failure or allocation failure). */
static BOOL AVIFILE_FormatsEqual(PAVISTREAM avi1, PAVISTREAM avi2)
{
  LPVOID buf1 = NULL, buf2 = NULL;
  LONG cb1, cb2, pos1, pos2;
  BOOL equal = FALSE;

  assert(avi1 != NULL && avi2 != NULL);

  /* Formats of different size can never match, so compare sizes first
   * and bail out early without allocating anything. */
  pos1 = AVIStreamStart(avi1);
  pos2 = AVIStreamStart(avi2);
  if (FAILED(AVIStreamFormatSize(avi1, pos1, &cb1)) ||
      FAILED(AVIStreamFormatSize(avi2, pos2, &cb2)) ||
      cb1 != cb2)
    return FALSE;

  /* Same size: fetch both format blocks and compare them byte-wise. */
  buf1 = HeapAlloc(GetProcessHeap(), 0, cb1);
  if (buf1 == NULL)
    return FALSE;
  if (SUCCEEDED(AVIStreamReadFormat(avi1, pos1, buf1, &cb1))) {
    buf2 = HeapAlloc(GetProcessHeap(), 0, cb1);
    if (buf2 != NULL &&
        SUCCEEDED(AVIStreamReadFormat(avi2, pos2, buf2, &cb1)))
      equal = (memcmp(buf1, buf2, cb1) == 0);
  }

  /* HeapFree tolerates the NULL buf2 from the failure paths above. */
  HeapFree(GetProcessHeap(), 0, buf2);
  HeapFree(GetProcessHeap(), 0, buf1);

  return equal;
}
		// Opens the AVI file at file_path_, grabs its first video stream,
		// prepares 24-bit RGB frame decoding and starts the worker thread.
		// Returns false (after cleaning up via _Close()) on any failure.
		// NOTE(review): assumes AVIFileInit() was called elsewhere before
		// the AVIFileOpen below — confirm against the callers/constructor.
		bool AviFrameGraber::_Open(void){
			//thread_handle_ !=NULL means it is already opened
			if (thread_handle_!=NULL)	return false;

			int res=AVIFileOpen(&avi_file_, file_path_.c_str(), OF_READ, NULL);
			if (res!=AVIERR_OK){
				woodychang0611::diagnostics::SendError(_T("AviFrameGraber Open File Fail"));
				_Close();
				return false;
			}
			res=AVIFileGetStream(avi_file_, &stream_, streamtypeVIDEO, 0/*first stream*/);
			if (res!=AVIERR_OK){
				woodychang0611::diagnostics::SendError(_T("AviFrameGraber Get Stream Fail"));
				_Close();
				return false;
			}
			// AVIStreamStart/AVIStreamLength return -1 on failure.
			if (AVIStreamStart(stream_)==-1 || AVIStreamLength(stream_)==-1){
				woodychang0611::diagnostics::SendError(_T("AviFrameGraber Stream Start or Length no correct"));
				_Close();
				return false;								
			}
			// NOTE(review): AVIFileInfo's return value is not checked; on
			// failure avi_info_ (used below for width/height/rate) would be
			// left uninitialized — consider checking it.
			AVIFileInfo(avi_file_, &avi_info_, sizeof(AVIFILEINFO));
			// Request decoded frames as uncompressed 24-bit RGB DIBs of the
			// movie's native dimensions.
			BITMAPINFOHEADER bih;
			bih.biSize = sizeof(BITMAPINFOHEADER);
			bih.biWidth = avi_info_.dwWidth;
			bih.biHeight = avi_info_.dwHeight;
			bih.biPlanes = 1;
			bih.biBitCount = 24;
			bih.biCompression = BI_RGB;
			bih.biSizeImage = 0;
			bih.biXPelsPerMeter = 0;
			bih.biYPelsPerMeter = 0;
			bih.biClrUsed = 0;
			bih.biClrImportant = 0;
			frame_=AVIStreamGetFrameOpen(stream_, (LPBITMAPINFOHEADER) &bih);
			if (frame_ !=NULL){
				start_frame_ = AVIStreamStart(stream_);
				frame_length_ = AVIStreamLength(stream_);
				current_frame_ = start_frame_;

				//Set Frame info
				frame_info_.start_frame_=start_frame_;
				frame_info_.frame_length_ =frame_length_; 
				// dwRate/dwScale is the AVI header's frames-per-second ratio.
				frame_info_.frame_per_second_=(FLOAT32)avi_info_.dwRate/avi_info_.dwScale;
				frame_info_.frame_width_=(UINT16) avi_info_.dwWidth;
				frame_info_.frame_height_=(UINT16)  avi_info_.dwHeight;
				_status = FRAME_SUBJECT_PAUSE;
				// NOTE(review): CreateThread's result is assigned but a NULL
				// (creation failure) still returns true here — verify intent.
				thread_handle_ =CreateThread(NULL ,0,this->_ThreadFunc,this,0,NULL);
				return true;
			}else{
				woodychang0611::diagnostics::SendError(_T("AviFrameGraber Get Frame Failed"));
			}
			return false;
		}
// Switches the movie into the "playing" state and resets both of its
// clock stamps to the current time; looping is set per the argument.
void Movie::play(bool Looped)
{
    const clock_t now = clock();

    mcrAVIData->State    = MOVIESTATE_PLAYED;
    mcrAVIData->LastTime = now;
    mcrAVIData->Time     = now;
    mcrAVIData->Looped   = Looped;
    
    #if 0
    if (AVIStreamStart(mcrAVIData->pAudioStream) == -1)
        io::printError("Could not start audio stream");
    #endif
}
Beispiel #4
0
/* Fetches one video frame from an open AVI as an ImBuf.
 * On Windows the AVIStream decoder path is tried first; otherwise (or as
 * fallback) the frame is read raw as RGB32 and flipped vertically.
 * Returns NULL on any failure. */
static ImBuf *avi_fetchibuf(struct anim *anim, int position)
{
	ImBuf *ibuf = NULL;
	int *tmp;
	int y;
	
	if (anim == NULL) {
		return NULL;
	}

#if defined(_WIN32) && !defined(FREE_WINDOWS)
	if (anim->avistreams) {
		LPBITMAPINFOHEADER lpbi;

		if (anim->pgf) {
			lpbi = AVIStreamGetFrame(anim->pgf, position + AVIStreamStart(anim->pavi[anim->firstvideo]));
			if (lpbi) {
				ibuf = IMB_ibImageFromMemory((unsigned char *) lpbi, 100, IB_rect, anim->colorspace, "<avi_fetchibuf>");
//Oh brother...
			}
		}
	}
	else
#endif
	{
		ibuf = IMB_allocImBuf(anim->x, anim->y, 24, IB_rect);
		/* BUG FIX: guard against allocation failure before writing ibuf->rect. */
		if (ibuf == NULL) {
			return NULL;
		}

		tmp = AVI_read_frame(anim->avi, AVI_FORMAT_RGB32, position,
		                     AVI_get_stream(anim->avi, AVIST_VIDEO, 0));
		
		if (tmp == NULL) {
			printf("Error reading frame from AVI: '%s'\n", anim->name);
			IMB_freeImBuf(ibuf);
			return NULL;
		}

		/* AVI rows are stored top-down; flip into the bottom-up ImBuf. */
		for (y = 0; y < anim->y; y++) {
			memcpy(&(ibuf->rect)[((anim->y - y) - 1) * anim->x],  &tmp[y * anim->x],
			       anim->x * 4);
		}
		
		MEM_freeN(tmp);
	}
	
	/* BUG FIX: on the AVIStream path above ibuf can still be NULL (no
	 * pgf, no frame, or decode failure); dereferencing it crashed. */
	if (ibuf == NULL) {
		return NULL;
	}

	ibuf->rect_colorspace = colormanage_colorspace_get_named(anim->colorspace);

	return ibuf;
}
Beispiel #5
0
bool CCamAvi::Initialize (char* filename)
{
	long hr;
	AVISTREAMINFO info;
	
	m_init=false;
	m_pos=0;
	
	AVIFileInit ();
	
	hr=AVIFileOpen (&m_paf, filename, OF_READ, NULL);
	if (hr!=0) return false;

	hr=AVIFileGetStream (m_paf, &m_pas, streamtypeVIDEO, 0);
	if (hr!=0) return false;

	hr= AVIStreamInfo (m_pas, &info, sizeof (info));	
	if (hr!=0) return false;

	hr=sizeof (m_bi);
	hr= AVIStreamReadFormat (m_pas, 0, &m_bi, &hr);
	if (hr!=0) return false;	
	
	if (m_bi.bmiHeader.biCompression!=0)
	{
		// find decompressor
		m_hic=ICOpen (ICTYPE_VIDEO, info.fccHandler ,ICMODE_DECOMPRESS);
		if (m_hic==0) return false;
	}
 	m_data=new unsigned char[m_bi.bmiHeader.biSizeImage];
	AVISTREAMINFO a;
	AVIStreamInfo (m_pas, &a, sizeof (a));
	m_length=a.dwLength;
	hr=AVIStreamBeginStreaming (m_pas, 0, 100, 1000);	
	m_init=true;

    m_FrameRate = (double)a.dwRate / a.dwScale;
	
	m_pos=AVIStreamStart (m_pas);
	m_data=new BYTE[m_bi.bmiHeader.biSizeImage];
	if (m_bi.bmiHeader.biCompression!=0) 
	{
		hr=ICDecompressGetFormat (m_hic, &m_bi, &m_bi_out);
	}
	else m_bi_out=m_bi;
	m_frame.Create (m_bi_out.bmiHeader.biWidth, m_bi_out.bmiHeader.biHeight, m_bi_out.bmiHeader.biBitCount);
	return true;
}
Beispiel #6
0
// Opens an uncompressed AVI's video stream, validates its format fits in
// m_bi, creates the ".Processed.avi" output file, and allocates the frame
// buffer. On failure it shows a message box or returns early.
// NOTE(review): the early returns leave m_pAviStream open and AVIFileInit
// unbalanced by AVIFileExit — confirm the class's Close/destructor
// compensates before tightening this further.
void VideoReader::Open(CString strFilePath)
{    
    AVIFileInit();

    LONG hr;  
    hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);
    if (hr != 0){ 
        // Handle failure.
        AfxMessageBox(L"Failed to open file, file must be an uncompressed video."); 
    }
    else
    {
        // BUG FIX: this scope used to declare a second `HRESULT hr` that
        // shadowed the outer LONG hr; the outer variable is reused now.
        AVISTREAMINFO    strhdr; 
        LONG             lStreamSize; 
 

        // Determine the size of the format data using 
        // AVIStreamFormatSize. 
        AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize); 
        if (lStreamSize > (LONG)sizeof(m_bi)) // Format too large? 
            return; 

        lStreamSize = sizeof(m_bi); 
        hr = AVIStreamReadFormat(m_pAviStream, 0, &m_bi, &lStreamSize); // Read format 
        if (m_bi.biCompression != BI_RGB) // Wrong compression format? 
            return; 

        hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr)); 

        // Create new AVI file using AVIFileOpen. 
        hr = AVIFileOpen(&m_pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL); 
        if (hr != 0) 
            return; 

        m_currentSize = AVIStreamStart(m_pAviStream);

        // Allocate memory for the bitmaps.
        // NOTE(review): malloc result is not checked; callers must treat a
        // NULL m_lpBuffer as "not opened".
        m_lpBuffer = (BYTE *)malloc(m_bi.biSizeImage); 
    }

}
Beispiel #7
0
	/////////////////////////////////////////////////////////
	//get bitmpainfoheader information
	/////////////////////////////////////////////////////////
	// Reads the video stream's format into *pBmpHeader, caches it in
	// m_bmpHeader, and derives start/end frame and per-frame image size.
	// NOTE(review): the definition has no return type (legacy implicit
	// int) — it should be declared to match the class header.
	// NOTE(review): AVIStreamReadFormat updates bmp_size to the actual
	// format size; if that ever exceeds sizeof(BITMAPINFO) the memcpy
	// below would overrun m_bmpHeader — verify the call's return value
	// and clamp bmp_size.
	CAviMan::GetBmpHeader(BITMAPINFO *pBmpHeader)
	{
		LONG bmp_size = sizeof( BITMAPINFO ); 

		AVIStreamReadFormat(m_aviVideoStream, 0, &(pBmpHeader->bmiHeader), &bmp_size); // Read format 

		memcpy(&m_bmpHeader,pBmpHeader,bmp_size);

		m_dwStartFrame = AVIStreamStart(m_aviVideoStream);
		// NOTE(review): AVIStreamEnd returns start + length (the end
		// position), not the frame count as the comment suggests.
		m_dwTotalFrame = AVIStreamEnd(m_aviVideoStream);				//get number of frames

		// Bytes per frame: width * height scaled by bytes-per-pixel below.
		m_dwImageSize = m_bmpHeader.bmiHeader.biWidth * m_bmpHeader.bmiHeader.biHeight;

		switch(m_bmpHeader.bmiHeader.biBitCount)
		{
		case(24):m_dwImageSize*=3;break;
		case(16):m_dwImageSize*=2;break;			
		}

	}
Beispiel #8
0
// Opens an AVI, grabs its first video stream, reads the stream's format
// block into a freshly allocated m_pBmpInfo, and prepares frame decoding
// in the best display format. Returns FALSE on any failure, with the
// failing HRESULT left in m_hr.
bool CAviToBmp::Open(LPCTSTR Path)
{
	// Start from a clean slate in case a file is already open.
	Close();
	m_hr = AVIFileOpen(&m_pFile, Path, OF_READ, NULL);
	if (FAILED(m_hr))
		return(FALSE);
	m_hr = AVIFileGetStream(m_pFile, &m_pStream, streamtypeVIDEO, 0);
	if (FAILED(m_hr))
		return(FALSE);
	m_FrameCount = AVIStreamLength(m_pStream);
	long	FirstFrame = AVIStreamStart(m_pStream);
	if (FirstFrame < 0)
		return(FALSE);
	// First call with a NULL buffer just queries the required size;
	// the second call reads the format for real.
	long	FmtSize;
	m_hr = AVIStreamReadFormat(m_pStream, FirstFrame, NULL, &FmtSize);
	if (FAILED(m_hr))
		return(FALSE);
	m_pBmpInfo = (LPBITMAPINFO)new BYTE[FmtSize];
	m_hr = AVIStreamReadFormat(m_pStream, FirstFrame, m_pBmpInfo, &FmtSize);
	if (FAILED(m_hr))
		return(FALSE);
	// Let AVIFile pick the best display format for decoded frames.
	m_pGetFrame = AVIStreamGetFrameOpen(m_pStream, (LPBITMAPINFOHEADER)AVIGETFRAMEF_BESTDISPLAYFMT);
	return(m_pGetFrame != NULL);
}
Beispiel #9
0
// Opens the wave-out device for the audio stream `pavi` and prepares the
// static playback buffers. Returns TRUE when the device is ready (or
// already playing), FALSE on any failure. Notifications go to `hwnd` via
// CALLBACK_WINDOW messages.
BOOL CALLBACK aviaudioOpenDevice(HWND hwnd, PAVISTREAM pavi)
{
	MMRESULT            mmResult;
	LPVOID              lpFormat;
	LONG                cbFormat;
	AVISTREAMINFO       strhdr;

	if (!pavi)          // no wave data to play
		return FALSE;

	if (shWaveOut)      // already something playing
		return TRUE;

	spavi = pavi;

	recalc = 1;

	AVIStreamInfo(pavi, &strhdr, sizeof(strhdr));

	slSampleSize = (LONG) strhdr.dwSampleSize;
	if (slSampleSize <= 0 || slSampleSize > AUDIO_BUFFER_SIZE)
		return FALSE;

	// Query the format at the stream's start position (not position 0,
	// which is not necessarily valid for every stream).
	//AVIStreamFormatSize(pavi, 0, &cbFormat);
	AVIStreamFormatSize(pavi, AVIStreamStart(pavi), &cbFormat);

	lpFormat = GlobalAllocPtr(GHND, cbFormat);
	if (!lpFormat)
		return FALSE;

	//AVIStreamReadFormat(pavi, 0, lpFormat, &cbFormat);
	AVIStreamReadFormat(pavi, AVIStreamStart(pavi), lpFormat, &cbFormat);

	sdwSamplesPerSec = ((LPWAVEFORMAT) lpFormat)->nSamplesPerSec;
	sdwBytesPerSec = ((LPWAVEFORMAT) lpFormat)->nAvgBytesPerSec;

	// NOTE(review): lpFormat is never freed on any path below even
	// though waveOutOpen copies the format — this leaks cbFormat bytes
	// per successful open. Consider GlobalFreePtr(lpFormat) after the
	// open attempts.
	mmResult = waveOutOpen(&shWaveOut, (UINT)WAVE_MAPPER, (WAVEFORMATEX *) lpFormat,
		(DWORD) (UINT) hwnd, 0L, CALLBACK_WINDOW);

	// Maybe we failed because someone is playing sound already.
	// Shut any sound off, and try once more before giving up.
	if (mmResult) {
		sndPlaySound(NULL, 0);
		mmResult = waveOutOpen(&shWaveOut, (UINT)WAVE_MAPPER, (WAVEFORMATEX *) lpFormat,
			(DWORD) (UINT)hwnd, 0L, CALLBACK_WINDOW);
	}

	if (mmResult != 0) {
		return FALSE;
	}

	// Allocate and prepare up to MAX_AUDIO_BUFFERS wave headers; the loop
	// stops at the first allocation or prepare failure (a zero return from
	// waveOutPrepareHeader means success, so `continue` keeps the buffer).
	for (swBuffers = 0; swBuffers < MAX_AUDIO_BUFFERS; swBuffers++) {
		if (!(salpAudioBuf[swBuffers] = (LPWAVEHDR)GlobalAllocPtr(GMEM_MOVEABLE | GMEM_SHARE, (DWORD)(sizeof(WAVEHDR) + AUDIO_BUFFER_SIZE))))
			break;
		salpAudioBuf[swBuffers]->dwFlags = WHDR_DONE;
		salpAudioBuf[swBuffers]->lpData = (LPSTR) salpAudioBuf[swBuffers] + sizeof(WAVEHDR);
		salpAudioBuf[swBuffers]->dwBufferLength = AUDIO_BUFFER_SIZE;
		if (!waveOutPrepareHeader(shWaveOut, salpAudioBuf[swBuffers], sizeof(WAVEHDR)))
			continue;

		// Prepare failed: release this buffer and stop allocating.
		GlobalFreePtr((LPSTR) salpAudioBuf[swBuffers]);
		break;
	}

	// Give up (and close the device) if we could not get enough buffers.
	if (swBuffers < MIN_AUDIO_BUFFERS) {
		aviaudioCloseDevice();
		return FALSE;
	}

	swBuffersOut = 0;
	swNextBuffer = 0;

	sfPlaying = FALSE;

	return TRUE;
}
Beispiel #10
0
// Opens an uncompressed AVI's video stream, creates a ".Processed.avi"
// copy, pushes every frame through m_pVideoAgent->ProcessFrame, and writes
// the processed frames to the new file.
// NOTE(review): the early returns leak m_pAviStream (never released) and
// skip AVIFileExit; lpOld's malloc result is unchecked. Confirm intended
// ownership before changing.
void VideoHelper::OpenVideo(CString strFilePath, FrameData& data)
{    
    AVIFileInit();

    LONG hr;  
    hr = AVIStreamOpenFromFile(&m_pAviStream, strFilePath, streamtypeVIDEO, 0, OF_READ, NULL);
    if (hr != 0){ 
        // Handle failure.
        AfxMessageBox(L"Failed to open file."); 
    }
    else
    {
        PAVIFILE         pf; 
        PAVISTREAM       psSmall; 
        // NOTE(review): this HRESULT shadows the outer LONG hr.
        HRESULT          hr; 
        AVISTREAMINFO    strhdr; 
        BITMAPINFOHEADER bi; 
        BITMAPINFOHEADER biNew; 
        LONG             lStreamSize; 
        LPVOID           lpOld; 
        LPVOID           lpNew; 

        // Determine the size of the format data using 
        // AVIStreamFormatSize. 
        AVIStreamFormatSize(m_pAviStream, 0, &lStreamSize); 
        if (lStreamSize > sizeof(bi)) // Format too large? 
            return; 

        lStreamSize = sizeof(bi); 
        hr = AVIStreamReadFormat(m_pAviStream, 0, &bi, &lStreamSize); // Read format 
        if (bi.biCompression != BI_RGB) // Wrong compression format? 
            return; 

        hr = AVIStreamInfo(m_pAviStream, &strhdr, sizeof(strhdr)); 

        // Create new AVI file using AVIFileOpen. 
        hr = AVIFileOpen(&pf, strFilePath + L".Processed.avi", OF_WRITE | OF_CREATE, NULL); 
        if (hr != 0) 
            return; 

        // Set parameters for the new stream. 
        biNew = bi; 

        SetRect(&strhdr.rcFrame, 0, 0, (int) biNew.biWidth, 
            (int) biNew.biHeight); 

        // Create a stream using AVIFileCreateStream. 
        hr = AVIFileCreateStream(pf, &psSmall, &strhdr); 
        if (hr != 0) {            //Stream created OK? If not, close file. 
            AVIFileRelease(pf); 
            return; 
        } 

        // Set format of new stream using AVIStreamSetFormat. 
        hr = AVIStreamSetFormat(psSmall, 0, &biNew, sizeof(biNew)); 
        if (hr != 0) { 
            AVIStreamRelease(psSmall); 
            AVIFileRelease(pf); 
            return; 
        } 

        // Allocate memory for the bitmaps. 
        lpOld = malloc(bi.biSizeImage); 

        // Read the stream data using AVIStreamRead.
        // lStreamSize is reused here as the current frame position.
        for (lStreamSize = AVIStreamStart(m_pAviStream); lStreamSize <
            AVIStreamEnd(m_pAviStream)/*1500*/; lStreamSize++) { 
                //Context::Oversubscribe(true);
                hr = AVIStreamRead(m_pAviStream, lStreamSize, 1, lpOld, bi.biSizeImage,
                    NULL, NULL); 
                //Context::Oversubscribe(false);
                //memcpy_s(lpNew, bi.biSizeImage, lpOld, bi.biSizeImage);
                // Describe the raw frame for the processing agent.
                data.m_BBP = bi.biBitCount;
                data.m_ColorPlanes = bi.biPlanes;
                data.m_EndHeight = bi.biHeight;
                data.m_EndWidth = bi.biWidth;
                data.m_pFrame = (BYTE*)lpOld;
                data.m_Pitch = bi.biWidth * (bi.biBitCount / 8);
                data.m_Size = bi.biSizeImage;
                data.m_StartHeight = 0;
                data.m_StartWidth = 0;
                lpNew = m_pVideoAgent->ProcessFrame(data);
               

                if(NULL != lpNew)
                {
                    // Save the compressed data using AVIStreamWrite.
                    hr = AVIStreamWrite(psSmall, lStreamSize, 1, lpNew,
                    biNew.biSizeImage, AVIIF_KEYFRAME, NULL, NULL);
                }
        } 
        free(lpOld);
        // Close the stream and file. 
        AVIStreamRelease(psSmall); 
        AVIFileRelease(pf); 
    }
    AVIFileExit();
}
Beispiel #11
0
	// Prepares audio playback for the movie: reads the encoded stream's
	// chunk size and format, opens an ACM conversion stream to 16-bit PCM
	// at the source sample rate, sizes and allocates the decode buffers,
	// and (when FMOD is enabled) creates and starts the output stream.
	// Returns false after releasing everything acquired so far on failure.
	bool setupAudio()
	{
		int ret;


		//read audio stream info; specifically, we need the encoded chunksize
		AVISTREAMINFO audioStreamInfo;
		AVIStreamInfo(audioStream,&audioStreamInfo,sizeof(AVISTREAMINFO));
		audioChunkSize = audioStreamInfo.dwSuggestedBufferSize;
		audioSampleCount = audioStreamInfo.dwLength;

		audioEncoded = new char[audioChunkSize];

		//read the audio streamformat info
		// (first call with a null buffer queries the size, second reads it)
		LONG formatSize;
		AVIStreamReadFormat(audioStream,AVIStreamStart(audioStream),0,&formatSize);
		char *format = (char *)malloc(formatSize);
		AVIStreamReadFormat(audioStream,AVIStreamStart(audioStream),format,&formatSize);
		WAVEFORMATEX *wfxEncoded = (WAVEFORMATEX *)format;

		//construct a descriptor for the format we desire to get out of the decoder
		//note that we have to use the same samplerate as the encoded format indicates
		//since acm can't change the samplerate in one fell swoop
		wfxDecoded.cbSize = sizeof(WAVEFORMATEX);
		wfxDecoded.nChannels = wfxEncoded->nChannels;
		wfxDecoded.wFormatTag = WAVE_FORMAT_PCM;
		wfxDecoded.nSamplesPerSec = wfxEncoded->nSamplesPerSec;
		wfxDecoded.wBitsPerSample = 16;
		wfxDecoded.nBlockAlign = wfxDecoded.wBitsPerSample/8 * wfxEncoded->nChannels;
		wfxDecoded.nAvgBytesPerSec = wfxDecoded.nBlockAlign * wfxDecoded.nSamplesPerSec;

		sampleSize = wfxDecoded.nBlockAlign;

		//try to get a converter from the encoded data to the decoded data
		ret = acmStreamOpen(&acmStream,0,wfxEncoded,&wfxDecoded,0,0,0,0);

		//now we're done with wfxEncoded
		free(format);

		if(ret)
		{
			// acmStreamOpen failed: release what we acquired above.
			delete[] audioEncoded;
			AVIStreamClose(audioStream);
			return false;
		}


		//decide on a playback buffer size
		//make each buffer 1/2sec
		playBufferSamples = wfxDecoded.nSamplesPerSec / 2;
		playBufferSize = playBufferSamples * sampleSize;


		//hurry and try to create the output stream.
		//if we can't do that, then everything that follows is pointless.
		int mode = 0;
		if(wfxDecoded.wBitsPerSample == 8)
			mode |= FSOUND_8BITS;
		else if(wfxDecoded.wBitsPerSample == 16)
			mode |= FSOUND_16BITS;
		if(wfxDecoded.nChannels == 1)
			mode |= FSOUND_MONO;
		else
			mode |= FSOUND_STEREO;

		#ifdef SND_USE_FMOD
		// NOTE(review): casting `this` to int truncates the pointer on
		// 64-bit builds — the userdata parameter should be pointer-sized.
		fmod_stream = FSOUND_Stream_Create(win_movie_fmod_streamCallback,playBufferSize,mode,wfxDecoded.nSamplesPerSec,(int)this);
		if(!fmod_stream)
		{
			acmStreamClose(acmStream,0);
			delete[] audioEncoded;
			AVIStreamClose(audioStream);
			err("Error creating fmod stream for movieplayback.  Please report this case so we can improve the robustness of the movie player!");
			return false;
		}
		#endif


		//find out how large a decode buffer we need for the encode buffer chunksize
		acmStreamSize(acmStream,audioChunkSize,&decodeBufferSize,ACM_STREAMSIZEF_SOURCE);
		decodeBufferSamples = decodeBufferSize / sampleSize;


		//allocate the decode buffer
		audioDecoded = new char[decodeBufferSize];

		//prep the decode operation
		audioStreamHeader.cbStruct = sizeof(ACMSTREAMHEADER);
		audioStreamHeader.fdwStatus = 0;
		audioStreamHeader.pbSrc = (LPBYTE)audioEncoded;
		audioStreamHeader.cbSrcLength = audioChunkSize;
		audioStreamHeader.pbDst = (LPBYTE)audioDecoded;
		audioStreamHeader.cbDstLength = decodeBufferSize;
		ret = acmStreamPrepareHeader(acmStream,&audioStreamHeader,0);
		if(ret)
		{
			// prepare failed: unwind in reverse acquisition order.
			delete[] audioDecoded;
			acmStreamClose(acmStream,0);
			delete[] audioEncoded;
			AVIStreamClose(audioStream);
			return false;
		}

		#ifdef SND_USE_FMOD
		//finally we're ready to start the audio stream
		FSOUND_Stream_Play(FSOUND_FREE,fmod_stream);
		#endif

		return true;
	}
Beispiel #12
0
/* Fetches one video frame from an open AVI as an ImBuf.
 * On Windows the AVIStream decoder path is tried first; otherwise the
 * frame is read raw as RGB32 and flipped vertically.
 * Returns NULL on any failure. */
static ImBuf *avi_fetchibuf(struct anim *anim, int position)
{
	ImBuf *ibuf = NULL;
	int *tmp;
	int y;
	
	if (anim == NULL) {
		return NULL;
	}

#if defined(_WIN32) && !defined(FREE_WINDOWS)
	if (anim->avistreams) {
		LPBITMAPINFOHEADER lpbi;

		if (anim->pgf) {
			lpbi = AVIStreamGetFrame(anim->pgf, position + AVIStreamStart(anim->pavi[anim->firstvideo]));
			if (lpbi) {
				ibuf = IMB_ibImageFromMemory((unsigned char *) lpbi, 100, IB_rect, anim->colorspace, "<avi_fetchibuf>");
//Oh brother...
			}
		}
	}
	else {
#else
	if (1) {
#endif
		ibuf = IMB_allocImBuf(anim->x, anim->y, 24, IB_rect);
		/* BUG FIX: guard against allocation failure before writing ibuf->rect */
		if (ibuf == NULL) {
			return NULL;
		}

		tmp = AVI_read_frame(anim->avi, AVI_FORMAT_RGB32, position,
		                     AVI_get_stream(anim->avi, AVIST_VIDEO, 0));
		
		if (tmp == NULL) {
			printf("Error reading frame from AVI: '%s'\n", anim->name);
			IMB_freeImBuf(ibuf);
			return NULL;
		}

		/* AVI rows are stored top-down; flip into the bottom-up ImBuf. */
		for (y = 0; y < anim->y; y++) {
			memcpy(&(ibuf->rect)[((anim->y - y) - 1) * anim->x],  &tmp[y * anim->x],
			       anim->x * 4);
		}
		
		MEM_freeN(tmp);
	}
	
	/* BUG FIX: on the AVIStream path above ibuf can still be NULL (no
	 * pgf, no frame, or decode failure); dereferencing it crashed. */
	if (ibuf == NULL) {
		return NULL;
	}

	ibuf->rect_colorspace = colormanage_colorspace_get_named(anim->colorspace);

	return ibuf;
}
#endif  /* WITH_AVI */

#ifdef WITH_FFMPEG

/* Opens anim->name with FFmpeg, finds the anim->streamindex-th video
 * stream, opens its decoder, allocates the decode/deinterlace/RGB frames
 * and the swscale RGBA conversion context, and fills in the anim's
 * duration, frame rate, dimensions and seek state.
 * Returns 0 on success, -1 on any failure (everything acquired so far is
 * released before returning). */
static int startffmpeg(struct anim *anim)
{
	int i, videoStream;

	AVCodec *pCodec;
	AVFormatContext *pFormatCtx = NULL;
	AVCodecContext *pCodecCtx;
	int frs_num;
	double frs_den;
	int streamcount;

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* The following for color space determination */
	int srcRange, dstRange, brightness, contrast, saturation;
	int *table;
	const int *inv_table;
#endif

	if (anim == 0) return(-1);

	streamcount = anim->streamindex;

	if (avformat_open_input(&pFormatCtx, anim->name, NULL, NULL) != 0) {
		return -1;
	}

	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	av_dump_format(pFormatCtx, 0, anim->name, 0);


	/* Find the video stream */
	/* (skip `streamcount` video streams to honor anim->streamindex) */
	videoStream = -1;

	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			if (streamcount > 0) {
				streamcount--;
				continue;
			}
			videoStream = i;
			break;
		}

	if (videoStream == -1) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	/* Find the decoder for the video stream */
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx->workaround_bugs = 1;

	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	/* Total frame count estimated from container duration x frame rate. */
	anim->duration = ceil(pFormatCtx->duration *
	                      av_q2d(pFormatCtx->streams[videoStream]->r_frame_rate) /
	                      AV_TIME_BASE);

	frs_num = pFormatCtx->streams[videoStream]->r_frame_rate.num;
	frs_den = pFormatCtx->streams[videoStream]->r_frame_rate.den;

	frs_den *= AV_TIME_BASE;

	/* Reduce the rational where possible to keep frs_sec small. */
	while (frs_num % 10 == 0 && frs_den >= 2.0 && frs_num > 10) {
		frs_num /= 10;
		frs_den /= 10;
	}

	anim->frs_sec = frs_num;
	anim->frs_sec_base = frs_den;

	anim->params = 0;

	anim->x = pCodecCtx->width;
	anim->y = av_get_cropped_height_from_codec(pCodecCtx);

	anim->pFormatCtx = pFormatCtx;
	anim->pCodecCtx = pCodecCtx;
	anim->pCodec = pCodec;
	anim->videoStream = videoStream;

	anim->interlacing = 0;
	anim->orientation = 0;
	anim->framesize = anim->x * anim->y * 4;

	/* Reset seek/decode bookkeeping. */
	anim->curposition = -1;
	anim->last_frame = 0;
	anim->last_pts = -1;
	anim->next_pts = -1;
	anim->next_packet.stream_index = -1;

	anim->pFrame = avcodec_alloc_frame();
	anim->pFrameComplete = FALSE;
	anim->pFrameDeinterlaced = avcodec_alloc_frame();
	anim->pFrameRGB = avcodec_alloc_frame();

	/* Sanity check: the code below assumes 4 bytes/pixel RGBA packing. */
	if (avpicture_get_size(PIX_FMT_RGBA, anim->x, anim->y) !=
	    anim->x * anim->y * 4)
	{
		fprintf(stderr,
		        "ffmpeg has changed alloc scheme ... ARGHHH!\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

	if (anim->ib_flags & IB_animdeinterlace) {
		avpicture_fill((AVPicture *) anim->pFrameDeinterlaced,
		               MEM_callocN(avpicture_get_size(
		                               anim->pCodecCtx->pix_fmt,
		                               anim->pCodecCtx->width,
		                               anim->pCodecCtx->height),
		                           "ffmpeg deinterlace"),
		               anim->pCodecCtx->pix_fmt, 
		               anim->pCodecCtx->width,
		               anim->pCodecCtx->height);
	}

	if (pCodecCtx->has_b_frames) {
		anim->preseek = 25; /* FIXME: detect gopsize ... */
	}
	else {
		anim->preseek = 0;
	}
	
	anim->img_convert_ctx = sws_getContext(
	        anim->x,
	        anim->y,
	        anim->pCodecCtx->pix_fmt,
	        anim->x,
	        anim->y,
	        PIX_FMT_RGBA,
	        SWS_FAST_BILINEAR | SWS_PRINT_INFO | SWS_FULL_CHR_H_INT,
	        NULL, NULL, NULL);
		
	if (!anim->img_convert_ctx) {
		fprintf(stderr,
		        "Can't transform color space??? Bailing out...\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* Try do detect if input has 0-255 YCbCR range (JFIF Jpeg MotionJpeg) */
	if (!sws_getColorspaceDetails(anim->img_convert_ctx, (int **)&inv_table, &srcRange,
	                              &table, &dstRange, &brightness, &contrast, &saturation))
	{
		srcRange = srcRange || anim->pCodecCtx->color_range == AVCOL_RANGE_JPEG;
		inv_table = sws_getCoefficients(anim->pCodecCtx->colorspace);

		if (sws_setColorspaceDetails(anim->img_convert_ctx, (int *)inv_table, srcRange,
		                             table, dstRange, brightness, contrast, saturation))
		{
			fprintf(stderr, "Warning: Could not set libswscale colorspace details.\n");
		}
	}
	else {
		fprintf(stderr, "Warning: Could not set libswscale colorspace details.\n");
	}
#endif
		
	return (0);
}
Beispiel #13
0
// Loads the audio track of a video file (Windows only) into this music
// object: opens the AVI audio stream, reads its WAVEFORMATEX to pick an
// OpenAL format, creates the AL buffers/source, pre-queues two buffers of
// samples and configures the source. Returns false on any failure.
bool CMusic::loadFromVideo(const CString& fileName)
{
    m_loaded = false;
    m_fileName = fileName;
    m_file = nullptr;
    m_fromVideo = true;

#ifdef T_SYSTEM_WINDOWS

    m_sampleCount = 0;
    ALenum error = alGetError();

    CApplication::getApp()->log(CString::fromUTF8("Chargement de la musique de la vidéo %1").arg(m_fileName));

    // Open the audio stream
    if (AVIStreamOpenFromFile(&m_aviStream, m_fileName.toCharArray(), streamtypeAUDIO, 0, OF_READ, nullptr))
    {
        CApplication::getApp()->log("AVIStreamOpenFromFile : impossible de lire le flux audio", ILogger::Error);
        return false;
    }


    // Query the sample size at the stream start to size the format buffer.
    LONG buffer_size;
    AVIStreamRead(m_aviStream, AVIStreamStart(m_aviStream), (-1L), nullptr, 0, &buffer_size, nullptr);

    PBYTE tmp_format = new BYTE[buffer_size];
    AVIStreamReadFormat(m_aviStream, AVIStreamStart(m_aviStream), tmp_format, &buffer_size);
    LPWAVEFORMATEX wave_format = reinterpret_cast<LPWAVEFORMATEX>(tmp_format);

    // Read the sample count and the sample rate
    m_nbrSamples = AVIStreamLength(m_aviStream);
    m_sampleRate = wave_format->nSamplesPerSec;

    // Copy what we still need out of the format block, then free it.
    // BUG FIX: tmp_format was never deleted, leaking on every call
    // (including every early return in the switch below).
    const WORD channelCount = wave_format->nChannels;
    delete[] tmp_format;

    // Choose the OpenAL format from the channel count
    switch (channelCount)
    {
        case 1:
            m_format = AL_FORMAT_MONO16;
            break;

        case 2:
            m_format = AL_FORMAT_STEREO16;
            break;

        case 4:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_QUAD16");
            break;

        case 6:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_51CHN16");
            break;

        case 7:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_61CHN16");
            break;

        case 8:

            if (!CSoundEngine::isOALExtension("AL_EXT_MCFORMATS"))
            {
                return false;
            }

            m_format = alGetEnumValue("AL_FORMAT_71CHN16");
            break;

        default:
            return false;
    }

    // Create the OpenAL buffers (only once; they may already exist)
    if (m_buffer[0] == 0 || m_buffer[1] == 0)
    {
        alGenBuffers(2, m_buffer);

        // Error handling
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alGenBuffers", __LINE__);
            return false;
        }

        // The buffers are invalid
        if (m_buffer[0] == 0 || m_buffer[1] == 0)
        {
            CApplication::getApp()->log("Les buffers audio sont invalides", ILogger::Error);
            return false;
        }
    }

    // Create a source (only once)
    if (m_source == 0)
    {
        alGenSources(1, &m_source);

        // Error handling
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alGenSources", __LINE__);
            return false;
        }

        // The source is invalid
        if (m_source == 0)
        {
            CApplication::getApp()->log("La source audio est invalide", ILogger::Error);
            return false;
        }
    }

    // Pre-fill both buffers
    readData(m_buffer[0], 44100);
    readData(m_buffer[1], 44100);

    // Queue the buffers filled with the samples just read
    alSourceQueueBuffers(m_source, 2, m_buffer);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourceQueueBuffers", __LINE__);
        return false;
    }

    // Source parameters
    alSourcei(m_source, AL_LOOPING, false);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alGetSourcei", __LINE__);
    }

    alSourcef(m_source, AL_PITCH, 1.0f);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__);
    }

    alSourcef(m_source, AL_GAIN, 1.0f);

    // Error handling
    if ((error = alGetError() ) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSourcef", __LINE__);
    }

    alSource3f(m_source, AL_POSITION, 0.0f, 0.0f, 0.0f);

    // Error handling
    if ((error = alGetError()) != AL_NO_ERROR)
    {
        CSoundEngine::displayOpenALError(error, "alSource3f", __LINE__);
    }

    m_loaded = true;

#endif

    return true;
}
Beispiel #14
0
// Opens an AVI file, shows its header info, then decodes every frame of
// the first video stream via AVIStreamGetFrame and hands each packed DIB
// to CreateFromPackedDIBPointer. Returns TRUE on success.
BOOL ExtractAVIFrames(CString szFileName)
{
	AVIFileInit();

	// BUG FIX: avi was uninitialized but tested against NULL on failure.
	PAVIFILE avi = NULL;
	int res = AVIFileOpen(&avi, szFileName, OF_READ, NULL);

	if (res != AVIERR_OK)
	{
		//an error occures
		if (avi != NULL)
			AVIFileRelease(avi);

		AVIFileExit(); // BUG FIX: balance the AVIFileInit() above
		return FALSE;
	}

	AVIFILEINFO avi_info;
	AVIFileInfo(avi, &avi_info, sizeof(AVIFILEINFO));

	CString szFileInfo;
	// BUG FIX: szFileType is a character array, so it must be printed
	// with %s (passing it through %d was undefined behavior).
	szFileInfo.Format("Dimention: %dx%d\n"
		"Length: %d frames\n"
		"Max bytes per second: %d\n"
		"Samples per second: %d\n"
		"Streams: %d\n"
		"File Type: %s", avi_info.dwWidth,
		avi_info.dwHeight,
		avi_info.dwLength,
		avi_info.dwMaxBytesPerSec,
		(DWORD)(avi_info.dwRate / avi_info.dwScale),
		avi_info.dwStreams,
		avi_info.szFileType);

	AfxMessageBox(szFileInfo, MB_ICONINFORMATION | MB_OK);

	PAVISTREAM pStream = NULL;
	res = AVIFileGetStream(avi, &pStream, streamtypeVIDEO /*video stream*/,
		0 /*first stream*/);

	if (res != AVIERR_OK)
	{
		if (pStream != NULL)
			AVIStreamRelease(pStream);

		AVIFileRelease(avi); // BUG FIX: the file handle was leaked here
		AVIFileExit();
		return FALSE;
	}

	// AVIStreamStart/AVIStreamLength return -1 on failure.
	int iFirstFrame = AVIStreamStart(pStream);
	int iNumFrames = AVIStreamLength(pStream);
	if (iFirstFrame == -1 || iNumFrames == -1)
	{
		if (pStream != NULL)
			AVIStreamRelease(pStream);

		AVIFileRelease(avi); // BUG FIX: file handle leak
		AVIFileExit();
		return FALSE;
	}

	// NOTE: the old hand-built BITMAPINFOHEADER was dead code — it was
	// never passed to AVIStreamGetFrameOpen (and its biWidth/biHeight
	// were still zero when biSizeImage was computed), so it was removed.
	PGETFRAME pFrame;
	pFrame = AVIStreamGetFrameOpen(pStream,
		NULL/*(BITMAPINFOHEADER*) AVIGETFRAMEF_BESTDISPLAYFMT*/ /*&bih*/);
	if (pFrame == NULL) // BUG FIX: a NULL pFrame was previously dereferenced
	{
		AVIStreamRelease(pStream);
		AVIFileRelease(avi);
		AVIFileExit();
		return FALSE;
	}

	// Walk the stream positions [iFirstFrame, iFirstFrame + iNumFrames).
	// BUG FIX: AVIStreamGetFrame takes an absolute stream position, so
	// pass i (the zero-based `index` is only used for output naming).
	int index = 0;
	for (int i = iFirstFrame; i < iFirstFrame + iNumFrames; i++)
	{
		index = i - iFirstFrame;

		BYTE* pDIB = (BYTE*)AVIStreamGetFrame(pFrame, i);
		if (pDIB != NULL) // BUG FIX: skip frames that failed to decode
			CreateFromPackedDIBPointer(pDIB, index);
	}

	AVIStreamGetFrameClose(pFrame);

	//close the stream and file after finishing the task
	if (pStream != NULL)
		AVIStreamRelease(pStream);

	AVIFileRelease(avi); // BUG FIX: the file was never released on success
	AVIFileExit();

	return TRUE;
}
Beispiel #15
0
void CRotateAVIDlg::ProcessAVI(const TCHAR *source_filename, const TCHAR *dest_filename, eRotation rot)
{
	TCHAR error_buf[1024];

    PAVIFILE source_avi = 0;
    PAVIFILE dest_avi = 0;
    PAVISTREAM pSrcVidStream = 0;
    PAVISTREAM pSrcAudioStream = 0;
	PAVISTREAM pDestVidStream = 0;
	PAVISTREAM pDestAudioStream = 0;
	char *pSrcBuffer = 0;
	char *pJPGBuffer = 0;
	char *pDecompBuffer = 0;
	char *pRotateBuffer = 0;
	char *pDestBuffer = 0;

    AVIFileInit();

	// source setup

    if (AVIFileOpen(&source_avi, source_filename, OF_READ, NULL) != AVIERR_OK)
	{
		_stprintf(error_buf, TEXT("Couldn't open file %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}

	AVIFILEINFO src_avi_info;
    AVIFileInfo(source_avi, &src_avi_info, sizeof(AVIFILEINFO));

    if (AVIFileGetStream(source_avi, &pSrcVidStream, streamtypeVIDEO, 0) != AVIERR_OK)
    {
		_stprintf(error_buf, TEXT("No video stream in %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
    }

	BITMAPINFOHEADER srcBIH;
	long srcvidstreamsize;
    AVIStreamFormatSize(pSrcVidStream, 0, &srcvidstreamsize); 
    if (srcvidstreamsize > sizeof(BITMAPINFOHEADER))
	{
		_stprintf(error_buf, TEXT("Unable to handle video stream format in %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}
 
    srcvidstreamsize = sizeof(BITMAPINFOHEADER); 
    if (AVIStreamReadFormat(pSrcVidStream, 0, &srcBIH, &srcvidstreamsize) != AVIERR_OK)
	{
		_stprintf(error_buf, TEXT("Error reading stream format in %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}
    if (srcBIH.biCompression != MKFOURCC('M','J','P','G'))
	{
		_stprintf(error_buf, TEXT("%s is not motion JPEG format"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}
 
	AVISTREAMINFO vidstream_info;
    if (AVIStreamInfo(pSrcVidStream, &vidstream_info, sizeof(AVISTREAMINFO)) != AVIERR_OK)
	{
		_stprintf(error_buf, TEXT("Error reading stream info in %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}

    int firstVidSrcFrame = AVIStreamStart(pSrcVidStream);
    if (firstVidSrcFrame == -1)
	{
		_stprintf(error_buf, TEXT("Video stream start error in %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}
    int numVidSrcFrames = AVIStreamLength(pSrcVidStream);
    if (numVidSrcFrames == -1)
	{
		_stprintf(error_buf, TEXT("Video stream length error in %s"), source_filename);
		MessageBox(error_buf);
		goto cleanup;
	}

    AVIFileGetStream(source_avi, &pSrcAudioStream, streamtypeAUDIO, 0);
	int firstAudioSrcFrame = 0;
	int numAudioSrcFrames = 0;
	if (pSrcAudioStream)
	{
		firstAudioSrcFrame = AVIStreamStart(pSrcAudioStream);
		if (firstAudioSrcFrame == -1)
		{
			_stprintf(error_buf, TEXT("Audio stream start error in %s"), source_filename);
			MessageBox(error_buf);
			goto cleanup;
		}
		numAudioSrcFrames = AVIStreamLength(pSrcAudioStream);
		if (numAudioSrcFrames == -1)
		{
			_stprintf(error_buf, TEXT("Audio stream length error in %s"), source_filename);
			MessageBox(error_buf);
			goto cleanup;
		}
	}

	// dest setup

	BITMAPINFOHEADER destBIH;
	destBIH = srcBIH;
	if (rot != CW_180)
	{
		destBIH.biWidth = srcBIH.biHeight;
		destBIH.biHeight = srcBIH.biWidth;
	}

    if (AVIFileOpen(&dest_avi, dest_filename, OF_CREATE | OF_WRITE, NULL) != AVIERR_OK)
	{
		_stprintf(error_buf, TEXT("Couldn't open file %s"), dest_filename);
		MessageBox(error_buf);
		goto cleanup;
	}
	vidstream_info.rcFrame.left = vidstream_info.rcFrame.top = 0;
	vidstream_info.rcFrame.right = destBIH.biWidth;
	vidstream_info.rcFrame.bottom = destBIH.biHeight;
 
    if (AVIFileCreateStream(dest_avi, &pDestVidStream, &vidstream_info) != AVIERR_OK)
	{
		_stprintf(error_buf, TEXT("Error creating video stream in %s"), dest_filename);
		MessageBox(error_buf);
		goto cleanup;
	}
 
    if (AVIStreamSetFormat(pDestVidStream, 0, &destBIH, sizeof(BITMAPINFOHEADER)) != AVIERR_OK)
	{ 
		_stprintf(error_buf, TEXT("Error setting video stream format in %s"), dest_filename);
		MessageBox(error_buf);
		goto cleanup;
    } 

    if (AVIStreamSetFormat(pDestVidStream, 0, &destBIH, sizeof(BITMAPINFOHEADER)) != AVIERR_OK)
	{ 
		_stprintf(error_buf, TEXT("Error setting video stream format in %s"), dest_filename);
		MessageBox(error_buf);
		goto cleanup;
    } 

	// video memory
	int img_rgb_size = srcBIH.biHeight * srcBIH.biWidth * 3;
	pSrcBuffer = new char[img_rgb_size];
	pJPGBuffer = new char[img_rgb_size];
	pDecompBuffer = new char[img_rgb_size];
	pRotateBuffer = new char[img_rgb_size];
	pDestBuffer = new char[img_rgb_size];

	long bytes_read;
	long bytes_written;

	for (int i = firstVidSrcFrame; i < numVidSrcFrames; ++i)
	{
		if (AVIStreamRead(pSrcVidStream, i, 1, pSrcBuffer, img_rgb_size, &bytes_read, 0) != AVIERR_OK)
		{
			_stprintf(error_buf, TEXT("Error reading video stream from %s"), source_filename);
			MessageBox(error_buf);
			goto cleanup;
		}

		// well-form the jpg
		int jpglen = ConstructWellFormedJPEG(pSrcBuffer, pJPGBuffer, bytes_read);
		// decompress
		JPEGHandler jpgh_decomp(pJPGBuffer, jpglen);
		jpgh_decomp.DecompressToRGB(pDecompBuffer, img_rgb_size);
		// rotate
		int destx, desty;
		char *pRotSrc;
		char *pRotDest;
		switch (rot)
		{
		case CW_90:
			for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy)
			{
				for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx)
				{
					destx = srcBIH.biHeight-1-srcy;
					desty = srcx;
					pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3];
					pRotDest = &pRotateBuffer[(desty * srcBIH.biHeight + destx) * 3];

					*pRotDest++ = *pRotSrc++;
					*pRotDest++ = *pRotSrc++;
					*pRotDest++ = *pRotSrc++;
				}
			}
			break;

		case CW_180:
			for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy)
			{
				for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx)
				{
					destx = srcBIH.biWidth-1-srcx;
					desty = srcBIH.biHeight-1-srcy;
					pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3];
					pRotDest = &pRotateBuffer[(desty * srcBIH.biWidth + destx) * 3];

					*pRotDest++ = *pRotSrc++;
					*pRotDest++ = *pRotSrc++;
					*pRotDest++ = *pRotSrc++;
				}
			}
			break;

		case ACW_90:
			for (int srcy = 0; srcy < srcBIH.biHeight; ++srcy)
			{
				for (int srcx = 0; srcx < srcBIH.biWidth; ++srcx)
				{
					destx = srcy;
					desty = srcBIH.biWidth-1-srcx;
					pRotSrc = &pDecompBuffer[(srcy * srcBIH.biWidth + srcx) * 3];
					pRotDest = &pRotateBuffer[(desty * srcBIH.biHeight + destx) * 3];

					*pRotDest++ = *pRotSrc++;
					*pRotDest++ = *pRotSrc++;
					*pRotDest++ = *pRotSrc++;
				}
			}
			break;
		}
		// compress
		JPEGHandler jpgh_comp(pRotateBuffer, img_rgb_size);
		if (rot != CW_180)
			destBIH.biSizeImage = jpgh_comp.CompressFromRGB(pDestBuffer, img_rgb_size, srcBIH.biHeight, srcBIH.biWidth);
		else
			destBIH.biSizeImage = jpgh_comp.CompressFromRGB(pDestBuffer, img_rgb_size, srcBIH.biWidth, srcBIH.biHeight);

		if (AVIStreamWrite(pDestVidStream, i, 1, pDestBuffer, destBIH.biSizeImage, AVIIF_KEYFRAME, NULL, &bytes_written) != AVIERR_OK)
		{
			_stprintf(error_buf, TEXT("Error writing video stream to %s"), dest_filename);
			MessageBox(error_buf);
			goto cleanup;
		}
	} 
 
cleanup:
	delete[] pSrcBuffer;
	delete[] pDestBuffer;
	delete[] pJPGBuffer;
	delete[] pDecompBuffer;
	delete[] pRotateBuffer;
	if (pDestAudioStream) AVIStreamRelease(pDestAudioStream);
	if (pDestVidStream) AVIStreamRelease(pDestVidStream);
	if (pSrcAudioStream) AVIStreamRelease(pSrcAudioStream);
	if (pSrcVidStream) AVIStreamRelease(pSrcVidStream);
	if (dest_avi) AVIFileRelease(dest_avi);
	if (source_avi) AVIFileRelease(source_avi);

	AVIFileExit();
}
void CAviHelper::AVItoBmp(const wstring& strAVIFileName, const wstring& strBmpDir)
{
	AVIFileInit();
	PAVIFILE avi;
	int res = AVIFileOpen(&avi, WS2S(strAVIFileName).c_str(), OF_READ, NULL);
	int n = GetLastError();
	if (res!=AVIERR_OK)
	{
		//an error occures
		if (avi!=NULL)
			AVIFileRelease(avi);
		return ;
	}
	
	AVIFILEINFO avi_info;
	AVIFileInfo(avi, &avi_info, sizeof(AVIFILEINFO));
	PAVISTREAM pStream;
	res=AVIFileGetStream(avi, &pStream, streamtypeVIDEO /*video stream*/, 0 /*first stream*/);
	if (res!=AVIERR_OK)
	{
		if (pStream!=NULL)
			AVIStreamRelease(pStream);
		AVIFileExit();
		return ;
	}
	
	//do some task with the stream
	int iNumFrames;
	int iFirstFrame;
	iFirstFrame = AVIStreamStart(pStream);
	if (iFirstFrame==-1)
	{
		//Error getteing the frame inside the stream
		if (pStream!=NULL)
			AVIStreamRelease(pStream);
		AVIFileExit();
		return ;
	}
	
	iNumFrames = AVIStreamLength(pStream);
	if (iNumFrames==-1)
	{
		//Error getteing the number of frames inside the stream
		if (pStream!=NULL)
			AVIStreamRelease(pStream);
		AVIFileExit();
		return ;
	}
	
	//getting bitmap from frame
	BITMAPINFOHEADER bih;
	ZeroMemory(&bih, sizeof(BITMAPINFOHEADER));
	bih.biBitCount=24; //24 bit per pixel
	bih.biClrImportant=0;
	bih.biClrUsed = 0;
	bih.biCompression = BI_RGB;
	bih.biPlanes = 1;
	bih.biSize = 40;
	bih.biXPelsPerMeter = 0;
	bih.biYPelsPerMeter = 0;
	
	//calculate total size of RGBQUAD scanlines (DWORD aligned)
	bih.biSizeImage = (((bih.biWidth * 3) + 3) & 0xFFFC) * bih.biHeight ;
	PGETFRAME pFrame;
	pFrame=AVIStreamGetFrameOpen(pStream, NULL );
	AVISTREAMINFO streaminfo;
	AVIStreamInfo(pStream,&streaminfo,sizeof(AVISTREAMINFO));
	
	//Get the first frame
	BITMAPINFOHEADER bih2;
	long lsize = sizeof(bih2);
	int index= 0;
	for (int i = iFirstFrame; i < iNumFrames; i++)
	{
		index= i-iFirstFrame;
		BYTE* pDIB = (BYTE*) AVIStreamGetFrame(pFrame, index); //
		AVIStreamReadFormat(pStream,index,&bih2,&lsize);
		BITMAPFILEHEADER stFileHdr;
		BYTE* Bits=new BYTE[bih2.biSizeImage];
		AVIStreamRead(pStream,index,1,Bits,bih2.biSizeImage,NULL,NULL);
		//RtlMoveMemory(Bits, pDIB + sizeof(BITMAPINFOHEADER), bih2.biSizeImage);
		bih2.biClrUsed =0;
		stFileHdr.bfOffBits=sizeof(BITMAPFILEHEADER)+sizeof(BITMAPINFOHEADER);
		stFileHdr.bfSize=sizeof(BITMAPFILEHEADER);
		stFileHdr.bfType=0x4d42; 
		CString FileName;
		FileName.Format(_T("Frame-%05d.bmp"), index);
		CString strtemp;
		strtemp.Format(_T("%s\\%s"), strBmpDir.c_str(), FileName);
		FILE* fp=_tfopen(strtemp ,_T("wb"));
		fwrite(&stFileHdr,1,sizeof(BITMAPFILEHEADER),fp);
		fwrite(&bih2,1,sizeof(BITMAPINFOHEADER),fp);
		int ff = fwrite(Bits,1,bih2.biSizeImage,fp);
		int e = GetLastError();
		fclose(fp);
		/////
		delete Bits;
		//CreateFromPackedDIBPointer(pDIB, index);
	}
	
	AVIStreamGetFrameClose(pFrame);
	//close the stream after finishing the task
	if (pStream!=NULL)
		AVIStreamRelease(pStream);
	AVIFileExit();
}
Beispiel #17
0
// AVI_stream_open() will open the AVI file and prepare it for reading, but will not 
// store any of the frame data. 
//
//	returns:   0 ==> success
//           !0 ==> could not open the AVI stream
//
// The filename is expected to be an absolute pathname (or file in the current working directory)
//
int AVI_stream_open(char* filename)
{
	if ( !AVI_stream_inited )
		AVI_stream_init();

	int				hr; 
	PAVIFILE			pfile; 
	PAVISTREAM		pstream;
	AVISTREAMINFO	avi_stream_info;

	Assert( !(AVI_stream.flags & AVI_STREAM_F_USED) );

	// Open the AVI file
	hr = AVIFileOpen(&pfile, filename, OF_SHARE_DENY_WRITE, 0); 
	if (hr != 0){ 
//		nprintf(("Warning", "AVI ==> Unable to open %s", filename)); 
		return -1; 
	} 
 
	strcpy(AVI_stream.filename, filename);

	// Get a handle to the video stream within the AVI file	
	hr = AVIFileGetStream(pfile, &pstream, streamtypeVIDEO, 0); 
	if (hr != 0){ 
		//nprintf(("Warning", "AVI ==> Unable to open video stream in %s", filename)); 
		return -1; 
	} 

	// Store the pointer to stream, since we'll need it later to read from disk
	AVI_stream.pstream = pstream;
	AVI_stream.pfile = pfile;

	// Get information on the stream
	hr = AVIStreamInfo( pstream, &avi_stream_info, sizeof(AVISTREAMINFO) );
	if (hr != 0){ 
		//nprintf(("Warning", "AVI ==> Unable to retreive stream info in %s", filename)); 
		return -1; 
	} 


	int buffer_size;
	
	int start_sample = AVIStreamStart(pstream);
	Assert( start_sample == 0 );

	int end_sample = AVIStreamEnd(pstream);
	Assert( end_sample >= start_sample );

	// store the number of frames in the AVI_info[] structure
	AVI_stream.num_frames = end_sample;		// start sample must be 0
	Assert(AVI_stream.num_frames == AVIStreamLength(pstream) );


	// Get information on the stream
	hr = AVIStreamInfo( pstream, &avi_stream_info, sizeof(AVISTREAMINFO) );
	if (hr != 0){ 
		//nprintf(("Warning", "AVI ==> Unable to retreive stream info in %s", filename)); 
		return -1; 
	} 

	buffer_size = avi_stream_info.dwSuggestedBufferSize;
	Assert( buffer_size > 0 );
	AVI_stream.min_compressed_buffer_size = buffer_size;

	// determine the format of the AVI image data
	ubyte* format_buffer;
	long format_buffer_size;
	BITMAPINFO* bitmap_info;

	hr = AVIStreamFormatSize(pstream, 0, &format_buffer_size);
	Assert( format_buffer_size > 0 );

	format_buffer = (ubyte*) malloc(format_buffer_size);
	Assert(format_buffer != NULL);	// format_buffer is free'ed when AVI is free'ed, since memory is used by b_info member in AVI_info[] structure

	hr = AVIStreamReadFormat(pstream, 0, format_buffer, &format_buffer_size);
	bitmap_info = (BITMAPINFO*)format_buffer;


	switch ( bitmap_info->bmiHeader.biCompression ) {
		case BI_RLE8:
			break;

		default:
			Assert(0);
			break;
	}

	AVI_stream.w = bitmap_info->bmiHeader.biWidth;
	AVI_stream.h = bitmap_info->bmiHeader.biHeight;
	AVI_stream.bpp = bitmap_info->bmiHeader.biBitCount;
		
	// create the palette translation look-up table
	//
	// Transparency:  If the palette color is full green, then treat as transparent
	//						
	RGBQUAD* pal;
	pal = (RGBQUAD*)(bitmap_info->bmiColors);

	// Store the palette in the AVI stream structure
	for ( int i = 0; i < 256; i++ ) {
		AVI_stream.palette[i*3]	  = pal[i].rgbRed;
		AVI_stream.palette[i*3+1] = pal[i].rgbGreen;
		AVI_stream.palette[i*3+2] = pal[i].rgbBlue;
	}	


	//	memcpy(AVI_stream.palette, pal, 256*3);
	
/*
	int transparent_found = 0;
	for ( i = 0; i < 256; i++ ) {

		//nprintf(("AVI", "AVI ==> R: %d  G: %d  B: %d\n", pal[i].rgbRed, pal[i].rgbGreen, pal[i].rgbBlue));
		if ( pal[i].rgbRed < 5 && pal[i].rgbGreen > 250 && pal[i].rgbBlue < 5 ) {
			avi_stream->pal_translation[i]	= TRANSPARENT_INDEX;
			break;	// found transparent, continue in j for loop, since don't need check any more
		}
		else
			avi_stream->pal_translation[i] = palette_find(	pal[i].rgbRed, pal[i].rgbGreen, pal[i].rgbBlue ); 
	}	

	for ( j = i+1; j < 256; j++ ) {
		avi_stream->pal_translation[j] = palette_find(	pal[j].rgbRed, pal[j].rgbGreen, pal[j].rgbBlue ); 
	}
*/

	free(format_buffer);

	// set the flag to used, so to make sure we only process one AVI stream at a time
	AVI_stream.flags |= AVI_STREAM_F_USED;	


	return 0;
}
Beispiel #18
0
// Begin (or extend) playback of the audio stream 'pavi' over the sample range
// [lStart, lEnd).  Negative bounds default to the stream's start/end.  When
// fWait is TRUE the call blocks until every queued wave buffer has drained.
// Returns TRUE on success, FALSE when there is nothing playable or the wave
// device cannot be opened.
BOOL CALLBACK aviaudioPlay(HWND hwnd, PAVISTREAM pavi, LONG lStart, LONG lEnd, BOOL fWait)
{
	// Nothing to do when the stream was judged unplayable.
	if (audioPlayable <= 0)
		return FALSE;

	// Force aviaudioTime() to re-query the stream bounds on its next call.
	recalc = 1;

	// Substitute the stream defaults for negative bounds.
	if (lStart < 0)
		lStart = AVIStreamStart(pavi);
	if (lEnd < 0)
		lEnd = AVIStreamEnd(pavi);

	// An empty or inverted range is a failure, not a no-op.
	if (lEnd <= lStart)
		return FALSE;

	if (!aviaudioOpenDevice(hwnd, pavi)) {
		// Only the interactive run modes surface a message box.
		if (runmode == 0 || runmode == 1)
			MessageOut(NULL, IDS_AOF, IDS_NOTE, MB_OK | MB_ICONEXCLAMATION);
		return FALSE;
	}

	if (sfPlaying) {
		// Already playing: just push the end point out.
		slEnd = lEnd;
	} else {
		// Starting fresh: keep the device paused until the buffers are
		// primed, so playback begins seamlessly.
		waveOutPause(shWaveOut);

		slBegin = lStart;
		slCurrent = lStart;
		slEnd = lEnd;
		sfPlaying = TRUE;
	}

	// Queue audio data, then let the device run.
	aviaudioiFillBuffers();
	waveOutRestart(shWaveOut);

	// Caller wants us not to return until play is finished.
	if (fWait) {
		while (swBuffersOut > 0)
			Yield();
	}

	return TRUE;
}
Beispiel #19
0
// Open the AVI file (m_szFileName), locate its first video stream, and prepare
// a GETFRAME handle that decodes frames to 32-bit RGB.  Also caches the first
// sample index (m_lFirstSample) and the total sample count (m_lSampleCount).
//
// Returns S_OK on success.  On any failure the partially acquired resources
// are released via ReleaseMemory() and a description is left in
// m_szLastErrorMsg.  (Fixed a typo in the read-format error message.)
HRESULT CAviBitmap::Init()
{
	HRESULT hr = E_FAIL;
	
	// do/while(FALSE): single-exit error handling — 'break' jumps to cleanup.
	do 
	{
		//Open file
		hr = AVIFileOpen(&m_pAviFile, m_szFileName, OF_READ, NULL);
		if(hr != S_OK)
		{
			m_szLastErrorMsg.Format(_T("Unable to Open the Movie File"));
			break;
		}

		//Get video stream
		hr = AVIFileGetStream(m_pAviFile, &m_pAviStream, streamtypeVIDEO /*video stream*/, 0 /*first stream*/);
		if(hr != S_OK)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the video stream"));
			break;
		}

		hr = AVIStreamInfo(m_pAviStream, &m_aviInfo, sizeof(AVISTREAMINFO));
		if(hr != S_OK)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the video stream info"));
			break;
		}
		// Trace the codec FourCC for diagnostics.
		CString szFourCC;
		FourCC2Str(m_aviInfo.fccHandler, szFourCC);
		AfxTrace(_T("fccHandler=%s, 0x%08X\n"), szFourCC, m_aviInfo.fccHandler);

		ZeroMemory(&m_biWanted, sizeof(m_biWanted));
		LONG lFormat = sizeof(m_biWanted);

		hr = AVIStreamReadFormat(m_pAviStream, 0, &m_biWanted, &lFormat);
		if(hr != S_OK)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the format of the 1st frame"));
			break;
		}
		// Ask the decoder for uncompressed 32-bit RGB frames; 4 bytes/pixel
		// keeps rows DWORD-aligned with no padding arithmetic.
		m_biWanted.biCompression = BI_RGB;
		m_biWanted.biBitCount = 32;
		m_biWanted.biSizeImage = m_biWanted.biWidth * 4 * m_biWanted.biHeight;

		//Set the result to Fail (the calls below report failure via NULL/-1,
		//not an HRESULT)
		hr = E_FAIL;

		//Get the GETFRAME handle
		m_pGetFrame = AVIStreamGetFrameOpen(m_pAviStream, &m_biWanted);
		if(m_pGetFrame == NULL)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the GETFRAME handle"));
			break;
		}

		//Get the 1st sample
		m_lFirstSample = AVIStreamStart(m_pAviStream);
		if(m_lFirstSample == -1)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the first sample"));
			break;
		}

		//Get the total sample count
		m_lSampleCount = AVIStreamLength(m_pAviStream);
		if(m_lSampleCount == -1)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the sample count"));
			break;
		}

		//Done
		hr = S_OK;

	} while (FALSE);
	
	if(hr != S_OK)
	{
		ReleaseMemory();
	}

	return hr;
}
Beispiel #20
0
// Fill the OpenAL buffer 'buffer' with up to 'nbr_samples' 16-bit samples,
// taken either from the libsndfile handle (m_file) or — on Windows — from the
// AVI audio stream (m_aviStream).  Handles looping (m_loop) and end-of-stream
// (clears m_play).  Returns false on a read or OpenAL error, true otherwise.
bool CMusic::readData(ALuint buffer, ALsizei nbr_samples)
{
    ALenum error = alGetError();

    // Audio samples are read as signed 16-bit integers.
    std::vector<ALshort> samples(nbr_samples);

    // Reading from a sound file (libsndfile)
    if (m_file)
    {
        // Fill the buffer from the file
        if (sf_read_short(m_file, &samples[0], nbr_samples) == 0)
        {
            // End of file: rewind when looping, otherwise stop playback
            if (m_loop)
            {
                sf_seek(m_file, 0, SEEK_SET);
#ifdef T_SYSTEM_WINDOWS
                m_sampleCount = 0;
#endif
            }
            else
            {
                m_play = false;
            }

            return true;
        }

        // Hand the samples to OpenAL (nbr_samples shorts = nbr_samples*2 bytes)
        alBufferData(buffer, m_format, &samples[0], nbr_samples * sizeof(ALushort), m_sampleRate);

        // Error handling
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alBufferData", __LINE__);
            return false;
        }

        return true;
    }

#ifdef T_SYSTEM_WINDOWS
    // Reading from a video's audio stream (VFW)
    else if (m_fromVideo && m_aviStream)
    {
        LONG samples_read;

        // m_sampleCount tracks our position relative to the stream start.
        switch (AVIStreamRead(m_aviStream, AVIStreamStart(m_aviStream) + m_sampleCount, nbr_samples, &samples[0], nbr_samples * sizeof(ALshort), nullptr, &samples_read))
        {
            // No error
            case 0: break;

            case AVIERR_BUFFERTOOSMALL:
                CApplication::getApp()->log("AVIStreamRead : The buffer size was smaller than a single sample of data.", ILogger::Error);
                return false;

            case AVIERR_MEMORY:
                CApplication::getApp()->log("AVIStreamRead : There was not enough memory to complete the read operation.", ILogger::Error);
                return false;

            case AVIERR_FILEREAD:
                CApplication::getApp()->log("AVIStreamRead : A disk error occurred while reading the file.", ILogger::Error);
                return false;

            default:
                CApplication::getApp()->log("AVIStreamRead : Unknown error", ILogger::Error);
                return false;
        }

        // Divide by the size of one sample (2 bytes).
        // NOTE(review): samples_read comes back through AVIStreamRead's
        // plSamples parameter (a sample count, not bytes), so halving it here
        // looks suspect — confirm against the stream's dwSampleSize.
        m_sampleCount += samples_read / 2;

        // Fewer samples read than requested: we hit the end — loop or stop.
        //TODO: verify this for every possible format
        if ((m_format == AL_FORMAT_MONO16   && samples_read < nbr_samples) ||
            (m_format == AL_FORMAT_STEREO16 && samples_read * 2 < nbr_samples))
        {
            if (m_loop)
            {
                m_sampleCount = 0;
            }
            else
            {
                m_play = false;
            }
        }

        // Hand the data to OpenAL
        alBufferData(buffer, m_format, &samples[0], samples_read * sizeof(ALushort), m_sampleRate);

        // Error handling
        if ((error = alGetError()) != AL_NO_ERROR)
        {
            CSoundEngine::displayOpenALError(error, "alBufferData", __LINE__);
            return false;
        }

        return true;
    }
#endif

    return false;
}
// Return the first sample position of the tunneled stream by forwarding the
// query to the underlying VFW stream handle (pas).
VDPosition AVIReadTunnelStream::Start() {
    return AVIStreamStart(pas);
}
Beispiel #22
0
/* Create a default test AVI (one video + one audio stream) in the temp
 * directory, then verify every field of the stream infos and the audio wave
 * format that the AVIFile API reports for it.  The expected values mirror the
 * headers written by create_avi_file(). */
static void test_default_data(void)
{
    COMMON_AVI_HEADERS cah;
    char filename[MAX_PATH];
    PAVIFILE pFile;
    int res;
    LONG lSize;
    PAVISTREAM pStream0;
    PAVISTREAM pStream1;
    AVISTREAMINFOA asi0, asi1;
    WAVEFORMATEX wfx;

    /* Build the test file in the temp directory. */
    GetTempPathA(MAX_PATH, filename);
    strcpy(filename+strlen(filename), testfilename);

    init_test_struct(&cah);
    create_avi_file(&cah, filename);

    /* Open it and distinguish the possible failure codes for diagnostics. */
    res = AVIFileOpenA(&pFile, filename, OF_SHARE_DENY_WRITE, 0L);
    ok(res != AVIERR_BADFORMAT, "Unable to open file: error1=%u\n", AVIERR_BADFORMAT);
    ok(res != AVIERR_MEMORY, "Unable to open file: error2=%u\n", AVIERR_MEMORY);
    ok(res != AVIERR_FILEREAD, "Unable to open file: error3=%u\n", AVIERR_FILEREAD);
    ok(res != AVIERR_FILEOPEN, "Unable to open file: error4=%u\n", AVIERR_FILEOPEN);
    ok(res != REGDB_E_CLASSNOTREG, "Unable to open file: error5=%u\n", REGDB_E_CLASSNOTREG);
    ok(res == 0, "Unable to open file: error=%u\n", res);

    /* Fetch both streams by index (fccType 0 = any type). */
    res = AVIFileGetStream(pFile, &pStream0, 0, 0);
    ok(res == 0, "Unable to open video stream: error=%u\n", res);

    res = AVIFileGetStream(pFile, &pStream1, 0, 1);
    ok(res == 0, "Unable to open audio stream: error=%u\n", res);

    res = AVIStreamInfoA(pStream0, &asi0, sizeof(asi0));
    ok(res == 0, "Unable to read stream info: error=%u\n", res);

    res = AVIStreamInfoA(pStream1, &asi1, sizeof(asi1));
    ok(res == 0, "Unable to read stream info: error=%u\n", res);

    /* NULL buffer queries only the format size; the second call reads it. */
    res = AVIStreamReadFormat(pStream0, AVIStreamStart(pStream1), NULL, &lSize);
    ok(res == 0, "Unable to read format size: error=%u\n", res);

    res = AVIStreamReadFormat(pStream1, AVIStreamStart(pStream1), &wfx, &lSize);
    ok(res == 0, "Unable to read format: error=%u\n", res);

    /* Video stream info checks. */
    ok(asi0.fccType == streamtypeVIDEO, "got 0x%x (expected streamtypeVIDEO)\n", asi0.fccType);
    ok(asi0.fccHandler == 0x30323449, "got 0x%x (expected 0x30323449)\n", asi0.fccHandler);
    ok(asi0.dwFlags == 0, "got %u (expected 0)\n", asi0.dwFlags);
    ok(asi0.wPriority == 0, "got %u (expected 0)\n", asi0.wPriority);
    ok(asi0.wLanguage == 0, "got %u (expected 0)\n", asi0.wLanguage);
    ok(asi0.dwScale == 1001, "got %u (expected 1001)\n", asi0.dwScale);
    ok(asi0.dwRate == 30000, "got %u (expected 30000)\n", asi0.dwRate);
    ok(asi0.dwStart == 0, "got %u (expected 0)\n", asi0.dwStart);
    ok(asi0.dwLength == 1, "got %u (expected 1)\n", asi0.dwLength);
    ok(asi0.dwInitialFrames == 0, "got %u (expected 0)\n", asi0.dwInitialFrames);
    ok(asi0.dwSuggestedBufferSize == 0, "got %u (expected 0)\n", asi0.dwSuggestedBufferSize);
    ok(asi0.dwQuality == 0xffffffff, "got 0x%x (expected 0xffffffff)\n", asi0.dwQuality);
    ok(asi0.dwSampleSize == 0, "got %u (expected 0)\n", asi0.dwSampleSize);
    ok(asi0.rcFrame.left == 0, "got %u (expected 0)\n", asi0.rcFrame.left);
    ok(asi0.rcFrame.top == 0, "got %u (expected 0)\n", asi0.rcFrame.top);
    ok(asi0.rcFrame.right == 8, "got %u (expected 8)\n", asi0.rcFrame.right);  /* these are based on the values in the mah and not */
    ok(asi0.rcFrame.bottom == 6, "got %u (expected 6)\n", asi0.rcFrame.bottom);/* on the ones in the ash which are 0 here */
    ok(asi0.dwEditCount == 0, "got %u (expected 0)\n", asi0.dwEditCount);
    ok(asi0.dwFormatChangeCount == 0, "got %u (expected 0)\n", asi0.dwFormatChangeCount);

    /* Audio stream info checks. */
    ok(asi1.fccType == streamtypeAUDIO, "got 0x%x (expected streamtypeVIDEO)\n", asi1.fccType);
    ok(asi1.fccHandler == 0x1, "got 0x%x (expected 0x1)\n", asi1.fccHandler);
    ok(asi1.dwFlags == 0, "got %u (expected 0)\n", asi1.dwFlags);
    ok(asi1.wPriority == 0, "got %u (expected 0)\n", asi1.wPriority);
    ok(asi1.wLanguage == 0, "got %u (expected 0)\n", asi1.wLanguage);
    ok(asi1.dwScale == 1, "got %u (expected 1)\n", asi1.dwScale);
    ok(asi1.dwRate == 11025, "got %u (expected 11025)\n", asi1.dwRate);
    ok(asi1.dwStart == 0, "got %u (expected 0)\n", asi1.dwStart);
    ok(asi1.dwLength == 1637, "got %u (expected 1637)\n", asi1.dwLength);
    ok(asi1.dwInitialFrames == 0, "got %u (expected 0)\n", asi1.dwInitialFrames);
    ok(asi1.dwSuggestedBufferSize == 0, "got %u (expected 0)\n", asi1.dwSuggestedBufferSize);
    ok(asi1.dwQuality == 0xffffffff, "got 0x%x (expected 0xffffffff)\n", asi1.dwQuality);
    ok(asi1.dwSampleSize == 2, "got %u (expected 2)\n", asi1.dwSampleSize);
    ok(asi1.rcFrame.left == 0, "got %u (expected 0)\n", asi1.rcFrame.left);
    ok(asi1.rcFrame.top == 0, "got %u (expected 0)\n", asi1.rcFrame.top);
    ok(asi1.rcFrame.right == 0, "got %u (expected 0)\n", asi1.rcFrame.right);
    ok(asi1.rcFrame.bottom == 0, "got %u (expected 0)\n", asi1.rcFrame.bottom);
    ok(asi1.dwEditCount == 0, "got %u (expected 0)\n", asi1.dwEditCount);
    ok(asi1.dwFormatChangeCount == 0, "got %u (expected 0)\n", asi1.dwFormatChangeCount);

    /* Wave format checks (PCM, stereo, 11025 Hz). */
    ok(wfx.wFormatTag == 1, "got %u (expected 1)\n",wfx.wFormatTag);
    ok(wfx.nChannels == 2, "got %u (expected 2)\n",wfx.nChannels);
    ok(wfx.wFormatTag == 1, "got %u (expected 1)\n",wfx.wFormatTag);
    ok(wfx.nSamplesPerSec == 11025, "got %u (expected 11025)\n",wfx.nSamplesPerSec);
    ok(wfx.nAvgBytesPerSec == 22050, "got %u (expected 22050)\n",wfx.nAvgBytesPerSec);
    ok(wfx.nBlockAlign == 2, "got %u (expected 2)\n",wfx.nBlockAlign);

    AVIStreamRelease(pStream0);
    AVIStreamRelease(pStream1);
    AVIFileRelease(pFile);
    ok(DeleteFileA(filename) !=0, "Deleting file %s failed\n", filename);
}
Beispiel #23
0
// Open an AVI file for playback: set up the video stream, a DIB section and a
// Horde3D texture for the decoded frames, and (if present) read the whole
// audio stream up front and hand it to the sound component.
//
// filename: path of the AVI file.
// Returns true on success; on failure all partially acquired resources are
// released and false is returned.
//
// Fix vs. the previous version: the audio stream handle returned by
// AVIStreamOpenFromFile() is now released after use (it was leaked).
bool VideoComponent::openAvi(const std::string& filename)
{
	// Stop any currently loaded avi
	closeAvi();

	AVIFileInit();							// Opens The AVIFile Library
	// Opens The AVI Stream
	if (AVIStreamOpenFromFile(&m_pavi, filename.c_str(), streamtypeVIDEO, 0, OF_READ, NULL) !=0)
	{
		GameLog::errorMessage("Error opening avi: %s", filename.c_str());
		// An Error Occurred Opening The Stream
		AVIFileExit();								// Release The File
		return false;
	}
	AVIStreamInfo(m_pavi, &m_psi, sizeof(m_psi));						// Reads Information About The Stream Into psi
	m_width = m_psi.rcFrame.right-m_psi.rcFrame.left;					// Width Is Right Side Of Frame Minus Left
	m_height = m_psi.rcFrame.bottom-m_psi.rcFrame.top;					// Height Is Bottom Of Frame Minus Top
	if (!m_resize)
	{
		// Size should be kept
		m_resizeWidth = m_width;
		m_resizeHeight = m_height;
	}
	m_lastframe = AVIStreamLength(m_pavi);								// The Last Frame Of The Stream
	m_timePerFrame = ((float)AVIStreamSampleToTime(m_pavi, m_lastframe) / (float) m_lastframe) / 1000.0f;	// Calculate Rough Seconds Per Frame
	// DIB section the decoded frames are blitted into (target size)
	m_bmih.biSize		= sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
	m_bmih.biPlanes		= 1;					// Bitplanes
	m_bmih.biBitCount	= 24;					// Bits Format We Want 24 / 8  = 3 bytes
	m_bmih.biWidth		= m_resizeWidth;		// Width We Want
	m_bmih.biHeight		= m_resizeHeight;		// Height We Want
	m_bmih.biCompression= BI_RGB;				// Requested Mode = RGB
	m_hBitmap = CreateDIBSection (m_hdc, (BITMAPINFO*)(&m_bmih), DIB_RGB_COLORS, (void**)(&m_data), NULL, NULL);	
	SelectObject (m_hdc, m_hBitmap);					// Select hBitmap Into Our Device Context (hdc)
	// Bitmapinfo header for decoding (needed for xvid) — uses the stream's
	// native size, not the resize target
	m_bmiavih.biSize = sizeof(BITMAPINFOHEADER);
	m_bmiavih.biPlanes			= 1;					// Bitplanes
	m_bmiavih.biBitCount		= 24;					// Bits Format We Want 24 / 8  = 3 bytes
	m_bmiavih.biWidth			= m_width;				// Width We Want
	m_bmiavih.biHeight			= m_height;				// Height We Want
	m_bmiavih.biCompression		= BI_RGB;				// Requested Mode = RGB
	// And some more infos
	m_bmiavih.biClrImportant	= 0;
	m_bmiavih.biClrUsed			= 0;
	m_bmiavih.biXPelsPerMeter	= 0;
	m_bmiavih.biYPelsPerMeter	= 0;
	// total size of the DWORD-aligned 24-bit scanlines
	m_bmiavih.biSizeImage = (((m_bmiavih.biWidth * 3) + 3) & 0xFFFC) * m_bmiavih.biHeight;
	m_pgf=AVIStreamGetFrameOpen(m_pavi, &m_bmiavih);// Create The PGETFRAME Using Our Request Mode
	if (m_pgf==0x0)
	{
		GameLog::errorMessage("Error opening first frame of avi: %s", filename.c_str());
		// An Error Occurred Opening The Frame
		DeleteObject(m_hBitmap);					// Delete The Device Dependant Bitmap Object
		AVIStreamRelease(m_pavi);					// Release The Stream
		AVIFileExit();								// Release The File
		return false;
	}
	m_fileName = filename;

	// Create buffer for converted data
	// width*height = count pixel; each pixel has 4 channels for rgba with each one byte
	int dataSize = 4*m_resizeWidth*m_resizeHeight;
	m_bgraData = new unsigned char[dataSize];
	// Initialize with 255 (black screen with full alpha)
	memset(m_bgraData, 255, dataSize);

	// Prepare horde texture stream named like the video file name, to get a unique name
	m_videoTexture = h3dCreateTexture(filename.c_str(), m_resizeWidth, m_resizeHeight, H3DFormats::TEX_BGRA8, H3DResFlags::NoTexMipmaps);
	if (m_videoTexture == 0)
	{
		GameLog::errorMessage("Error creating texture for playing avi: %s", filename.c_str());
		// Failure creating the dynamic texture
		closeAvi();
		return false;
	}

	// Find the sampler index within the material
	m_samplerIndex = h3dFindResElem(m_material, H3DMatRes::SamplerElem, H3DMatRes::SampNameStr, "albedoMap");	
	if (m_samplerIndex == -1)
	{
		GameLog::errorMessage("Error preparing material with resID %d for playing avi: %s", m_material, filename.c_str());
		// No sampler found in material
		closeAvi();
		return false;
	}

	// Store old sampler
	m_originalSampler = h3dGetResParamI(m_material, H3DMatRes::SamplerElem, m_samplerIndex, H3DMatRes::SampTexResI);

	
	// Now open the audio stream
	PAVISTREAM audioStream;
	if (AVIStreamOpenFromFile(&audioStream, filename.c_str(), streamtypeAUDIO, 0, OF_READ, NULL) == 0)
	{
		// Audio stream found
		// Get format info
		PCMWAVEFORMAT audioFormat;
		long formatSize = sizeof(audioFormat);
		int start = AVIStreamStart(audioStream);
		// TODO get channelsmask and use it
		AVIStreamReadFormat(audioStream, start, &audioFormat, &formatSize);
		long numSamples = AVIStreamLength(audioStream);
		int bitsPerSample = (audioFormat.wf.nAvgBytesPerSec * 8) / (audioFormat.wf.nSamplesPerSec * audioFormat.wf.nChannels);
		/*if (audioFormat.wf.wFormatTag == WAVE_FORMAT_MPEGLAYER3)
		{
			// TODO
			MPEGLAYER3WAVEFORMAT mp3Format;
			formatSize = sizeof(mp3Format);
			AVIStreamReadFormat(audioStream, start, &mp3Format, &formatSize);
		}*/

		// Create buffer with appropriate size
		long bufferSize = (bitsPerSample * numSamples) / 8;
		char* buffer = new char[bufferSize];
		// Read the audio data
		long bytesWritten = 0;
		AVIStreamRead(audioStream, start, numSamples, buffer, bufferSize, &bytesWritten, 0x0);

		if (bytesWritten > 0)
		{
			// Send the audio data to the sound component
			SoundResourceData eventData(buffer, bytesWritten, audioFormat.wf.nSamplesPerSec, bitsPerSample, audioFormat.wf.nChannels);
			GameEvent event(GameEvent::E_SET_SOUND_WITH_USER_DATA, &eventData, this);
			m_owner->executeEvent(&event);
			m_hasAudio = true;
		}

		// Delete the buffer data
		delete[] buffer;

		// Release the audio stream handle (previously leaked)
		AVIStreamRelease(audioStream);
	}

	if (m_autoStart)
		// Play video directly
		playAvi();

	return true;
}
Beispiel #24
-1
// Return the playback position, in milliseconds, of the audio sample that is
// currently being rendered, or -1 when no audio is playing (or the wave device
// reports its position in a unit we cannot convert).
LONG CALLBACK aviaudioTime(void)
{
	MMTIME mmtime;

	// No usable audio, or playback not in progress.
	if (audioPlayable <= 0 || !sfPlaying)
		return -1;

	// Re-read the stream bounds once after aviaudioPlay() flagged a restart.
	if (recalc) {
		streamEnd = AVIStreamEnd(spavi);
		streamStart = AVIStreamStart(spavi);
		recalc = 0;
	}

	// Degenerate stream bounds: nothing meaningful to report.
	if (streamEnd <= streamStart || streamEnd <= 0)
		return -1;

	// Ask the wave device for its position, preferably in samples; the driver
	// may silently substitute another unit in mmtime.wType.
	mmtime.wType = TIME_SAMPLES;
	waveOutGetPosition(shWaveOut, &mmtime, sizeof(mmtime));

	switch (mmtime.wType) {
	case TIME_SAMPLES:
		return AVIStreamSampleToTime(spavi, slBegin) + muldiv32(mmtime.u.sample, 1000, sdwSamplesPerSec);
	case TIME_BYTES:
		return AVIStreamSampleToTime(spavi, slBegin) + muldiv32(mmtime.u.cb, 1000, sdwBytesPerSec);
	default:
		return -1;
	}
}