Example No. 1
  void *get_frame_data(int frame_num)
  {
    if (num_frames <= 0)
      return NULL;

    /* wrap the requested frame into the range [0, num_frames) */
    if (frame_num < 0)
      frame_num = ((frame_num % num_frames) + num_frames) % num_frames;
    else if (frame_num >= num_frames)
      frame_num = frame_num % num_frames;

    LPBITMAPINFOHEADER lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(getFrame, frame_num);

    if (lpbi)
    {
      // frame pixel data is 40 bytes past the BITMAPINFOHEADER; the hard-coded 40
      // assumes biSize == 40 and no color table (biClrUsed == 0)
      void *bitmap = ((unsigned char *)lpbi) + 40;

      return bitmap;
    }

    return NULL;
  }
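For comparison with the hard-coded "+ 40" above, here is a minimal sketch (the DibPixels helper name is made up; it is not taken from any project in this listing) of the more general offset computation most of the later examples use. AVIStreamGetFrame returns a packed DIB, so the pixel bits start after the BITMAPINFOHEADER and its color table.

#include <windows.h>
#include <vfw.h>

// Locate the pixel data inside the packed DIB returned by AVIStreamGetFrame.
static void *DibPixels(LPBITMAPINFOHEADER lpbi)
{
    if (!lpbi)
        return NULL;
    // biSize covers the header itself; biClrUsed RGBQUAD palette entries follow it.
    // Caveat: for formats of 8 bpp or less, biClrUsed == 0 actually means a full
    // (1 << biBitCount)-entry palette, which this simple form does not handle.
    return (BYTE *)lpbi + lpbi->biSize + lpbi->biClrUsed * sizeof(RGBQUAD);
}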
Example No. 2
	/////////////////////////////////////////////////////////
	// get frame - if no frame number is given, fetch the next frame
	/////////////////////////////////////////////////////////
	DWORD CAviMan::GetFrame(LPBYTE pImageData,DWORD dwNumFrame)
	{
		if(dwNumFrame==-1) dwNumFrame = (++m_dwCurrentFrame);				//if frame number not set 

		if(m_dwCurrentFrame>=m_dwTotalFrame) return 1;

		LPBYTE pAviFrame = (BYTE*)AVIStreamGetFrame(m_aviGetFrame,dwNumFrame);

		if((pAviFrame) && (pImageData))
		{
			DWORD dwWidth = m_bmpHeader.bmiHeader.biWidth;
			DWORD dwHeight = m_bmpHeader.bmiHeader.biHeight;

			switch(m_bmpHeader.bmiHeader.biBitCount)
			{
			case(24):dwWidth*=3;break;
			case(16):dwWidth*=2;break;			
			}

			LPBYTE	pFrom = (pAviFrame+sizeof( BITMAPINFO )) + m_dwImageSize - dwWidth-1;
			LPBYTE	pTo = pImageData;

			for(DWORD i=0;i<dwHeight;i++,pFrom-=dwWidth,pTo+=dwWidth)
				memcpy(pTo,pFrom,dwWidth);

//			memcpy(pImageData,(pAviFrame+sizeof( BITMAPINFO )),m_dwImageSize);

		}
		return 0;

	}
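Example No. 2 walks the source DIB from its last scanline upwards because Windows DIBs are bottom-up by default. Below is a standalone sketch of the same row flip, with hypothetical names (FlipDibRows, srcPixels, dst, rowBytes) in place of the class members. Note that DIB scanlines are padded to DWORD boundaries, so rowBytes should be the padded stride rather than a bare width * bytesPerPixel.

#include <windows.h>
#include <string.h>

// Copy a bottom-up packed DIB into a top-down byte buffer, one scanline at a time.
static void FlipDibRows(const BYTE *srcPixels, BYTE *dst, DWORD rowBytes, DWORD height)
{
    const BYTE *pFrom = srcPixels + (height - 1) * rowBytes;   // last stored scanline = top row of the image
    for (DWORD i = 0; i < height; ++i, pFrom -= rowBytes, dst += rowBytes)
        memcpy(dst, pFrom, rowBytes);
}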
Example No. 3
bool VideoComponent::grabAviFrame(int frame)
{
	if (m_pgf != 0x0 && frame >= 0 && frame < m_lastframe && m_hdd && m_hdc)
	{
		LPBITMAPINFOHEADER lpbi;					// Holds The Bitmap Header Information
		lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(m_pgf, frame);	// Grab Data From The AVI Stream
		if (!lpbi)									// AVIStreamGetFrame returns NULL when the frame cannot be decoded
			return false;
		// Convert Data To Requested Bitmap Format
		if (m_resize)
		{
			// Temporarly get the frame in m_pdata
			// (Skip The Header Info To Get To The Data)
			m_pdata=(unsigned char *)lpbi+lpbi->biSize+lpbi->biClrUsed * sizeof(RGBQUAD);	// Pointer To Data Returned By AVIStreamGetFrame
			if (!DrawDibDraw (m_hdd, m_hdc, 0, 0, m_resizeWidth, m_resizeHeight, lpbi, m_pdata, 0, 0, m_width, m_height, 0))
			{
				GameLog::errorMessage("Error resizing video to requested size: %d x %d", m_resizeWidth, m_resizeHeight);
				return false;
			}
		}
		else
		{
			// Directly get the frame in m_data
			// (Skip The Header Info To Get To The Data)
			m_data=(unsigned char *)lpbi+lpbi->biSize+lpbi->biClrUsed * sizeof(RGBQUAD);	// Pointer To Data Returned By AVIStreamGetFrame
		}
		convertFrameData(m_data, m_bgraData);							// convert to horde format
		return true;
	}
	return false;
}
Example No. 4
void CExtAviLabel::OnAviPaintCurrentFrame( CDC & dc, const CRect & rcDrawDst, const CSize & sizeAvi )
{
	ASSERT_VALID( this );
	ASSERT( GetSafeHwnd() != NULL );
	ASSERT( dc.GetSafeHdc() != NULL );
	if( sizeAvi.cx <= 0 || sizeAvi.cy <= 0 )
		return;
LPBITMAPINFOHEADER pBIH = (LPBITMAPINFOHEADER)AVIStreamGetFrame(m_pGF, (LONG)m_nAviFrameCurrent);
//	ASSERT( pBIH != NULL );
	if( pBIH == NULL )
		return;
CExtMemoryDC dcProcessingSurface( &dc, rcDrawDst, CExtMemoryDC::MDCOPT_TO_MEMORY|CExtMemoryDC::MDCOPT_FORCE_DIB|CExtMemoryDC::MDCOPT_RTL_COMPATIBILITY );
	VERIFY(
		::DrawDibDraw(
			m_hDrawDib, dcProcessingSurface.GetSafeHdc(),
			rcDrawDst.left + m_ptAviOffset.x, rcDrawDst.top + m_ptAviOffset.y, sizeAvi.cx, sizeAvi.cy,
			pBIH, NULL, 0, 0, -1, -1, 0
			)
		);
COLORREF clrTransparentPixels = ( ( AviStyleGet() & __EXT_AVI_TRANSPARENT_VIDEO ) != 0 ) ? m_clrTransparentEffective : PmBridge_GetPM()->GetColor( COLOR_WINDOW );
	CExtPaintManager::stat_TransparentBlt(
		dc.m_hDC,
		rcDrawDst.left + m_ptAviOffset.x, rcDrawDst.top + m_ptAviOffset.y, sizeAvi.cx, sizeAvi.cy,
		dcProcessingSurface.GetSafeHdc(),
		rcDrawDst.left + m_ptAviOffset.x, rcDrawDst.top + m_ptAviOffset.y, sizeAvi.cx, sizeAvi.cy,
		clrTransparentPixels
		);
	dcProcessingSurface.__Flush( FALSE );
}
Example No. 5
static LPVOID AVIFILE_ReadFrame(IAVIEditStreamImpl* const This,
                                PAVISTREAM pstream, LONG pos)
{
  PGETFRAME pg;

  TRACE("(%p,%p,%d)\n",This,pstream,pos);

  if (pstream == NULL)
    return NULL;

  /* if stream changes make sure that only palette changes */
  if (This->pCurStream != pstream) {
    pg = AVIStreamGetFrameOpen(pstream, NULL);
    if (pg == NULL)
      return NULL;
    if (This->pg != NULL) {
      if (IGetFrame_SetFormat(pg, This->lpFrame, NULL, 0, 0, -1, -1) != S_OK) {
        AVIStreamGetFrameClose(pg);
        ERR(": IGetFrame_SetFormat failed\n");
        return NULL;
      }
      AVIStreamGetFrameClose(This->pg);
    }
    This->pg         = pg;
    This->pCurStream = pstream;
  }

  /* now get the decompressed frame */
  This->lpFrame = AVIStreamGetFrame(This->pg, pos);
  if (This->lpFrame != NULL)
    This->sInfo.dwSuggestedBufferSize = This->lpFrame->biSizeImage;

  return This->lpFrame;
}
Example No. 6
		DWORD WINAPI AviFrameGraber::_ThreadFunc (LPVOID lpParameter){
			AviFrameGraber* avi_frame_graber;
			avi_frame_graber = (AviFrameGraber*) lpParameter;
			woodychang0611::image::ImageRGB24	image ((UINT16)avi_frame_graber->avi_info_.dwWidth,(UINT16)avi_frame_graber->avi_info_.dwHeight);
			avi_frame_graber->image_pointer_ = &image;

			//the thread will end itself if status is FRAME_SUBJECT_STOP
			while(avi_frame_graber->GetStatus() !=FRAME_SUBJECT_STOP){
				unsigned char* dib_pointer;
				switch(avi_frame_graber->GetStatus()){ 
					case FRAME_SUBJECT_PLAY:
						dib_pointer = (unsigned char*) AVIStreamGetFrame(avi_frame_graber->frame_,avi_frame_graber->current_frame_);
						if (avi_frame_graber->GetStatus()==FRAME_SUBJECT_STOP) return 0;
						if (dib_pointer){
							// Copy from the source DIB to woodychang0611::ImageRGB24
							for (UINT16 j=0;j<image.GetImageHeight();++j){
								for (UINT16 i=0;i<image.GetImageWidth();++i){
									UINT32 pos = (image.GetImageHeight()-j-1)*
										image.GetBytePerLine()+i*3+40;
									image.GetPixel(i,j).b_ = dib_pointer[pos];
									image.GetPixel(i,j).g_ = dib_pointer[pos+1];
									image.GetPixel(i,j).r_ = dib_pointer[pos+2];
								}
							}
							// Update frame info
							avi_frame_graber->frame_info_.current_frame_ = avi_frame_graber->current_frame_;
							std::time_t frame_time = mktime(&avi_frame_graber->initial_time_)+(INT32)
								((avi_frame_graber->current_frame_-avi_frame_graber->start_frame_)/
								avi_frame_graber->frame_info_.frame_per_second_);
							tm tm_frame;
							localtime_s(&tm_frame,&frame_time);
							avi_frame_graber->frame_info_.frame_time_=tm_frame;
							avi_frame_graber->NotifyObserver();
						}
						avi_frame_graber->current_frame_++;
						//Check end
						if (avi_frame_graber->current_frame_ >=
							avi_frame_graber-> start_frame_+avi_frame_graber->frame_length_){
								if(avi_frame_graber->auto_replay_){
									avi_frame_graber->current_frame_=avi_frame_graber->start_frame_;
								}
								else{
									avi_frame_graber->current_frame_ = avi_frame_graber->start_frame_;
									avi_frame_graber->SetStatus(FRAME_SUBJECT_PAUSE);
								}
						}
						break;
					default:
						break;
				}//end of switch(avi_frame_graber->GetStatus())
			}//end of while(avi_frame_graber->GetStatus() !=FRAME_SUBJECT_STOP)
			return 0;
		}
Example No. 7
static HRESULT WINAPI ICMStream_fnReadFormat(IAVIStream *iface, LONG pos,
					      LPVOID format, LONG *formatsize)
{
  IAVIStreamImpl *This = impl_from_IAVIStream(iface);

  LPBITMAPINFOHEADER lpbi;
  HRESULT            hr;

  TRACE("(%p,%d,%p,%p)\n", iface, pos, format, formatsize);

  if (formatsize == NULL)
    return AVIERR_BADPARAM;

  if (This->pg == NULL) {
    hr = AVIFILE_OpenGetFrame(This);

    if (FAILED(hr))
      return hr;
  }

  lpbi = AVIStreamGetFrame(This->pg, pos);
  if (lpbi == NULL)
    return AVIERR_MEMORY;

  if (This->hic == NULL) {
    LONG size = lpbi->biSize + lpbi->biClrUsed * sizeof(RGBQUAD);

    if (size > 0) {
      if (This->sInfo.dwSuggestedBufferSize < lpbi->biSizeImage)
	This->sInfo.dwSuggestedBufferSize = lpbi->biSizeImage;

      This->cbOutput = size;
      if (format != NULL) {
	if (This->lpbiOutput != NULL)
	  memcpy(format, This->lpbiOutput, min(*formatsize, This->cbOutput));
	else
	  memcpy(format, lpbi, min(*formatsize, size));
      }
    }
  } else if (format != NULL)
    memcpy(format, This->lpbiOutput, min(*formatsize, This->cbOutput));

  if (*formatsize < This->cbOutput)
    hr = AVIERR_BUFFERTOOSMALL;
  else
    hr = AVIERR_OK;

  *formatsize = This->cbOutput;
  return hr;
}
Example No. 8
void AVIOBJ::GrabAVIFrame(int frame,HDC nhdc)									// Grabs A Frame From The Stream
{
    LPBITMAPINFOHEADER lpbi;									// Holds The Bitmap Header Information

    if (pgf)
    {
        lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(pgf, frame);	// Grab Data From The AVI Stream
        if (lpbi)
        {
            pdata=(char *)lpbi+lpbi->biSize+lpbi->biClrUsed * sizeof(RGBQUAD);	// Pointer To Data Returned By AVIStreamGetFrame
            DrawDibDraw (hdd, nhdc, 0, 0, right-left,bottom-top, lpbi, pdata, 0, 0, width, height, 0);
        }
    }

}
Example No. 9
File: AVI.cpp Project: DCubix/1.4.0
void CAvi::GrabAVIFrame( int frame )
{
	char* pdata;
	LPBITMAPINFOHEADER lpbi;									// Holds The Bitmap Header Information
	lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(m_pgf, frame);	// Grab Data From The CAvi Stream
	pdata=(char *)lpbi+lpbi->biSize+lpbi->biClrUsed * sizeof(RGBQUAD);	// Pointer To Data Returned By AVIStreamGetFrame

	// Convert Data To Requested Bitmap Format
	DrawDibDraw (m_hdd, m_hDC, 0, 0, 1024, 512, lpbi, pdata, 0, 0, m_width, m_height, 0);

	//Flip( data );
	// Update The Texture
	glBindTexture( GL_TEXTURE_2D, m_textureId );
	glTexSubImage2D (GL_TEXTURE_2D, 0, 0, 0, 1024, 512, GL_BGR_EXT, GL_UNSIGNED_BYTE, m_data);
}
Example No. 10
static ImBuf *avi_fetchibuf(struct anim *anim, int position)
{
	ImBuf *ibuf = NULL;
	int *tmp;
	int y;
	
	if (anim == NULL) {
		return NULL;
	}

#if defined(_WIN32) && !defined(FREE_WINDOWS)
	if (anim->avistreams) {
		LPBITMAPINFOHEADER lpbi;

		if (anim->pgf) {
			lpbi = AVIStreamGetFrame(anim->pgf, position + AVIStreamStart(anim->pavi[anim->firstvideo]));
			if (lpbi) {
				ibuf = IMB_ibImageFromMemory((unsigned char *) lpbi, 100, IB_rect, anim->colorspace, "<avi_fetchibuf>");
//Oh brother...
			}
		}
	}
	else
#endif
	{
		ibuf = IMB_allocImBuf(anim->x, anim->y, 24, IB_rect);

		tmp = AVI_read_frame(anim->avi, AVI_FORMAT_RGB32, position,
		                     AVI_get_stream(anim->avi, AVIST_VIDEO, 0));
		
		if (tmp == NULL) {
			printf("Error reading frame from AVI: '%s'\n", anim->name);
			IMB_freeImBuf(ibuf);
			return NULL;
		}

		for (y = 0; y < anim->y; y++) {
			memcpy(&(ibuf->rect)[((anim->y - y) - 1) * anim->x],  &tmp[y * anim->x],
			       anim->x * 4);
		}
		
		MEM_freeN(tmp);
	}
	
	ibuf->rect_colorspace = colormanage_colorspace_get_named(anim->colorspace);

	return ibuf;
}
Example No. 11
/***********************************************************************
 *      AVIStreamGetFrame	(AVIFILE.110)
 */
SEGPTR WINAPI AVIStreamGetFrame16(PGETFRAME pg, LONG pos)
{
    struct frame_wrapper16 *wrapper = (void *)pg;
    BITMAPINFOHEADER *bih;

    if (!pg) return 0;

    bih = AVIStreamGetFrame(wrapper->pg, pos);
    if (bih)
    {
        DWORD size = bih->biSize + bih->biSizeImage;
        return alloc_segptr_frame(wrapper, bih, size);
    }

    return 0;
}
Example No. 12
void Movie::renderTexture(Texture* Tex)
{
    if (!Tex || Tex->getImageBuffer()->getType() != IMAGEBUFFER_UBYTE || mcrAVIData->State != MOVIESTATE_PLAYED)
        return;
    
    /* Proceed to the next frame */
    mcrAVIData->Time        = clock();
    mcrAVIData->Next        += (s32)( Speed_ * (mcrAVIData->Time - mcrAVIData->LastTime) );
    mcrAVIData->LastTime    = clock();
    mcrAVIData->Frame       = mcrAVIData->Next / mcrAVIData->VideoMPF;
    
    /* Check if the movie is finished */
    if (finish())
    {
        mcrAVIData->Next = mcrAVIData->Frame = 0;
        
        if (!mcrAVIData->Looped)
        {
            mcrAVIData->State = MOVIESTATE_STOPED;
            return;
        }
    }
    
    /* Get the frame from the video stream */
    LPBITMAPINFOHEADER pBitmapInfoHeader = (LPBITMAPINFOHEADER)AVIStreamGetFrame(mcrAVIData->pGetFrame, mcrAVIData->Frame);
    
    if (!pBitmapInfoHeader)
        return;
    
    mcrAVIData->VideoData = (s8*)pBitmapInfoHeader + pBitmapInfoHeader->biSize + pBitmapInfoHeader->biClrUsed * sizeof(RGBQUAD);
    
    DrawDibDraw(
        mcrAVIData->hDrawDIB, mcrAVIData->hDeviceContext,
        0, 0,
        mcrAVIData->Resolution, mcrAVIData->Resolution,
        pBitmapInfoHeader, mcrAVIData->VideoData,
        0, 0,
        mcrAVIData->Size.Width, mcrAVIData->Size.Height,
        0
    );
    
    //flipDataBuffer(MovieData->RawData, MovieData->Resolution * MovieData->Resolution * 3);
    
    /* Copy to texture */
    if (Tex->getSize() == dim::size2di(mcrAVIData->Resolution) && Tex->getImageBuffer()->getFormatSize() == 3)
        Tex->setupImageBuffer(mcrAVIData->RawData);
}
Example No. 13
/*
---------------------------------------------------------------------------------------
- Grabs A Frame From The Stream
---------------------------------------------------------------------------------------
*/
void AviVideoRenderer::GrabAVIFrame(int frame)
{
	LPBITMAPINFOHEADER lpbi;									// Holds The Bitmap Header Information
	lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(_AVR_pgf, frame);	// Grab Data From The AVI Stream
	char* _tex_pdata=(char *)lpbi+lpbi->biSize+lpbi->biClrUsed * sizeof(RGBQUAD);	// Pointer To Data Returned By AVIStreamGetFrame

	// Convert Data To Requested Bitmap Format
	DrawDibDraw (_AVR_hdd, _AVR_hdc, 0, 0,  _heights[0], _widths[0], lpbi, _tex_pdata, 0, 0, _video_width, _video_height, 0);

	flipIt(_img_data, _heights[0], _widths[0]);	// Swap The Red And Blue Bytes (GL Compatibility)

	// Update The Texture
	glBindTexture(GL_TEXTURE_2D, _textures[0]);
	glTexSubImage2D (GL_TEXTURE_2D, 0, 0, 0,  _heights[0], _widths[0], GL_RGB, GL_UNSIGNED_BYTE, _img_data);


}
Example No. 14
void AVI :: Set(float time)									// Grabs A Frame From The Stream
{
	// anything to do?
	if(!tman || mpf==0)
		NTHROW("AVI :: Set - Video not created before call to set.");

	// move position forward by time
	pos+=time;
	// get the frame number
	int frame=(int)((float)pos/mpf);

	// have we gone past the end?
	if(frame>=lastframe)
	{
		if(loop)
		{
			pos=0.0f;
			frame=0;				// reset the animation
		}else
			frame=lastframe-1;		// hold at the last frame
	}

	LPBITMAPINFOHEADER lpbi;											// Holds The Bitmap Header Information
	lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(pgf, frame);			// Grab Data From The AVI Stream
	pdata=(BYTE *)lpbi+lpbi->biSize+lpbi->biClrUsed * sizeof(RGBQUAD);	// Pointer To Data Returned By AVIStreamGetFrame

	// Convert Data To Requested Bitmap Format
	DrawDibDraw (hdd, hdc, 0, 0, 256, 256, lpbi, pdata, 0, 0, width, height, 0);

	flipIt(data);												// Swap The Red And Blue Bytes (GL Compatibility)

	// set our texture
	tman->Set(texid);

	// Update The Texture
	glTexSubImage2D (GL_TEXTURE_2D, 0, 0, 0, 256, 256, GL_RGB, GL_UNSIGNED_BYTE, data);
}
Example No. 15
void CAviHelper::AVItoBmp(const wstring& strAVIFileName, const wstring& strBmpDir)
{
	AVIFileInit();
	PAVIFILE avi;
	int res = AVIFileOpen(&avi, WS2S(strAVIFileName).c_str(), OF_READ, NULL);
	int n = GetLastError();
	if (res!=AVIERR_OK)
	{
		//an error occurred
		if (avi!=NULL)
			AVIFileRelease(avi);
		return ;
	}
	
	AVIFILEINFO avi_info;
	AVIFileInfo(avi, &avi_info, sizeof(AVIFILEINFO));
	PAVISTREAM pStream;
	res=AVIFileGetStream(avi, &pStream, streamtypeVIDEO /*video stream*/, 0 /*first stream*/);
	if (res!=AVIERR_OK)
	{
		if (pStream!=NULL)
			AVIStreamRelease(pStream);
		AVIFileExit();
		return ;
	}
	
	//do some task with the stream
	int iNumFrames;
	int iFirstFrame;
	iFirstFrame = AVIStreamStart(pStream);
	if (iFirstFrame==-1)
	{
		//Error getting the frame inside the stream
		if (pStream!=NULL)
			AVIStreamRelease(pStream);
		AVIFileExit();
		return ;
	}
	
	iNumFrames = AVIStreamLength(pStream);
	if (iNumFrames==-1)
	{
		//Error getting the number of frames inside the stream
		if (pStream!=NULL)
			AVIStreamRelease(pStream);
		AVIFileExit();
		return ;
	}
	
	//getting bitmap from frame
	BITMAPINFOHEADER bih;
	ZeroMemory(&bih, sizeof(BITMAPINFOHEADER));
	bih.biBitCount=24; //24 bit per pixel
	bih.biClrImportant=0;
	bih.biClrUsed = 0;
	bih.biCompression = BI_RGB;
	bih.biPlanes = 1;
	bih.biSize = 40;
	bih.biXPelsPerMeter = 0;
	bih.biYPelsPerMeter = 0;
	
	bih.biWidth = avi_info.dwWidth;
	bih.biHeight = avi_info.dwHeight;
	//calculate total size of RGBQUAD scanlines (DWORD aligned)
	bih.biSizeImage = (((bih.biWidth * 3) + 3) & 0xFFFC) * bih.biHeight ;
	PGETFRAME pFrame;
	pFrame=AVIStreamGetFrameOpen(pStream, NULL );
	AVISTREAMINFO streaminfo;
	AVIStreamInfo(pStream,&streaminfo,sizeof(AVISTREAMINFO));
	
	//Get the first frame
	BITMAPINFOHEADER bih2;
	long lsize = sizeof(bih2);
	int index= 0;
	for (int i = iFirstFrame; i < iNumFrames; i++)
	{
		index= i-iFirstFrame;
		BYTE* pDIB = (BYTE*) AVIStreamGetFrame(pFrame, index); //
		AVIStreamReadFormat(pStream,index,&bih2,&lsize);
		BITMAPFILEHEADER stFileHdr;
		BYTE* Bits=new BYTE[bih2.biSizeImage];
		AVIStreamRead(pStream,index,1,Bits,bih2.biSizeImage,NULL,NULL);
		//RtlMoveMemory(Bits, pDIB + sizeof(BITMAPINFOHEADER), bih2.biSizeImage);
		bih2.biClrUsed =0;
		stFileHdr.bfOffBits=sizeof(BITMAPFILEHEADER)+sizeof(BITMAPINFOHEADER);
		stFileHdr.bfSize=sizeof(BITMAPFILEHEADER)+sizeof(BITMAPINFOHEADER)+bih2.biSizeImage;	//bfSize is the whole file size
		stFileHdr.bfReserved1 = stFileHdr.bfReserved2 = 0;
		stFileHdr.bfType=0x4d42; 
		CString FileName;
		FileName.Format(_T("Frame-%05d.bmp"), index);
		CString strtemp;
		strtemp.Format(_T("%s\\%s"), strBmpDir.c_str(), FileName);
		FILE* fp=_tfopen(strtemp ,_T("wb"));
		fwrite(&stFileHdr,1,sizeof(BITMAPFILEHEADER),fp);
		fwrite(&bih2,1,sizeof(BITMAPINFOHEADER),fp);
		int ff = fwrite(Bits,1,bih2.biSizeImage,fp);
		int e = GetLastError();
		fclose(fp);
		/////
		delete [] Bits;
		//CreateFromPackedDIBPointer(pDIB, index);
	}
	
	AVIStreamGetFrameClose(pFrame);
	//close the stream after finishing the task
	if (pStream!=NULL)
		AVIStreamRelease(pStream);
	AVIFileExit();
}
Example No. 16
LPBITMAPINFOHEADER CAviToBmp::GetFrame(DWORD Frame)
{
	return((LPBITMAPINFOHEADER)AVIStreamGetFrame(m_pGetFrame, Frame));
}
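Example No. 16 is only a thin wrapper, so for context here is a minimal self-contained sketch of the open/decode/close lifecycle that every example in this listing relies on. The file name "clip.avi" and the loop body are placeholders; the calls themselves (AVIFileInit, AVIStreamOpenFromFile, AVIStreamGetFrameOpen, AVIStreamGetFrame, AVIStreamGetFrameClose, AVIStreamRelease, AVIFileExit) are the standard Video for Windows API.

#include <windows.h>
#include <vfw.h>
#pragma comment(lib, "vfw32.lib")

int main()
{
    AVIFileInit();

    PAVISTREAM pStream = NULL;
    if (AVIStreamOpenFromFile(&pStream, TEXT("clip.avi"), streamtypeVIDEO, 0, OF_READ, NULL) != 0)
    {
        AVIFileExit();
        return 1;
    }

    // NULL format = let the decoder pick a default DIB format.
    PGETFRAME pGetFrame = AVIStreamGetFrameOpen(pStream, NULL);
    if (pGetFrame)
    {
        LONG first = AVIStreamStart(pStream);
        LONG count = AVIStreamLength(pStream);
        for (LONG i = first; i < first + count; ++i)
        {
            // The returned pointer is owned by the PGETFRAME object and is only
            // valid until the next AVIStreamGetFrame/AVIStreamGetFrameClose call.
            LPBITMAPINFOHEADER lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(pGetFrame, i);
            if (!lpbi)
                break;                      // decoding failed
            // ... use lpbi and the pixel data that follows it ...
        }
        AVIStreamGetFrameClose(pGetFrame);
    }

    AVIStreamRelease(pStream);
    AVIFileExit();
    return 0;
}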
Example No. 17
static HRESULT WINAPI ICMStream_fnRead(IAVIStream *iface, LONG start,
					LONG samples, LPVOID buffer,
					LONG buffersize, LPLONG bytesread,
					LPLONG samplesread)
{
  IAVIStreamImpl *This = impl_from_IAVIStream(iface);

  LPBITMAPINFOHEADER lpbi;

  TRACE("(%p,%d,%d,%p,%d,%p,%p)\n", iface, start, samples, buffer,
 	buffersize, bytesread, samplesread);

  /* clear return parameters if given */
  if (bytesread != NULL)
    *bytesread = 0;
  if (samplesread != NULL)
    *samplesread = 0;

  if (samples == 0)
    return AVIERR_OK;

  /* check parameters */
  if (samples != 1 && (bytesread == NULL && samplesread == NULL))
    return AVIERR_BADPARAM;
  if (samples == -1) /* read as much as we could */
    samples = 1;

  if (This->pg == NULL) {
    HRESULT hr = AVIFILE_OpenGetFrame(This);

    if (FAILED(hr))
      return hr;
  }

  /* compress or decompress? */
  if (This->hic == NULL) {
    /* decompress */
    lpbi = AVIStreamGetFrame(This->pg, start);
    if (lpbi == NULL)
      return AVIERR_MEMORY;

    if (buffer != NULL && buffersize > 0) {
      /* check buffersize */
      if (buffersize < lpbi->biSizeImage)
	return AVIERR_BUFFERTOOSMALL;

      memcpy(buffer, DIBPTR(lpbi), lpbi->biSizeImage);
    }

    /* fill out return parameters if given */
    if (bytesread != NULL)
      *bytesread = lpbi->biSizeImage;
  } else {
    /* compress */
    if (This->lCurrent > start)
      AVIFILE_Reset(This);

    while (start > This->lCurrent) {
      HRESULT hr;

      lpbi = AVIStreamGetFrame(This->pg, ++This->lCurrent);
      if (lpbi == NULL) {
	AVIFILE_Reset(This);
	return AVIERR_MEMORY;
      }

      hr = AVIFILE_EncodeFrame(This, lpbi, DIBPTR(lpbi));
      if (FAILED(hr)) {
	AVIFILE_Reset(This);
	return hr;
      }
    }

    if (buffer != NULL && buffersize > 0) {
      /* check buffersize */
      if (This->lpbiCur->biSizeImage > buffersize)
	return AVIERR_BUFFERTOOSMALL;

      memcpy(buffer, This->lpCur, This->lpbiCur->biSizeImage);
    }

    /* fill out return parameters if given */
    if (bytesread != NULL)
      *bytesread = This->lpbiCur->biSizeImage;
  }

  /* fill out return parameters if given */
  if (samplesread != NULL)
    *samplesread = 1;

  return AVIERR_OK;
}
Example No. 18
static HRESULT AVIFILE_OpenGetFrame(IAVIStreamImpl *This)
{
  LPBITMAPINFOHEADER lpbi;
  DWORD              size;

  /* pre-conditions */
  assert(This != NULL);
  assert(This->pStream != NULL);
  assert(This->pg == NULL);

  This->pg = AVIStreamGetFrameOpen(This->pStream, NULL);
  if (This->pg == NULL)
    return AVIERR_ERROR;

  /* When we only decompress this is enough */
  if (This->sInfo.fccHandler == comptypeDIB)
    return AVIERR_OK;

  assert(This->hic != NULL);
  assert(This->lpbiOutput == NULL);

  /* get input format */
  lpbi = AVIStreamGetFrame(This->pg, This->sInfo.dwStart);
  if (lpbi == NULL)
    return AVIERR_MEMORY;

  /* get memory for output format */
  size = ICCompressGetFormatSize(This->hic, lpbi);
  if ((LONG)size < (LONG)sizeof(BITMAPINFOHEADER))
    return AVIERR_COMPRESSOR;
  This->lpbiOutput = HeapAlloc(GetProcessHeap(), 0, size);
  if (This->lpbiOutput == NULL)
    return AVIERR_MEMORY;
  This->cbOutput = size;

  if (ICCompressGetFormat(This->hic, lpbi, This->lpbiOutput) < S_OK)
    return AVIERR_BADFORMAT;

  /* update AVISTREAMINFO structure */
  This->sInfo.rcFrame.right  =
    This->sInfo.rcFrame.left + This->lpbiOutput->biWidth;
  This->sInfo.rcFrame.bottom =
    This->sInfo.rcFrame.top  + This->lpbiOutput->biHeight;
  This->sInfo.dwSuggestedBufferSize =
    ICCompressGetSize(This->hic, lpbi, This->lpbiOutput);

  /* prepare codec for compression */
  if (ICCompressBegin(This->hic, lpbi, This->lpbiOutput) != S_OK)
    return AVIERR_COMPRESSOR;

  /* allocate memory for current frame */
  size += This->sInfo.dwSuggestedBufferSize;
  This->lpbiCur = HeapAlloc(GetProcessHeap(), 0, size);
  if (This->lpbiCur == NULL)
    return AVIERR_MEMORY;
  memcpy(This->lpbiCur, This->lpbiOutput, This->cbOutput);
  This->lpCur = DIBPTR(This->lpbiCur);

  /* allocate memory for last frame if needed */
  if (This->lKeyFrameEvery != 1 &&
      (This->dwICMFlags & VIDCF_FASTTEMPORALC) == 0) {
    size = ICDecompressGetFormatSize(This->hic, This->lpbiOutput);
    This->lpbiPrev = HeapAlloc(GetProcessHeap(), 0, size);
    if (This->lpbiPrev == NULL)
      return AVIERR_MEMORY;
    if (ICDecompressGetFormat(This->hic, This->lpbiOutput, This->lpbiPrev) < S_OK)
      return AVIERR_COMPRESSOR;

    if (This->lpbiPrev->biSizeImage == 0) {
      This->lpbiPrev->biSizeImage =
	DIBWIDTHBYTES(*This->lpbiPrev) * This->lpbiPrev->biHeight;
    }

    /* get memory for format and picture */
    size += This->lpbiPrev->biSizeImage;
    This->lpbiPrev = HeapReAlloc(GetProcessHeap(), 0, This->lpbiPrev, size );
    if (This->lpbiPrev == NULL)
      return AVIERR_MEMORY;
    This->lpPrev = DIBPTR(This->lpbiPrev);

    /* prepare codec also for decompression */
    if (ICDecompressBegin(This->hic,This->lpbiOutput,This->lpbiPrev) != S_OK)
      return AVIERR_COMPRESSOR;
  }

  return AVIERR_OK;
}
Example No. 19
bool CAviLoader::grabFrame()
{
    if( avistream )
        bmih = (BITMAPINFOHEADER*)AVIStreamGetFrame( getframe, pos++ );
    return bmih != 0;
}
Example No. 20
static ImBuf *avi_fetchibuf(struct anim *anim, int position)
{
	ImBuf *ibuf = NULL;
	int *tmp;
	int y;
	
	if (anim == NULL) {
		return NULL;
	}

#if defined(_WIN32) && !defined(FREE_WINDOWS)
	if (anim->avistreams) {
		LPBITMAPINFOHEADER lpbi;

		if (anim->pgf) {
			lpbi = AVIStreamGetFrame(anim->pgf, position + AVIStreamStart(anim->pavi[anim->firstvideo]));
			if (lpbi) {
				ibuf = IMB_ibImageFromMemory((unsigned char *) lpbi, 100, IB_rect, anim->colorspace, "<avi_fetchibuf>");
//Oh brother...
			}
		}
	}
	else {
#else
	if (1) {
#endif
		ibuf = IMB_allocImBuf(anim->x, anim->y, 24, IB_rect);

		tmp = AVI_read_frame(anim->avi, AVI_FORMAT_RGB32, position,
		                     AVI_get_stream(anim->avi, AVIST_VIDEO, 0));
		
		if (tmp == NULL) {
			printf("Error reading frame from AVI: '%s'\n", anim->name);
			IMB_freeImBuf(ibuf);
			return NULL;
		}

		for (y = 0; y < anim->y; y++) {
			memcpy(&(ibuf->rect)[((anim->y - y) - 1) * anim->x],  &tmp[y * anim->x],
			       anim->x * 4);
		}
		
		MEM_freeN(tmp);
	}
	
	ibuf->rect_colorspace = colormanage_colorspace_get_named(anim->colorspace);

	return ibuf;
}
#endif  /* WITH_AVI */

#ifdef WITH_FFMPEG

static int startffmpeg(struct anim *anim)
{
	int i, videoStream;

	AVCodec *pCodec;
	AVFormatContext *pFormatCtx = NULL;
	AVCodecContext *pCodecCtx;
	int frs_num;
	double frs_den;
	int streamcount;

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* The following for color space determination */
	int srcRange, dstRange, brightness, contrast, saturation;
	int *table;
	const int *inv_table;
#endif

	if (anim == 0) return(-1);

	streamcount = anim->streamindex;

	if (avformat_open_input(&pFormatCtx, anim->name, NULL, NULL) != 0) {
		return -1;
	}

	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	av_dump_format(pFormatCtx, 0, anim->name, 0);


	/* Find the video stream */
	videoStream = -1;

	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			if (streamcount > 0) {
				streamcount--;
				continue;
			}
			videoStream = i;
			break;
		}

	if (videoStream == -1) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	/* Find the decoder for the video stream */
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	pCodecCtx->workaround_bugs = 1;

	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		av_close_input_file(pFormatCtx);
		return -1;
	}

	anim->duration = ceil(pFormatCtx->duration *
	                      av_q2d(pFormatCtx->streams[videoStream]->r_frame_rate) /
	                      AV_TIME_BASE);

	frs_num = pFormatCtx->streams[videoStream]->r_frame_rate.num;
	frs_den = pFormatCtx->streams[videoStream]->r_frame_rate.den;

	frs_den *= AV_TIME_BASE;

	while (frs_num % 10 == 0 && frs_den >= 2.0 && frs_num > 10) {
		frs_num /= 10;
		frs_den /= 10;
	}

	anim->frs_sec = frs_num;
	anim->frs_sec_base = frs_den;

	anim->params = 0;

	anim->x = pCodecCtx->width;
	anim->y = av_get_cropped_height_from_codec(pCodecCtx);

	anim->pFormatCtx = pFormatCtx;
	anim->pCodecCtx = pCodecCtx;
	anim->pCodec = pCodec;
	anim->videoStream = videoStream;

	anim->interlacing = 0;
	anim->orientation = 0;
	anim->framesize = anim->x * anim->y * 4;

	anim->curposition = -1;
	anim->last_frame = 0;
	anim->last_pts = -1;
	anim->next_pts = -1;
	anim->next_packet.stream_index = -1;

	anim->pFrame = avcodec_alloc_frame();
	anim->pFrameComplete = FALSE;
	anim->pFrameDeinterlaced = avcodec_alloc_frame();
	anim->pFrameRGB = avcodec_alloc_frame();

	if (avpicture_get_size(PIX_FMT_RGBA, anim->x, anim->y) !=
	    anim->x * anim->y * 4)
	{
		fprintf(stderr,
		        "ffmpeg has changed alloc scheme ... ARGHHH!\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

	if (anim->ib_flags & IB_animdeinterlace) {
		avpicture_fill((AVPicture *) anim->pFrameDeinterlaced,
		               MEM_callocN(avpicture_get_size(
		                               anim->pCodecCtx->pix_fmt,
		                               anim->pCodecCtx->width,
		                               anim->pCodecCtx->height),
		                           "ffmpeg deinterlace"),
		               anim->pCodecCtx->pix_fmt, 
		               anim->pCodecCtx->width,
		               anim->pCodecCtx->height);
	}

	if (pCodecCtx->has_b_frames) {
		anim->preseek = 25; /* FIXME: detect gopsize ... */
	}
	else {
		anim->preseek = 0;
	}
	
	anim->img_convert_ctx = sws_getContext(
	        anim->x,
	        anim->y,
	        anim->pCodecCtx->pix_fmt,
	        anim->x,
	        anim->y,
	        PIX_FMT_RGBA,
	        SWS_FAST_BILINEAR | SWS_PRINT_INFO | SWS_FULL_CHR_H_INT,
	        NULL, NULL, NULL);
		
	if (!anim->img_convert_ctx) {
		fprintf(stderr,
		        "Can't transform color space??? Bailing out...\n");
		avcodec_close(anim->pCodecCtx);
		av_close_input_file(anim->pFormatCtx);
		av_free(anim->pFrameRGB);
		av_free(anim->pFrameDeinterlaced);
		av_free(anim->pFrame);
		anim->pCodecCtx = NULL;
		return -1;
	}

#ifdef FFMPEG_SWSCALE_COLOR_SPACE_SUPPORT
	/* Try do detect if input has 0-255 YCbCR range (JFIF Jpeg MotionJpeg) */
	if (!sws_getColorspaceDetails(anim->img_convert_ctx, (int **)&inv_table, &srcRange,
	                              &table, &dstRange, &brightness, &contrast, &saturation))
	{
		srcRange = srcRange || anim->pCodecCtx->color_range == AVCOL_RANGE_JPEG;
		inv_table = sws_getCoefficients(anim->pCodecCtx->colorspace);

		if (sws_setColorspaceDetails(anim->img_convert_ctx, (int *)inv_table, srcRange,
		                             table, dstRange, brightness, contrast, saturation))
		{
			fprintf(stderr, "Warning: Could not set libswscale colorspace details.\n");
		}
	}
	else {
		fprintf(stderr, "Warning: Could not set libswscale colorspace details.\n");
	}
#endif
		
	return (0);
}
Example No. 21
HRESULT CAviBitmap::GetAllFrames(LPCTSTR lpszFolderName)
{
	if(m_pGetFrame == NULL)
	{
		m_szLastErrorMsg.Format(_T("Not initialized yet"));
		return E_FAIL;
	}
	HRESULT hr = S_OK;

	int nBmpInfoHdrSize = sizeof(BITMAPINFO) + sizeof(RGBQUAD) * 256;
	BITMAPINFOHEADER* lpBmpInfoHdr = (BITMAPINFOHEADER*)(new BYTE[nBmpInfoHdrSize]);
	LONG lpcbFormat = nBmpInfoHdrSize;

	BYTE* lpDib = NULL;
	BYTE* lpBuffer = NULL;

	LONG lBytes = 0, lSamples = 0;
	BOOL bReadRaw = FALSE;

	int nPos = 0;
	int nSampleCount = min(m_lSampleCount, 101);
	for(nPos = 0; nPos < nSampleCount; nPos++)
	{
		//Get the frame format
		hr = AVIStreamReadFormat(m_pAviStream, nPos, lpBmpInfoHdr, &lpcbFormat);
		if(hr != S_OK)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the sample format: %d"), nPos);
			break;
		}

		lpBuffer = NULL;
		//Try to read raw data when the bitmap is BI_RGB
		if(lpBmpInfoHdr->biCompression == BI_RGB && (lpBmpInfoHdr->biBitCount == 24 || lpBmpInfoHdr->biBitCount == 32))
		{
			//Get the frame data
			lpBuffer = new BYTE[m_biWanted.biSizeImage];
			hr = AVIStreamRead(m_pAviStream, nPos, 1, lpBuffer, m_biWanted.biSizeImage, &lBytes, &lSamples);
			if(hr != S_OK)
			{
				m_szLastErrorMsg.Format(_T("Unable to Get the sample data: %d"), nPos);
				break;
			}
		}
		else
		{
			CString szFourCC;
			FourCC2Str(m_aviInfo.fccHandler, szFourCC);
			AfxTrace(_T("Non-RGB format at frame(%03d)=%s, 0x%08X\n"), nPos, szFourCC, lpBmpInfoHdr->biCompression);
		}


		//Get the frame at nPos
		lpDib = (BYTE*)AVIStreamGetFrame(m_pGetFrame, nPos);
		if(lpDib == NULL)
		{
			m_szLastErrorMsg.Format(_T("Unable to Get the sample: %d"), nPos);
			hr = E_FAIL;
			break;
		}

		//compare the data retrieved in 2 ways if needed
		if(lpBuffer != NULL)
		{
			if(memcmp(lpBuffer, lpDib + sizeof(BITMAPINFOHEADER), lpBmpInfoHdr->biSizeImage) != 0)
			{
				m_szLastErrorMsg.Format(_T("not equals: %d"), nPos);
				hr = E_FAIL;
				break;
			}
		}

		CString szFileName;
		if(lpszFolderName == NULL)
		{
			szFileName.Format(_T(".\\Frame%03d.bmp"), nPos);
		}
		else
		{
			szFileName.Format(_T("%s\\Frame%03d.bmp"), lpszFolderName, nPos);
		}
		BITMAPINFOHEADER* pTemp = (BITMAPINFOHEADER*)lpDib;

//		hr = SaveBitmap(lpBmpInfoHdr, lpBuffer, lpBmpInfoHdr->biSizeImage, szFileName);
		hr = SaveBitmap(&m_biWanted, lpDib + sizeof(BITMAPINFOHEADER), m_biWanted.biSizeImage, szFileName);

		if(lpBuffer != NULL)
		{
			delete [] lpBuffer;
			lpBuffer = NULL;
		}
		//Done
	}

	if(lpBuffer != NULL)
	{
		delete [] lpBuffer;
		lpBuffer = NULL;
	}

	if(lpBmpInfoHdr != NULL)
	{
		delete [] lpBmpInfoHdr;
		lpBmpInfoHdr = NULL;
	}

	ReleaseMemory();

	return hr;
}
Example No. 22
void AVIDump::StoreFrame(const void* data)
{
	if (s_bitmap.biSizeImage > s_stored_frame_size)
	{
		void* temp_stored_frame = realloc(s_stored_frame, s_bitmap.biSizeImage);
		if (temp_stored_frame)
		{
			s_stored_frame = temp_stored_frame;
		}
		else
		{
			free(s_stored_frame);
			PanicAlertT("Something has gone seriously wrong.\n"
			            "Stopping video recording.\n"
			            "Your video will likely be broken.");
			Stop();
		}
		s_stored_frame_size = s_bitmap.biSizeImage;
		memset(s_stored_frame, 0, s_bitmap.biSizeImage);
	}
	if (s_stored_frame)
	{
		//PanicAlertT("Width: %i, Height: %i, Bit Count: %i", s_bitmap.biWidth, s_bitmap.biHeight, s_bitmap.biBitCount);

		if (data && (s_file_count || !Movie::cmp_isRunning || s_frame_count > 0))
		{
			bool lastSide = false, readOnly = false;

			if (Movie::cmp_isRunning && (Movie::cmp_leftFinished || Movie::cmp_rightFinished))
				lastSide = true;

			if (lastSide && Movie::cmp_startTimerFrame > Movie::cmp_curentBranchFrame) //Dragonbane: Combine frames
				readOnly = true;
			else
				readOnly = false;


			if (readOnly && s_getFrame_temp)
			{
				size_t totalBytes = s_bitmap.biSizeImage / 2;
				size_t rowSize = (s_bitmap.biWidth * (s_bitmap.biBitCount / 8)) / 2;
				size_t currentByte = 0;

				if (s_last_key_temp < 2)
				{
					BOOL result = AVIStreamIsKeyFrame(s_stream_temp, s_last_key_temp);

					if (!result)
						s_last_key_temp = AVIStreamNextKeyFrame(s_stream_temp, s_last_key_temp);
				}

				u64 samplePos = AVIStreamFindSample(s_stream_temp, s_last_key_temp, FIND_ANY);

				u64 s_last_key_old = s_last_key_temp;

				s_last_key_temp = AVIStreamNextKeyFrame(s_stream_temp, s_last_key_temp);

				void* s_uncompressed_frame = AVIStreamGetFrame(s_getFrame_temp, samplePos);
				std::string movie_file_name;

				if (!s_uncompressed_frame || s_stopTempFile)
				{
					//Close current file
					if (s_getFrame_temp)
					{
						AVIStreamGetFrameClose(s_getFrame_temp);
						s_getFrame_temp = nullptr;
					}

					if (s_stream_temp)
					{
						AVIStreamClose(s_stream_temp);
						s_stream_temp = nullptr;
					}

					if (s_file_temp)
					{
						AVIFileRelease(s_file_temp);
						s_file_temp = nullptr;

						movie_file_name = GetCurrDumpFile(tempFileCount, true);

						if (File::Exists(movie_file_name))
							File::Delete(movie_file_name);
					}

					//Check if we have another temp file
					tempFileCount++;
					s_stopTempFile = false;

					movie_file_name = GetCurrDumpFile(tempFileCount, true);

					if (File::Exists(movie_file_name)) //Dragonbane: Open temp file for reading
					{
						HRESULT h2 = AVIFileOpenA(&s_file_temp, movie_file_name.c_str(), OF_READ, nullptr);
						HRESULT h3 = AVIFileGetStream(s_file_temp, &s_stream_temp, streamtypeVIDEO, 0);

						s_last_key_temp = 0; //Not the first file anymore, so start from keyframe 0

						s_getFrame_temp = AVIStreamGetFrameOpen(s_stream_temp, &s_bitmap);

						if (!s_getFrame_temp)
						{
							PanicAlertT("Your chosen compression codec can not be decompressed again! Can't continue video comparison!");
							Movie::CancelComparison();
							return;
						}

						BOOL result = AVIStreamIsKeyFrame(s_stream_temp, s_last_key_temp);

						if (!result)
							s_last_key_temp = AVIStreamNextKeyFrame(s_stream_temp, s_last_key_temp);

						samplePos = AVIStreamFindSample(s_stream_temp, s_last_key_temp, FIND_ANY);

						s_last_key_old = s_last_key_temp;

						s_last_key_temp = AVIStreamNextKeyFrame(s_stream_temp, s_last_key_temp);

						s_uncompressed_frame = AVIStreamGetFrame(s_getFrame_temp, samplePos);

						if (!s_uncompressed_frame)
						{
							//PanicAlertT("Last frame stored. Start timer now!");
							Movie::cmp_startTimerFrame = Movie::cmp_curentBranchFrame;
							memcpy(s_stored_frame, data, s_bitmap.biSizeImage);
							return;
						}
					}
					else
					{
						//PanicAlertT("Last frame stored. Start timer now!");
						Movie::cmp_startTimerFrame = Movie::cmp_curentBranchFrame;
						memcpy(s_stored_frame, data, s_bitmap.biSizeImage);
						return;
					}
				}

				//Stop temp file on next frame if last frame is processed
				if (s_last_key_old == s_last_key_temp || AVIStreamFindSample(s_stream_temp, s_last_key_temp, FIND_ANY) == samplePos)
					s_stopTempFile = true;


				void* memptr1 = s_uncompressed_frame;
				memptr1 = static_cast<u8*>(memptr1) + sizeof(BITMAPINFOHEADER);

				if (Movie::cmp_leftFinished)
				{
					memcpy(s_stored_frame, memptr1, s_bitmap.biSizeImage);

					for (u64 currentRow = 0; currentRow < s_bitmap.biHeight; currentRow++)
					{
						currentByte += rowSize;

						void* memptr = s_stored_frame;
						const void* memptr2 = data;

						memptr = static_cast<u8*>(memptr) + currentByte;
						memptr2 = static_cast<const u8*>(memptr2) + currentByte;

						memcpy(memptr, memptr2, rowSize);

						currentByte += rowSize;
					}
				}
				else if (Movie::cmp_rightFinished)
				{
					memcpy(s_stored_frame, memptr1, s_bitmap.biSizeImage);

					//BITMAPINFOHEADER test;
					//memset(&test, 0, sizeof(BITMAPINFOHEADER));
					//memcpy(&test, s_uncompressed_frame, sizeof(BITMAPINFOHEADER));

					for (u64 currentRow = 0; currentRow < s_bitmap.biHeight; currentRow++)
					{
						void* memptr = s_stored_frame;
						const void* memptr2 = data;

						memptr = static_cast<u8*>(memptr) + currentByte;
						memptr2 = static_cast<const u8*>(memptr2) + currentByte;

						memcpy(memptr, memptr2, rowSize);

						currentByte += rowSize * 2;
					}
				}
				else
				{
					memcpy(s_stored_frame, data, s_bitmap.biSizeImage);
				}
			}
			else
			{
				memcpy(s_stored_frame, data, s_bitmap.biSizeImage);
			}
		}
		else // pitch black frame
		{
			memset(s_stored_frame, 0, s_bitmap.biSizeImage);
		}
	}
}
Example No. 23
static FIMULTIBITMAP* ReadFromAvi(const char* filename) {
	int err=0;
	AVIFileInit();
	
	PAVISTREAM pavi; // Handle To An Open Stream
	if( AVIStreamOpenFromFile(&pavi, filename, streamtypeVIDEO, 0, OF_READ, NULL) != 0) {
		AVIFileExit();
		return NULL;
	}



	AVISTREAMINFO		psi;				// Pointer To A Structure Containing Stream Info
	AVIStreamInfo(pavi, &psi, sizeof(psi));				// Reads Information About The Stream Into psi
	int width  = psi.rcFrame.right-psi.rcFrame.left;			// Width Is Right Side Of Frame Minus Left
	int height = psi.rcFrame.bottom-psi.rcFrame.top;			// Height Is Bottom Of Frame Minus Top
	int frameCount = AVIStreamLength(pavi);							// The Last Frame Of The Stream

	double mpf = AVIStreamSampleToTime(pavi, frameCount) / (double)frameCount;		// Calculate Rough Milliseconds Per Frame

	PGETFRAME pgf = AVIStreamGetFrameOpen(pavi, NULL);				// Create The PGETFRAME Using Our Request Mode
	if (pgf==NULL)
	{
		// An Error Occurred Opening The Frame
		error("Failed To Open frame from AVI");
	}

	HDC hdc = CreateCompatibleDC(0);							// Memory DC The DIB Section Is Selected Into

	HDRAWDIB hdd = DrawDibOpen();													// Handle For Our Dib

	BITMAPINFOHEADER bmih;										// Header Information For DrawDibDraw Decoding
	bmih.biSize = sizeof (BITMAPINFOHEADER);					// Size Of The BitmapInfoHeader
	bmih.biPlanes = 1;											// Bitplanes	
	bmih.biBitCount = 24;										// Bits Format We Want (24 Bit, 3 Bytes)
	bmih.biWidth = width;										// Width We Want (256 Pixels)
	bmih.biHeight = height;										// Height We Want (256 Pixels)
	bmih.biCompression = BI_RGB;								// Requested Mode = RGB

	char		*data;						// Pointer To Texture Data
	HBITMAP hBitmap = CreateDIBSection(hdc, (BITMAPINFO*)(&bmih), DIB_RGB_COLORS, (void**)(&data), NULL, NULL);
	SelectObject(hdc, hBitmap);								// Select hBitmap Into Our Device Context (hdc)

	// create a new freeimage anim
	someError=false;
	FIMULTIBITMAP* ret = FreeImage_OpenMultiBitmap(FIF_TIFF, "temp.tiff", TRUE, FALSE);
	if (!ret || someError) {
		error("Couldnt create multibitmap");
	}

	for (int frame=0; frame<frameCount; frame++) {
		fprintf(stderr, "Loading frame %i\n", frame);
		// Grab Data From The AVI Stream
		LPBITMAPINFOHEADER lpbi = (LPBITMAPINFOHEADER)AVIStreamGetFrame(pgf, frame);	
		// Pointer To Data Returned By AVIStreamGetFrame
		// (Skip The Header Info To Get To The Data)
		char* pdata = (char *)lpbi + lpbi->biSize + lpbi->biClrUsed * sizeof(RGBQUAD);	
		
		// Convert Data To Requested Bitmap Format
		DrawDibDraw(hdd, hdc, 0, 0, width, height, lpbi, pdata, 0, 0, width, height, 0);

		// copy into the freeimage thing
		FIBITMAP* fiBitmap = FreeImage_ConvertFromRawBits((BYTE*)data, width, height, width*3, 24, 0xFF0000, 0x00FF00, 0x0000FF);
/*		BYTE* src = (BYTE*)data;
		for (int y=0; y<height; y++) {
			BYTE* dst = FreeImage_GetScanLine(fiBitmap, y);
			for (int x=0; x<width; x++) {
				//src++;
				*dst++ = *src++;
				*dst++ = *src++;
				*dst++ = *src++;
			}
		}
*/
		FIBITMAP* grayBitmap = FreeImage_ConvertToGreyscale(fiBitmap);
		FreeImage_Unload(fiBitmap);

		FreeImage_AppendPage(ret, grayBitmap);
	}
	FreeImage_CloseMultiBitmap(ret);
	ret = FreeImage_OpenMultiBitmap(FIF_TIFF, "temp.tiff", FALSE, TRUE);

	DeleteObject(hBitmap);										// Delete The Device-Dependent Bitmap Object
	DeleteDC(hdc);												// Delete The Memory DC Created Above
	DrawDibClose(hdd);											// Closes The DrawDib Device Context
	AVIStreamGetFrameClose(pgf);								// Deallocates The GetFrame Resources
	AVIStreamRelease(pavi);										// Release The Stream
	AVIFileExit();												// Release The File

	return ret;
}
Example No. 24
void CLoaderAVI::update(unsigned int frameTime)
{
    if (!m_play || m_frameTime == 0)
    {
        return;
    }

    m_time += frameTime;
    unsigned int new_frame = m_time / m_frameTime;

    // The frame has not changed
    if (new_frame == m_frame)
    {
        return;
    }

    // The video has finished
    if (new_frame >= m_lastFrame)
    {
        if (m_loop)
        {
            m_frame = 0;
            m_time = 0;
        }
        // Last frame
        else
        {
            if (new_frame == m_lastFrame)
            {
                // Replace the video with a black image
                for (int i = 0; i < m_width * m_height; ++i)
                {
                    m_pixels[4 * i + 0] = 0x00;
                    m_pixels[4 * i + 1] = 0x00;
                    m_pixels[4 * i + 2] = 0x00;
                    m_pixels[4 * i + 3] = 0xFF;
                }

                // Update the texture
                CImage img(m_width, m_height, reinterpret_cast<CColor *>(&m_pixels[0]));
                Game::textureManager->reloadTexture(m_texture, img);
            }

            m_frame = m_lastFrame;
            m_time = m_lastFrame * m_frameTime;
            m_play = false;

            return;
        }
    }
    else
    {
        m_frame = new_frame;
    }

#ifdef T_SYSTEM_WINDOWS

    // Retrieve the frame (this crashes!)
    LPBITMAPINFOHEADER lpbi = reinterpret_cast<LPBITMAPINFOHEADER>(AVIStreamGetFrame(m_pgf, m_frame));

    if (lpbi == nullptr)
    {
        CApplication::getApp()->log("AVIStreamGetFrame retourne un pointeur nul", ILogger::Error);
        return;
    }

    // DEBUG
    if (lpbi->biBitCount != 24)
    {
        CApplication::getApp()->log(CString::fromUTF8("The video does not use 3 components per color (%1)").arg(lpbi->biBitCount), ILogger::Error);
        return;
    }

    unsigned char * temp = reinterpret_cast<unsigned char *>(lpbi) + lpbi->biSize + lpbi->biClrUsed * sizeof(RGBQUAD);
    CColor * tempRGBA = new CColor[m_width * m_height];

    for (int h = 0; h < m_height; ++h)
    {
        for (int w = 0; w < m_width; ++w)
        {
            tempRGBA[(m_height - h - 1) * m_width + w].setBlue(temp[(h * m_width + w) * 3 + 0]);
            tempRGBA[(m_height - h - 1) * m_width + w].setGreen(temp[(h * m_width + w) * 3 + 1]);
            tempRGBA[(m_height - h - 1) * m_width + w].setRed(temp[(h * m_width + w) * 3 + 2]);
        }
    }

    // Update the texture
    CImage img(m_width, m_height, tempRGBA);
    Game::textureManager->reloadTexture(m_texture, img);

#endif
}
Example No. 25
bool CvCaptureAVI_VFW::grabFrame()
{
    if( avistream )
        bmih = (BITMAPINFOHEADER*)AVIStreamGetFrame( getframe, pos++ );
    return bmih != 0;
}
Example No. 26
BOOL ExtractAVIFrames(CString szFileName)
{
	AVIFileInit();

	PAVIFILE avi;
	int res = AVIFileOpen(&avi, szFileName, OF_READ, NULL);

	if (res != AVIERR_OK)
	{
		//an error occurred
		if (avi != NULL)
			AVIFileRelease(avi);

		return FALSE;
	}

	AVIFILEINFO avi_info;
	AVIFileInfo(avi, &avi_info, sizeof(AVIFILEINFO));

	CString szFileInfo;
	szFileInfo.Format("Dimention: %dx%d\n"
		"Length: %d frames\n"
		"Max bytes per second: %d\n"
		"Samples per second: %d\n"
		"Streams: %d\n"
		"File Type: %d", avi_info.dwWidth,
		avi_info.dwHeight,
		avi_info.dwLength,
		avi_info.dwMaxBytesPerSec,
		(DWORD)(avi_info.dwRate / avi_info.dwScale),
		avi_info.dwStreams,
		avi_info.szFileType);

	AfxMessageBox(szFileInfo, MB_ICONINFORMATION | MB_OK);

	PAVISTREAM pStream;
	res = AVIFileGetStream(avi, &pStream, streamtypeVIDEO /*video stream*/,
		0 /*first stream*/);

	if (res != AVIERR_OK)
	{
		if (pStream != NULL)
			AVIStreamRelease(pStream);

		AVIFileExit();
		return FALSE;
	}

	//do some task with the stream
	int iNumFrames;
	int iFirstFrame;

	iFirstFrame = AVIStreamStart(pStream);
	if (iFirstFrame == -1)
	{
		//Error getting the frame inside the stream

		if (pStream != NULL)
			AVIStreamRelease(pStream);

		AVIFileExit();
		return FALSE;
	}

	iNumFrames = AVIStreamLength(pStream);
	if (iNumFrames == -1)
	{
		//Error getting the number of frames inside the stream

		if (pStream != NULL)
			AVIStreamRelease(pStream);

		AVIFileExit();
		return FALSE;
	}

	//getting bitmap from frame
	BITMAPINFOHEADER bih;
	ZeroMemory(&bih, sizeof(BITMAPINFOHEADER));

	bih.biBitCount = 24;    //24 bit per pixel
	bih.biClrImportant = 0;
	bih.biClrUsed = 0;
	bih.biCompression = BI_RGB;
	bih.biPlanes = 1;
	bih.biSize = 40;
	bih.biXPelsPerMeter = 0;
	bih.biYPelsPerMeter = 0;
	bih.biWidth = avi_info.dwWidth;
	bih.biHeight = avi_info.dwHeight;
	//calculate total size of RGBQUAD scanlines (DWORD aligned)
	bih.biSizeImage = (((bih.biWidth * 3) + 3) & 0xFFFC) * bih.biHeight;

	PGETFRAME pFrame;
	pFrame = AVIStreamGetFrameOpen(pStream,
		NULL/*(BITMAPINFOHEADER*) AVIGETFRAMEF_BESTDISPLAYFMT*/ /*&bih*/);

	//Get the first frame
	int index = 0;
	for (int i = iFirstFrame; i<iNumFrames; i++)
	{
		index = i - iFirstFrame;

		BYTE* pDIB = (BYTE*)AVIStreamGetFrame(pFrame, index);

		CreateFromPackedDIBPointer(pDIB, index);
	}

	AVIStreamGetFrameClose(pFrame);

	//close the stream after finishing the task
	if (pStream != NULL)
		AVIStreamRelease(pStream);

	AVIFileExit();

	return TRUE;
}
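The CreateFromPackedDIBPointer helper called above is not shown in this listing. As a hedged illustration only (not the article's actual implementation), one plausible shape for such a helper is to prepend a BITMAPFILEHEADER to the packed DIB that AVIStreamGetFrame returned and write it straight to a .bmp file; the SavePackedDib name, the Frame-%05d.bmp pattern, and the assumption that biClrUsed gives the full palette size are all assumptions of this sketch.

#include <windows.h>
#include <stdio.h>
#include <stdlib.h>

static BOOL SavePackedDib(const BYTE* pDIB, int index)
{
	if (!pDIB)
		return FALSE;

	const BITMAPINFOHEADER* bih = (const BITMAPINFOHEADER*)pDIB;
	DWORD paletteBytes = bih->biClrUsed * sizeof(RGBQUAD);
	DWORD strideBytes  = ((bih->biWidth * bih->biBitCount + 31) / 32) * 4;   // DWORD-aligned scanline
	DWORD pixelBytes   = bih->biSizeImage ? bih->biSizeImage
	                                      : strideBytes * labs(bih->biHeight);
	DWORD dibBytes     = bih->biSize + paletteBytes + pixelBytes;

	BITMAPFILEHEADER bfh = {0};
	bfh.bfType    = 0x4d42;                                              // 'BM'
	bfh.bfOffBits = sizeof(BITMAPFILEHEADER) + bih->biSize + paletteBytes;
	bfh.bfSize    = sizeof(BITMAPFILEHEADER) + dibBytes;

	char name[64];
	sprintf_s(name, sizeof(name), "Frame-%05d.bmp", index);

	FILE* fp = NULL;
	if (fopen_s(&fp, name, "wb") != 0 || fp == NULL)
		return FALSE;
	fwrite(&bfh, sizeof(bfh), 1, fp);                                    // file header
	fwrite(pDIB, dibBytes, 1, fp);                                       // info header + palette + pixels
	fclose(fp);
	return TRUE;
}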