Example #1
// 
// Sprite::VOnUpdate - Chapter 10, page 321
//
void Sprite::VOnUpdate(int deltaMS)
{
	if (m_IsPaused)
	{
		return;	
	}

	m_ElapsedTime += deltaMS;

	// Only call SetFrame() if we have to.
	// We're guaranteed to have to advance at least one frame...

	if (m_ElapsedTime >= m_MSPerFrame)
	{
		DWORD const numFramesToAdvance = (m_ElapsedTime / m_MSPerFrame);

		m_ElapsedTime -= (numFramesToAdvance * m_MSPerFrame);

		int desiredFrame = GetFrame() + numFramesToAdvance;

		//Check if we're looping...
		if ((false==m_LoopingAnim) && (desiredFrame >= GetFrameCount()))
		{
			desiredFrame = GetFrameCount() - 1;	//Stay on last frame...
		}
		
		//Now advance our frame properly...
		SetFrame(desiredFrame);		
	}
}
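The heart of VOnUpdate is the integer time bookkeeping: accumulate elapsed milliseconds, advance by elapsedTime / msPerFrame whole frames, and subtract what was consumed so the remainder carries into the next update. Below is a minimal, self-contained sketch of that arithmetic with hypothetical names (not the Sprite class above); the wrap-around for looping animations is an assumption here, since the original delegates it to SetFrame().

#include <cstdio>

// Hypothetical helper illustrating the accumulate-and-advance math used above.
// Returns the new frame index; elapsedMS is reduced by the time consumed.
static int AdvanceFrames(int currentFrame, int frameCount, bool looping,
                         int &elapsedMS, int msPerFrame)
{
	if (elapsedMS < msPerFrame)
		return currentFrame;                    // not enough time for a new frame yet

	const int framesToAdvance = elapsedMS / msPerFrame;
	elapsedMS -= framesToAdvance * msPerFrame;  // carry the remainder forward

	int desired = currentFrame + framesToAdvance;
	if (looping)
		desired %= frameCount;                  // wrap around (assumed; SetFrame would handle this)
	else if (desired >= frameCount)
		desired = frameCount - 1;               // clamp to the last frame
	return desired;
}

int main()
{
	int elapsed = 0, frame = 0;
	for (int tick = 0; tick < 10; ++tick) {     // simulate ten 40 ms updates of a 100 ms/frame, 8-frame loop
		elapsed += 40;
		frame = AdvanceFrames(frame, 8, true, elapsed, 100);
		std::printf("tick %d -> frame %d (%d ms left over)\n", tick, frame, elapsed);
	}
	return 0;
}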
Example #2
////////////////////////////////////////
//		PUBLIC UTILITY FUNCTIONS
////////////////////////////////////////
// Gets the current frame in the animation
// and moves to the next one in order.
RECT CAnimation::GetFrame()
{
	SetTimeWaited(GetTimeWaited() + GAME->GetTimer().GetDeltaTime());

	RECT tRect;
	
	tRect.left = GetCurrentFrame() * GetFrameWidth();
	tRect.top = 0;
	tRect.right = tRect.left + GetFrameWidth();
	tRect.bottom = tRect.top + GetFrameHeight();	
	
				
	if(GetTimeWaited() > GetTimePerFrame() && !GetStatic())
	{	
		SetTimeWaited(0.0f);
		SetCurrentFrame(GetCurrentFrame() + 1);

		if(GetCurrentFrame() > GetFrameCount())
		{
			if(GetLooping())
				ResetAnimation();	
			else
			{
				SetCurrentFrame(GetFrameCount());
				SetStatic(true);
			}
		}
		
	}

	return tRect;
}
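Example #2's RECT math assumes a single-row sprite sheet: frame i starts at x = i * frameWidth. A tiny standalone sketch of the same calculation, with a plain struct standing in for the Win32 RECT:

#include <cassert>

struct FrameRectLT { long left, top, right, bottom; };   // stand-in for the Win32 RECT

// Source rectangle of frame `index` in a single-row sprite sheet.
static FrameRectLT GetSourceRect(int index, int frameWidth, int frameHeight)
{
	FrameRectLT r;
	r.left   = static_cast<long>(index) * frameWidth;
	r.top    = 0;
	r.right  = r.left + frameWidth;
	r.bottom = r.top + frameHeight;
	return r;
}

int main()
{
	FrameRectLT r = GetSourceRect(3, 64, 48);   // fourth frame of a 64x48 strip
	assert(r.left == 192 && r.right == 256 && r.bottom == 48);
	return 0;
}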
Example #3
	void App::UpdateDraw(unsigned int SyncInterval) {
		if (!m_WorldInterface) {
			// If the scene is invalid
			throw BaseException(
				L"There is no scene",
				L"if(!m_WorldInterface)",
				L"App::UpdateDraw()"
			);
		}

		// Update the scene object.
		m_InputDevice.ResetControlerState();
		m_Timer.Tick([&]()
		{
			m_WorldInterface->OnUpdate();
		});
		// Do not render before the first update.
		if (GetFrameCount() == 0)
		{
			return;
		}
		m_WorldInterface->OnDraw();
		// Present the back buffer to the front buffer
		m_DeviceResources->Present(SyncInterval, 0);
	}
Example #4
static void
shofixti_intelligence (PELEMENT ShipPtr, PEVALUATE_DESC ObjectsOfConcern, COUNT ConcernCounter)
{
	STARSHIPPTR StarShipPtr;

	ship_intelligence (ShipPtr,
			ObjectsOfConcern, ConcernCounter);

	GetElementStarShip (ShipPtr, &StarShipPtr);
	if (StarShipPtr->special_counter == 0)
	{
		if (StarShipPtr->ship_input_state & SPECIAL)
			StarShipPtr->ship_input_state &= ~SPECIAL;
		else
		{
			PEVALUATE_DESC lpWeaponEvalDesc, lpShipEvalDesc;

			lpWeaponEvalDesc = &ObjectsOfConcern[ENEMY_WEAPON_INDEX];
			lpShipEvalDesc = &ObjectsOfConcern[ENEMY_SHIP_INDEX];
			if (StarShipPtr->RaceDescPtr->ship_data.special[0]
					&& (GetFrameCount (StarShipPtr->RaceDescPtr->ship_data.captain_control.special)
					- GetFrameIndex (StarShipPtr->RaceDescPtr->ship_data.captain_control.special) > 5
					|| (lpShipEvalDesc->ObjectPtr != NULL_PTR
					&& lpShipEvalDesc->which_turn <= 4)
					|| (lpWeaponEvalDesc->ObjectPtr != NULL_PTR
								/* means IMMEDIATE WEAPON */
					&& (((lpWeaponEvalDesc->ObjectPtr->state_flags & PLAYER_SHIP)
					&& ShipPtr->crew_level == 1)
					|| (PlotIntercept (lpWeaponEvalDesc->ObjectPtr, ShipPtr, 2, 0)
					&& lpWeaponEvalDesc->ObjectPtr->mass_points >= ShipPtr->crew_level
					&& (TFB_Random () & 1))))))
				StarShipPtr->ship_input_state |= SPECIAL;
		}
	}
}
Example #5
void SFObjectMD2::SetActiveFrame(int frame)
{
	if( frame >= GetFrameCount()-1 )
		return;

	active_frame = frame;
}
Example #6
void SensorsRead(char SensorType, char interval)
{
#if STACK_ACC
	if(SensorType&SENSOR_ACC&&SensorInitState.ACC_Done) {
		SensorReadACC();
		nvtInputSensorRawACC(&Sensor.rawACC[0]);
	}
#endif
#if STACK_MAG
	if(SensorType&SENSOR_MAG&&SensorInitState.MAG_Done) {
		if((GetFrameCount()%interval)==0) {
			SensorReadMAG();
			nvtInputSensorRawMAG(&Sensor.rawMAG[0]);
		}
	}
	else {
		Sensor.rawMAG[0] = 0;
		Sensor.rawMAG[1] = 0;
		Sensor.rawMAG[2] = 0;
		nvtInputSensorRawMAG(&Sensor.rawMAG[0]);
	}
#endif
#if STACK_GYRO
	if(SensorType&SENSOR_GYRO&&SensorInitState.GYRO_Done) {
		SensorReadGYRO();
		nvtInputSensorRawGYRO(&Sensor.rawGYRO[0]);
	}
#endif
}
Example #7
// set timeout and frame disposal method for the current frame
void
nsPNGDecoder::EndImageFrame()
{
  if (mFrameIsHidden) {
    return;
  }

  mNumFrames++;

  Opacity opacity = Opacity::SOME_TRANSPARENCY;
  if (format == gfx::SurfaceFormat::B8G8R8X8) {
    opacity = Opacity::OPAQUE;
  }

#ifdef PNG_APNG_SUPPORTED
  uint32_t numFrames = GetFrameCount();

  // We can't use mPNG->num_frames_read as it may be one ahead.
  if (numFrames > 1) {
    PostInvalidation(mFrameRect);
  }
#endif

  PostFrameStop(opacity, mAnimInfo.mDispose, mAnimInfo.mTimeout,
                mAnimInfo.mBlend);
}
Example #8
////////////////////////////////////////////////////////////////////////////////
// 
// FUNCTION:	TestForAnimatedGIF
// 
// DESCRIPTION:	Check GIF/Image for availability of animation
// 
// RETURNS:	
// 
// NOTES:		
// 
// MODIFICATIONS:
// 
// Name				Date		Version		Comments
// N T ALMOND       29012002	1.0			Origin
// 
////////////////////////////////////////////////////////////////////////////////
bool ImageEx::TestForAnimatedGIF()
{
	UINT count = 0;
	count = GetFrameDimensionsCount();
	GUID* pDimensionIDs = new GUID[count];

	// Get the list of frame dimensions from the Image object.
	GetFrameDimensionsList(pDimensionIDs, count);

	// Get the number of frames in the first dimension.
	m_nFrameCount = GetFrameCount(&pDimensionIDs[0]);

	// The frame delays are stored in the PropertyTagFrameDelay property item.
	// Get the size of that property item.
	int nSize = GetPropertyItemSize(PropertyTagFrameDelay);
	if(nSize>0)
	{
		// Allocate a buffer to receive the property item.
		m_pPropertyItem = (PropertyItem*) malloc(nSize);

		GetPropertyItem(PropertyTagFrameDelay, nSize, m_pPropertyItem);

		delete [] pDimensionIDs;
	}else
	{
		XTRACE("GIF image error\n");
		delete [] pDimensionIDs;
		return false;
	}

	return true;
}
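For reference, a self-contained GDI+ sketch of the same checks shown in this example and in Example #26 below: count the frames in the first frame dimension, then read the PropertyTagFrameDelay item (the delays are stored in hundredths of a second). The file name is a placeholder and error handling is omitted.

#include <windows.h>
#include <gdiplus.h>
#include <cstdio>
#include <vector>
#pragma comment(lib, "gdiplus.lib")

int main()
{
	Gdiplus::GdiplusStartupInput startupInput;
	ULONG_PTR token = 0;
	Gdiplus::GdiplusStartup(&token, &startupInput, NULL);
	{
		Gdiplus::Image img(L"sample.gif");                         // placeholder path

		UINT dimCount = img.GetFrameDimensionsCount();
		if (dimCount == 0) {
			std::printf("no frame dimensions\n");
		} else {
			std::vector<GUID> dimensionIDs(dimCount);
			img.GetFrameDimensionsList(&dimensionIDs[0], dimCount);

			UINT frameCount = img.GetFrameCount(&dimensionIDs[0]); // frames in the first dimension
			std::printf("frames: %u\n", frameCount);

			UINT itemSize = img.GetPropertyItemSize(PropertyTagFrameDelay);
			if (frameCount > 1 && itemSize > 0) {
				std::vector<BYTE> buffer(itemSize);
				Gdiplus::PropertyItem* item =
						reinterpret_cast<Gdiplus::PropertyItem*>(&buffer[0]);
				img.GetPropertyItem(PropertyTagFrameDelay, itemSize, item);

				const UINT* delays = static_cast<const UINT*>(item->value);
				for (UINT i = 0; i < frameCount; ++i)              // delays are in 1/100ths of a second
					std::printf("frame %u delay: %u ms\n", i, delays[i] * 10u);
			}
		}
	}                                                              // Image must be destroyed before GdiplusShutdown
	Gdiplus::GdiplusShutdown(token);
	return 0;
}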
Example #9
void
nsWEBPDecoder::FinishInternal()
{
  // Flush the Decoder and let it free the output image buffer.
  WebPIDelete(mDecoder);
  WebPFreeDecBuffer(&mDecBuf);

  // We should never make multiple frames
  MOZ_ASSERT(GetFrameCount() <= 1, "Multiple WebP frames?");

  // Send notifications if appropriate
  if (!IsSizeDecode() && (GetFrameCount() == 1)) {
    PostFrameStop();
    PostDecodeDone();
  }
}
Example #10
// CreateFrame() is used for both simple and animated images
void nsPNGDecoder::CreateFrame(png_uint_32 x_offset, png_uint_32 y_offset,
                               PRInt32 width, PRInt32 height,
                               gfxASurface::gfxImageFormat format)
{
  PRUint32 imageDataLength;
  nsresult rv = mImage->EnsureFrame(GetFrameCount(), x_offset, y_offset,
                                    width, height, format,
                                    &mImageData, &imageDataLength);
  if (NS_FAILED(rv))
    longjmp(png_jmpbuf(mPNG), 5); // NS_ERROR_OUT_OF_MEMORY

  mFrameRect.x = x_offset;
  mFrameRect.y = y_offset;
  mFrameRect.width = width;
  mFrameRect.height = height;

#ifdef PNG_APNG_SUPPORTED
  if (png_get_valid(mPNG, mInfo, PNG_INFO_acTL))
    SetAnimFrameInfo();
#endif

  // Tell the superclass we're starting a frame
  PostFrameStart();

  PR_LOG(gPNGDecoderAccountingLog, PR_LOG_DEBUG,
         ("PNGDecoderAccounting: nsPNGDecoder::CreateFrame -- created "
          "image frame with %dx%d pixels in container %p",
          width, height,
          mImage.get ()));

  mFrameHasNoAlpha = PR_TRUE;
}
Example #11
void CJavascriptStackTrace::Dump(std::ostream& os) const
{
  v8::HandleScope handle_scope(m_isolate);

  v8::TryCatch try_catch;

  std::ostringstream oss;

  for (int i=0; i<GetFrameCount(); i++)
  {
    v8::Handle<v8::StackFrame> frame = GetFrame(i)->Handle();

    v8::String::Utf8Value funcName(frame->GetFunctionName()), scriptName(frame->GetScriptName());

    os << "\tat ";

    if (funcName.length())
      os << std::string(*funcName, funcName.length()) << " (";

    if (frame->IsEval())
    {
      os << "(eval)";
    }
    else
    {
      os << std::string(*scriptName, scriptName.length()) << ":"
          << frame->GetLineNumber() << ":" << frame->GetColumn();
    }

    if (funcName.length())
      os << ")";

    os << std::endl;
  }
}
Example #12
void SFObjectMD2::Render(LPDIRECT3DDEVICE9 RenderingDevice, float ElapsedSecs)
{
	if(isAnimated)
	{
		int play=-1;
		uint32 l = 0;

		//Loop through all animation names
		for (l=0;l<m_anim.size();l++)
		{
			if(strcmp (m_anim[l].name.c_str(), curr_animation) == 0)
			{
				if (Advance)
					m_anim[l].cur += m_anim[l].add;

				//restart animation
				if (m_anim[l].cur >= m_anim[l].end)
					m_anim[l].cur = (float)m_anim[l].start;

				play=l;
				
				break;
			}
		}
		sCore.DeviceHandler()->SetFVF (D3DFVF_MODELVERTEX);

		if (play==-1) return;
		if (play>=GetFrameCount()) return;

		sCore.DeviceHandler()->DrawPrimitiveUP(D3DPT_TRIANGLELIST, //primitive type
													GetTriangleCount (),		  //primitive count
													(BYTE**)&m_data[int(m_anim[l].cur)].vertex[0],		  //pointer to vertex data
													sizeof(MODELVERTEX));  //vertex stride
	}
	else
	{
		if( active_frame >= GetFrameCount()-1 )
			return;
		
		sCore.DeviceHandler()->SetFVF (D3DFVF_MODELVERTEX);

		sCore.DeviceHandler()->DrawPrimitiveUP(D3DPT_TRIANGLELIST, //primitive type
													GetTriangleCount (),		  //primitive count
													(BYTE**)&m_data[active_frame].vertex[0],		  //pointer to vertex data
													sizeof(MODELVERTEX));  //vertex stride
	}
}
Example #13
// Main Control loop
void loop()
{
	static uint32_t nextTick = 0;
	while(getTickCount()<nextTick);
	nextTick = getTickCount()+TICK_FRAME_PERIOD;
	CommandProcess();
#ifdef GPS
	GPSCommandProcess();
#endif
	SensorsRead(SENSOR_ACC|SENSOR_GYRO|SENSOR_MAG|SENSOR_BARO,1);
#ifdef ABROBOT
	SensorsRead(SENSOR_HALL,1);
#endif
#ifdef OPTION_RC
	if(IsSSVConnected()) {
		ssv_rc_update();
		if(getTickCount()%1000)
			UpdateBattery();
	}
	if(ChronographRead(ChronRC)>= OUTPUT_RC_INTERVAL) {
		SensorsDynamicCalibrate(SENSOR_GYRO|SENSOR_MAG);
		ChronographSet(ChronRC);
		computeRC();
		armDetect();
	}
#endif
	if(getMagMode()||!(GetSensorCalState()&(1<<MAG)))
		nvtUpdateAHRS(SENSOR_ACC|SENSOR_GYRO|SENSOR_MAG);
	else
		nvtUpdateAHRS(SENSOR_ACC|SENSOR_GYRO);

	if((GetFrameCount()%18)==0)
		report_sensors();
	
	IncFrameCount(1);
#ifdef OPTION_RC
	if(GetFrameCount()==MOTORS_ESC_DELAY)
		motorsStart();
	stabilizer();
	if((GetFrameCount()%12)==0)
		UpdateLED();
#endif
}
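The loop above staggers periodic work by frame count: a task runs whenever GetFrameCount() % N == 0, so with a fixed TICK_FRAME_PERIOD the sensor report fires every 18th tick and the LED update every 12th. A minimal sketch of that scheduling pattern with hypothetical task names:

#include <cstdio>

static unsigned int frameCount = 0;

// Hypothetical stand-ins for the periodic tasks in loop().
static void ReportSensors() { std::printf("frame %u: report sensors\n", frameCount); }
static void UpdateLed()     { std::printf("frame %u: update LED\n", frameCount); }

int main()
{
	for (int tick = 0; tick < 40; ++tick) {     // pretend each iteration is one TICK_FRAME_PERIOD
		if (frameCount % 18 == 0)               // every 18th frame, as for report_sensors()
			ReportSensors();
		if (frameCount % 12 == 0)               // every 12th frame, as for UpdateLED()
			UpdateLed();
		++frameCount;                           // IncFrameCount(1) in the original
	}
	return 0;
}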
Example #14
CTimecode::CTimecode(WORD FrameRate, bool bDropFrame, LPCTSTR lpszTimecode)
{
    switch (FrameRate)
    {
    case 23:
        ++FrameRate;

    case 24:
        m_Timecode.wFrameRate = ED_FORMAT_SMPTE_24;
        break;

    case 25:
        m_Timecode.wFrameRate = ED_FORMAT_SMPTE_25;
        break;

    case 29:
    case 59:
        m_Timecode.wFrameRate = ED_FORMAT_SMPTE_30DROP;
        ++FrameRate;
        break;

    case 30:
    case 60:
        m_Timecode.wFrameRate = bDropFrame ? ED_FORMAT_SMPTE_30DROP : ED_FORMAT_SMPTE_30;
        break;

    default:
        m_Timecode.wFrameRate = m_FrameRate = 0;
        m_Timecode.dwFrames = m_InvalidTimecodeBCD;
        FrameRate = 0;
        break;
    }

    if (FrameRate)
    {
        m_FrameRate = FrameRate;
        m_Timecode.dwFrames = m_InvalidTimecodeBCD;
        m_FrameCount = 0;

        basic_string<TCHAR> Timecode = lpszTimecode;
        if (!Timecode.empty())
        {
            int Frames = 0, Seconds = 0, Minutes = 0, Hours = 0;
            TCHAR Dummy;	// Ignore the drop/non-drop notation (TCHAR matches %c in both ANSI and Unicode builds).
            if (5 == _stscanf(Timecode.c_str(), TEXT("%d:%d:%d%c%d"), &Hours, &Minutes, &Seconds, &Dummy, &Frames))
            {
                m_FrameCount = GetFrameCount(Hours, Minutes, Seconds, Frames);
                m_Timecode.dwFrames = FrameCountToBCD(m_FrameCount);
            }
        }
    }

    m_Timecode.wFrameFract = 0x1000;
}
Example #15
void AnimManager::Play() {
	PlayIndex = 0;
	//if (Frame == 0 && PlayID == 0) {
		CurLoop = animList[PlayIndex].Loops;
		Frame = anims[animList[PlayIndex].AnimID].timeStart;
		TotalFrames = GetFrameCount();
	//}

	Paused = false;
	AnimParticles = false;
}
Example #16
/**
 * Gets the end time of the interval for which these statistics
 * apply. The end interval is the time when the last frame was
 * presented.
 *
 * @return The end time in nanoseconds or {@link #UNDEFINED_TIME_NANO}
 *         if there is no frame data.
 */
ECode FrameStats::GetEndTimeNano(
    /* [out] */ Int64* nano)
{
    Int32 count;
    GetFrameCount(&count);
    if (count <= 0) {
        *nano = UNDEFINED_TIME_NANO;
        return NOERROR;
    }
    *nano = (*mFramesPresentedTimeNano)[mFramesPresentedTimeNano->GetLength() - 1];
    return NOERROR;
}
Example #17
/**
* Drain all pending frames from the network receive buffer
* @param void
* @return void
*/
void CSocketTailTimeFrame::RemoveFrameDataAll()
{
	int iFrameCount = 0;
	iFrameCount = GetFrameCount();
	if(iFrameCount > 0)
	{
		for(int i = 0; i < iFrameCount; i++)
		{
			GetFrameData();
		}		
	}
}
Example #18
/// @brief Get a video frame
/// @param n Frame number to retrieve (clamped to the valid range)
/// @return The decoded frame
///
const AegiVideoFrame FFmpegSourceVideoProvider::GetFrame(int n) {
	FrameNumber = mid(0, n, GetFrameCount() - 1);

	// decode frame
	const FFMS_Frame *SrcFrame = FFMS_GetFrame(VideoSource, FrameNumber, &ErrInfo);
	if (SrcFrame == NULL) {
		throw VideoDecodeError(std::string("Failed to retrieve frame:") +  ErrInfo.Buffer);
	}

	CurFrame.SetTo(SrcFrame->Data[0], Width, Height, SrcFrame->Linesize[0]);
	return CurFrame;
}
Example #19
HRESULT CMatrixBox::QuickPaint(CRect rcClip0)
{
	// While dragging, blit the cached DC directly to the window for efficiency
	CRect rcParent(0,0,0,0);
	if(m_pParentFrame)
	{
		rcParent = m_pParentFrame->GetPaintRect();
	}
	CRect rcClip;
	CRect rcTmp;
	if(IntersectRect(&rcTmp,&rcParent,&m_rcPaint) == 0)
	{
		return E_FAIL;
	}
	if(IntersectRect(&rcClip,&rcTmp,&rcClip0) == 0)
	{
		return E_FAIL;
	}

	CRect rcWin;
	::GetWindowRect(GetHwnd(),&rcWin);

	HDC hdcMem = ::CreateCompatibleDC(m_hMemDC);
	HBITMAP hbmp = ::CreateCompatibleBitmap(m_hMemDC,rcWin.Width(),rcWin.Height());
	HBITMAP hOldBmp = (HBITMAP)::SelectObject(hdcMem,hbmp);

	::BitBlt(hdcMem,rcClip.left,rcClip.top,rcClip.Width(),rcClip.Height(),m_hMemDC,rcClip.left,rcClip.top,SRCCOPY);

	int nCount = GetFrameCount();
	
	for(int i= 0;i<nCount;i++)
	{
		CMatrixItem* pItem = (CMatrixItem*)GetFrameByIndex(i);
		if(pItem)
		{
			CRect rcItem = pItem->GetPaintRect();
			if ( IntersectRect( &rcTmp,&rcItem,&rcClip ) != 0 )
			{
				pItem->QuickPaint(hdcMem);
			}	
		}
	}

	HDC hdcWin = ::GetDC(GetHwnd());
	::BitBlt(hdcWin,rcClip.left,rcClip.top,rcClip.Width(),rcClip.Height(),hdcMem,rcClip.left,rcClip.top,SRCCOPY);
	ReleaseDC(GetHwnd(),hdcWin);

	::SelectObject(hdcMem,hOldBmp);
	::DeleteObject(hbmp);
	::DeleteDC(hdcMem);

	return S_OK;
}
Example #20
void PS3::PrintInfo() {
	std::cout << "GUID:\t" << GUID2String( GetGUID(), '-', true ) << std::endl;
	std::cout << "Framerate:\t" << _frameRate << std::endl;
	std::cout << "Running:\t" << _bRunning << std::endl;
	std::cout << "Capturing:\t" << _bCapture << std::endl;
	std::cout << "Window title:\t" << _windowTitle << std::endl;
	std::cout << "Horizontal flip:\t" << _bHFlip << std::endl;
	std::cout << "Vertical flip:\t" << _bVFlip << std::endl;
	std::cout << "FPS:\t" << GetFPS() << std::endl;
	std::cout << "Width:\t" << GetWidth() << std::endl;
	std::cout << "Height:\t" << GetHeight() << std::endl;
	std::cout << "FrameCount:\t" << GetFrameCount() << std::endl;
}
Example #21
HRESULT CMatrixBox::SaveChildDC(BOOL bSave)
{
	int nCount = GetFrameCount();
	for(int i= 0;i<nCount;i++)
	{
		CMatrixItem* pItem = (CMatrixItem*)GetFrameByIndex(i);
		if(pItem)
		{
			pItem->SaveDC(bSave);
		}
	}
	return S_OK;
}
Example #22
BOOL C3dMazeEffect::OnTimer(HDC hDC)
{
	BOOL bEndMazeShow = FALSE;

	if(GetFrameCount() != 0)
	{
		bEndMazeShow = !(m_pAI->WalkAhead());
	}

	Draw3dMaze(hDC);
// 	DrawMiniMaze(hDC);

	return !bEndMazeShow;
}
Example #23
BOOL C3BodyEffect::OnTimer(HDC hDC)
{
	if(GetFrameCount() != 0)
	{
		if(!m_pScene->Act())
			return FALSE;
	}

	if(!m_pScene->Draw(hDC))
		return FALSE;

	if(m_pScene->SceneEnd())
		return FALSE;

	return GetElapsedTime() < m_pScene->GetMaxTime();
}
Example #24
static void
shofixti_postprocess (PELEMENT ElementPtr)
{
	STARSHIPPTR StarShipPtr;

	GetElementStarShip (ElementPtr, &StarShipPtr);
	if ((StarShipPtr->cur_status_flags
			^ StarShipPtr->old_status_flags) & SPECIAL)
	{
		StarShipPtr->RaceDescPtr->ship_data.captain_control.special =
				IncFrameIndex (StarShipPtr->RaceDescPtr->ship_data.captain_control.special);
		if (GetFrameCount (StarShipPtr->RaceDescPtr->ship_data.captain_control.special)
				- GetFrameIndex (StarShipPtr->RaceDescPtr->ship_data.captain_control.special) == 3)
			self_destruct (ElementPtr);
	}
}
Example #25
void AnimManager::Prev() {
	if(CurLoop >= animList[PlayIndex].Loops) {
		PlayIndex--;

		if (PlayIndex < 0) {
			Stop();
			return;
		}

		CurLoop = animList[PlayIndex].Loops;
	} else if(CurLoop < animList[PlayIndex].Loops) {
		CurLoop++;
	}

	Frame = anims[animList[PlayIndex].AnimID].timeEnd;
	TotalFrames = GetFrameCount();
}
Example #26
Image2::Image2(
               IN const WCHAR* filename,
               GifCallback * call_back, 
               IN BOOL useEmbeddedColorManagement// = FALSE
               )
               : Image(filename, useEmbeddedColorManagement)
               , m_gif_call_back(call_back)
               , m_gif_timer_task(*this)
               , m_gif_frame_count(0)
               , m_gif_total_delay_time(0)
               , m_gif_status(STATUS_STOP) 
               , m_gif_play_time(0)
               , m_gif_play_index(0)
{
    // Parse the GIF's frame count and the delay between frames

    int frame_dimension_count = GetFrameDimensionsCount() ;
    if (frame_dimension_count > 0)
    {
        GUID * dimension_ids = new GUID[frame_dimension_count] ;
        GetFrameDimensionsList(dimension_ids, frame_dimension_count) ;
        m_gif_frame_count = GetFrameCount(dimension_ids) ;
        delete [] dimension_ids ;

        if (m_gif_frame_count > 1)
        {
            int property_item_size = GetPropertyItemSize(PropertyTagFrameDelay) ;
            PropertyItem * items = (PropertyItem*) new char[property_item_size] ;
            GetPropertyItem(PropertyTagFrameDelay, property_item_size, items) ;

            UINT i, temp_delayed_time ;
            for (i = 0 ; i < m_gif_frame_count ; ++ i)
            {
                temp_delayed_time = *((UINT*)(items->value) + i) ;
                if (temp_delayed_time < 5) temp_delayed_time = 10 ;
                m_gif_frame_delay_times.push_back(temp_delayed_time * 10) ;
                m_gif_total_delay_time += (temp_delayed_time * 10) ;
            }

            delete [] items ;

            SelectActiveFrame(0) ;
        }
    }
}
Example #27
// Process the head (first-packet) frames
void ProcHeadFrame(m_oHeadFrameThreadStruct* pHeadFrameThread)
{
	ASSERT(pHeadFrameThread != NULL);
	// Initialize the frame count to 0
	int iFrameCount = 0;
	EnterCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
	// Get the number of frames waiting on the head-frame receive socket
	iFrameCount = GetFrameCount(pHeadFrameThread->m_pHeadFrame->m_oHeadFrameSocket,
		pHeadFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, 
		pHeadFrameThread->m_pThread->m_pLogOutPut);
	LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
	// Check whether the frame count is greater than 0
	if(iFrameCount > 0)
	{
		// Process each frame in turn
		for(int i = 0; i < iFrameCount; i++)
		{
			EnterCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
			// Get the frame data
			if (false == GetFrameData(pHeadFrameThread->m_pHeadFrame->m_oHeadFrameSocket,
				pHeadFrameThread->m_pHeadFrame->m_cpRcvFrameData, 
				pHeadFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, 
				pHeadFrameThread->m_pThread->m_pLogOutPut))
			{
				LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
				continue;
			}
			LeaveCriticalSection(&pHeadFrameThread->m_pHeadFrame->m_oSecHeadFrame);
			if (false == ParseInstrHeadFrame(pHeadFrameThread->m_pHeadFrame, 
				pHeadFrameThread->m_pThread->m_pConstVar, pHeadFrameThread->m_pThread->m_pLogOutPut))
			{
				AddMsgToLogOutPutList(pHeadFrameThread->m_pThread->m_pLogOutPut, "ParseInstrumentHeadFrame", 
					"", ErrorType, IDS_ERR_PARSE_HEADFRAME);
				continue;
			}
			// Process a single head frame
			ProcHeadFrameOne(pHeadFrameThread);
			// Record the time at which the system changed
			UpdateLineChangeTime(pHeadFrameThread->m_pLineList);	
		}		
	}
}
Example #28
// Process the IP-address-setting response frames
void ProcIPSetReturnFrame(m_oIPSetFrameThreadStruct* pIPSetFrameThread)
{
	ASSERT(pIPSetFrameThread != NULL);
	// Initialize the frame count to 0
	int iFrameCount = 0;
	EnterCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
	// Get the number of frames waiting on the IP-set receive socket
	iFrameCount = GetFrameCount(pIPSetFrameThread->m_pIPSetFrame->m_oIPSetFrameSocket,
		pIPSetFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, 
		pIPSetFrameThread->m_pThread->m_pLogOutPut);
	LeaveCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
	// Check whether the frame count is greater than 0
	if(iFrameCount > 0)
	{
		// Process each frame in turn
		for(int i = 0; i < iFrameCount; i++)
		{
			EnterCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
			// Get the frame data
			if (false == GetFrameData(pIPSetFrameThread->m_pIPSetFrame->m_oIPSetFrameSocket,
				pIPSetFrameThread->m_pIPSetFrame->m_cpRcvFrameData, 
				pIPSetFrameThread->m_pThread->m_pConstVar->m_iRcvFrameSize, 
				pIPSetFrameThread->m_pThread->m_pLogOutPut))
			{
				LeaveCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
				continue;
			}
			LeaveCriticalSection(&pIPSetFrameThread->m_pIPSetFrame->m_oSecIPSetFrame);
			if (false == ParseInstrIPSetReturnFrame(pIPSetFrameThread->m_pIPSetFrame, 
				pIPSetFrameThread->m_pThread->m_pConstVar, pIPSetFrameThread->m_pThread->m_pLogOutPut))
			{
				AddMsgToLogOutPutList(pIPSetFrameThread->m_pThread->m_pLogOutPut, 
					"ParseInstrumentIPSetReturnFrame", "", ErrorType, IDS_ERR_PARSE_IPSETRETURNFRAME);
				continue;
			}
			// Process a single IP-address-setting response frame
			ProcIPSetReturnFrameOne(pIPSetFrameThread);			
		}
	}
}
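Both handlers above follow the same drain pattern: read the queued frame count once, then pull exactly that many frames, taking the lock only around each individual queue access so the producer thread is never held for the whole loop. A portable sketch of the pattern, with std::mutex and std::queue standing in for the CRITICAL_SECTION and the socket buffer:

#include <cstdio>
#include <mutex>
#include <queue>
#include <string>

static std::mutex gFrameLock;
static std::queue<std::string> gFrameQueue;   // stands in for the receive-socket buffer

static void DrainFrames()
{
	size_t count;
	{
		std::lock_guard<std::mutex> lock(gFrameLock);       // the GetFrameCount() step
		count = gFrameQueue.size();
	}
	for (size_t i = 0; i < count; ++i) {
		std::string frame;
		{
			std::lock_guard<std::mutex> lock(gFrameLock);   // the GetFrameData() step
			if (gFrameQueue.empty())
				break;                                      // another consumer drained it first
			frame = gFrameQueue.front();
			gFrameQueue.pop();
		}
		std::printf("processing: %s\n", frame.c_str());     // parsing/dispatch would go here
	}
}

int main()
{
	gFrameQueue.push("head frame 1");
	gFrameQueue.push("head frame 2");
	DrainFrames();
	return 0;
}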
Example #29
static void
flame_napalm_preprocess (ELEMENT *ElementPtr)
{
	ZeroVelocityComponents (&ElementPtr->velocity);

	if (ElementPtr->state_flags & NONSOLID)
	{
		ElementPtr->state_flags &= ~NONSOLID;
		ElementPtr->state_flags |= APPEARING;
		SetPrimType (&(GLOBAL (DisplayArray))[ElementPtr->PrimIndex],
				STAMP_PRIM);

		InitIntersectStartPoint (ElementPtr);
		InitIntersectEndPoint (ElementPtr);
		InitIntersectFrame (ElementPtr);
	}
	/* turn_wait is abused here to store the speed of the decay animation */
	else if (ElementPtr->turn_wait > 0)
		--ElementPtr->turn_wait;
	else
	{
		if (ElementPtr->life_span <= NUM_NAPALM_FADES * (NAPALM_DECAY_RATE + 1)
				|| GetFrameIndex (
				ElementPtr->current.image.frame
				) != NUM_NAPALM_FADES)
			ElementPtr->next.image.frame =
					DecFrameIndex (ElementPtr->current.image.frame);
		else if (ElementPtr->life_span > NUM_NAPALM_FADES * (NAPALM_DECAY_RATE + 1))
			ElementPtr->next.image.frame = SetAbsFrameIndex (
					ElementPtr->current.image.frame,
					GetFrameCount (ElementPtr->current.image.frame) - 1
					);

		/* turn_wait is abused here to store the speed of the decay
		 * animation. */
		ElementPtr->turn_wait = NAPALM_DECAY_RATE;
		ElementPtr->state_flags |= CHANGING;
	}
}
Example #30
void
Decoder::PostDecodeDone()
{
  NS_ABORT_IF_FALSE(!IsSizeDecode(), "Can't be done with decoding with size decode!");
  NS_ABORT_IF_FALSE(!mInFrame, "Can't be done decoding if we're mid-frame!");
  NS_ABORT_IF_FALSE(!mDecodeDone, "Decode already done!");
  mDecodeDone = true;

  // Set premult before DecodingComplete(), since DecodingComplete() calls Optimize()
  int frames = GetFrameCount();
  bool isNonPremult = GetDecodeFlags() & DECODER_NO_PREMULTIPLY_ALPHA;
  for (int i = 0; i < frames; i++) {
    mImage.SetFrameAsNonPremult(i, isNonPremult);
  }

  // Notify
  mImage.DecodingComplete();
  if (mObserver) {
    mObserver->OnStopContainer(nsnull, &mImage);
    mObserver->OnStopDecode(nsnull, NS_OK, nsnull);
  }
}