double		CAAudioFileReader::GetCurrentPosition() const
{
	return double(GetCurrentFrame()) / double(GetNumberFrames());
#if 0
	double nFrames = double(GetNumberFrames());	// +1 to account for leftovers from decoder
	if (!mRunning)
		return double(GetCurrentFrame()) / nFrames;

	if (mEndOfStream) {
		//printf("EOF\n");
		return 1.0;
	}

	const FileReadBuffer *b = static_cast<const FileReadBuffer *>(GetCurrentBuffer());
		// the buffer from which we're reading
	UInt32 startFrame, endFrame;
	b->GetLocation(startFrame, endFrame);
	//printf("%qd + %ld / %.f\n", b->mBufferStartFileFrame, startFrame, nFrames);
	return double(b->mBufferStartFileFrame + startFrame) / nFrames;
	//if (endFrame > 0) {
		//double frac = (double(startFrame) / double(endFrame)) * double(endPacket - startPacket);
		//packetIndex += frac;
		//printf("frames %ld-%ld, packets %qd-%qd, frac %.3f\n",
		//	startFrame, endFrame, startPacket, endPacket, frac);
	//}
	//double pos = packetIndex / nPacketsPlus1;
	//printf("%.3f / %.0f = %.3f\n", packetIndex, nPacketsPlus1, pos);
	//return pos;

	//return double(GetCurrentFrame()) / nFrames;
#endif
}
Example #2
////////////////////////////////////////
//		PUBLIC UTILITY FUNCTIONS
////////////////////////////////////////
// Gets the current frame in the animation
// and moves to the next one in order.
RECT CAnimation::GetFrame()
{
	SetTimeWaited(GetTimeWaited() + GAME->GetTimer().GetDeltaTime());

	RECT tRect;
	
	tRect.left = GetCurrentFrame() * GetFrameWidth();
	tRect.top = 0;
	tRect.right = tRect.left + GetFrameWidth();
	tRect.bottom = tRect.top + GetFrameHeight();

	if(GetTimeWaited() > GetTimePerFrame() && !GetStatic())
	{	
		SetTimeWaited(0.0f);
		SetCurrentFrame(GetCurrentFrame() + 1);

		if(GetCurrentFrame() > GetFrameCount())
		{
			if(GetLooping())
				ResetAnimation();	
			else
			{
				SetCurrentFrame(GetFrameCount());
				SetStatic(true);
			}
		}
		
	}

	return tRect;
}
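As a usage sketch only (the animation instance and the blit helper below are assumptions for illustration, not part of the original project): a render loop calls GetFrame() once per tick and passes the returned source rect straight to its draw call, letting the method advance the frame on its own timer.

// Hypothetical caller -- only GetFrame() comes from the class above.
RECT src = playerAnim.GetFrame();                      // advances once GetTimePerFrame() has elapsed
DrawSpriteRegion(playerSheet, src, playerX, playerY);  // blit helper assumed for illustration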
Example #3
int TestGetCurrentFrame()
{
    struct TBacktraceRecord record;
    int result = GetCurrentFrame(&record, 0);
    REQUIRE_EQUAL(result, 1);
    REQUIRE_STRINGS_EQUAL(record.Symbol_, "TestGetCurrentFrame");
    result = GetCurrentFrame(&record, 1);
    REQUIRE_EQUAL(result, 1);
    REQUIRE_STRINGS_EQUAL(record.Symbol_, "main");
    result = GetCurrentFrame(&record, 100);
    REQUIRE_EQUAL(result, 0);
    return 0;
}
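The test above pins down the calling convention: GetCurrentFrame(&record, depth) fills record with the symbol depth frames above the caller and returns 1, or returns 0 once depth runs past the top of the stack. A minimal sketch of a caller built on that convention follows; the struct layout and prototype here are assumptions inferred from the test, not the library's actual header.

#include <cstdio>

// Assumed interface, inferred from the test above (not the real header).
struct TBacktraceRecord {
    const char* Symbol_;   // symbol name of the frame, as checked by REQUIRE_STRINGS_EQUAL
};
int GetCurrentFrame(TBacktraceRecord* record, int depth);

// Walk outward from the current frame until GetCurrentFrame() reports failure.
static void PrintBacktrace()
{
    TBacktraceRecord record;
    for (int depth = 0; GetCurrentFrame(&record, depth) == 1; ++depth) {
        std::printf("#%d %s\n", depth, record.Symbol_);
    }
}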
Example #4
DWORD_PTR
LLDBServices::GetExpression(
    PCSTR exp)
{
    if (exp == nullptr)
    {
        return 0;
    }

    lldb::SBFrame frame = GetCurrentFrame();
    if (!frame.IsValid())
    {
        return 0;
    }

    DWORD_PTR result = 0;
    lldb::SBError error;
    std::string str;

    // To be compatible with windbg/dbgeng, we need to emulate the default
    // hex radix (because sos prints addresses and other hex values without
    // the 0x) by first prepending 0x and if that fails use the actual
    // undecorated expression.
    str.append("0x");
    str.append(exp);

    result = GetExpression(frame, error, str.c_str());
    if (error.Fail())
    {
        result = GetExpression(frame, error, exp);
    }

    return result;
}
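As a rough illustration of the fallback described in the comment above (the instance pointer and expression strings are hypothetical, not from the SOS sources): a bare hex string is accepted without a 0x prefix, while an expression that breaks when 0x is prepended falls through to the undecorated form.

// Hypothetical call sites; `services` is an LLDBServices instance.
DWORD_PTR addr = services->GetExpression("7f8a1c001000");   // evaluated as 0x7f8a1c001000
DWORD_PTR sym  = services->GetExpression("g_pConfig");      // "0xg_pConfig" fails, so the raw expression is evaluated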
Example #5
void DrawParticle(Particle *p, int animationNum, int x, int y)
{
	Animate(&p->sprite->animation[animationNum], p->sprite->animation[animationNum].startFrame);
	DrawSprite(p->sprite, x, y,
		GetCurrentFrame(&p->sprite->animation[animationNum]),
		GetRenderer(), SDL_FLIP_NONE);
	p->frame++;
	p->position.y -= 0.5;
}
Example #6
/*
** G: Two frames have been bowled, including a strike, with rolls of 10, 7, 2
** W: Calculate the current frame
** T: The current frame is the second frame
*/
TEST_F(TestGetCurrentFrame, ThrowTreeBallWithStrike)
{
    ThrowBall(10);
    ThrowBall(7);
    ThrowBall(2);

    returnvalue = GetCurrentFrame();

    EXPECT_EQ(2, returnvalue);

}
Example #7
/*
** G: One frame has been bowled, including a spare, with rolls of 3, 7, 3
** W: Calculate the current frame
** T: The current frame is the second frame
*/
TEST_F(TestGetCurrentFrame, ThrowThreeBallWithSpare)
{
    ThrowBall(3);
    ThrowBall(7);
    ThrowBall(3);

    returnvalue = GetCurrentFrame();

    EXPECT_EQ(2, returnvalue);

}
Example #8
/*
** G: Two frames have been bowled, with no strike or spare, with rolls of 1, 2, 3, 5
** W: Calculate the current frame
** T: The current frame is the second frame
*/
TEST_F(TestGetCurrentFrame, TheFirstFrame)
{
    ThrowBall(1);
    ThrowBall(2);
    ThrowBall(3);
    ThrowBall(5);

    returnvalue = GetCurrentFrame();

    EXPECT_EQ(2, returnvalue);
}
Example #9
static int TestGetCurrentFrameStatic()
{
    struct TBacktraceRecord record;
    int result = GetCurrentFrame(&record, 0);
    REQUIRE_EQUAL(result, 1);
#ifdef __FreeBSD__
    REQUIRE_STRINGS_EQUAL(record.Symbol_, "_start");
#else
    REQUIRE_EQUAL(record.Symbol_, 0);
#endif
    return 0;
}
Example #10
//******************************************************************************
void nsGIFDecoder2::BeginImageFrame(uint16_t aDepth)
{
  gfx::SurfaceFormat format;
  if (mGIFStruct.is_transparent)
    format = gfx::SurfaceFormat::B8G8R8A8;
  else
    format = gfx::SurfaceFormat::B8G8R8X8;

  MOZ_ASSERT(HasSize());

  // Use correct format, RGB for first frame, PAL for following frames
  // and include transparency to allow for optimization of opaque images
  if (mGIFStruct.images_decoded) {
    // Image data is stored with original depth and palette
    NeedNewFrame(mGIFStruct.images_decoded, mGIFStruct.x_offset,
                 mGIFStruct.y_offset, mGIFStruct.width, mGIFStruct.height,
                 format, aDepth);
  }

  // Our first full frame is automatically created by the image decoding
  // infrastructure. Just use it as long as it matches up.
  else if (!GetCurrentFrame()->GetRect().IsEqualEdges(nsIntRect(mGIFStruct.x_offset,
                                                                mGIFStruct.y_offset,
                                                                mGIFStruct.width,
                                                                mGIFStruct.height))) {
    // Regardless of depth of input, image is decoded into 24bit RGB
    NeedNewFrame(mGIFStruct.images_decoded, mGIFStruct.x_offset,
                 mGIFStruct.y_offset, mGIFStruct.width, mGIFStruct.height,
                 format);
  } else {
    // Our preallocated frame matches up, with the possible exception of alpha.
    if (format == gfx::SurfaceFormat::B8G8R8X8) {
      GetCurrentFrame()->SetHasNoAlpha();
    }
  }

  mCurrentFrameIndex = mGIFStruct.images_decoded;
}
Example #11
HRESULT 
LLDBServices::GetFrameOffset(
    PULONG64 offset)
{
    lldb::SBFrame frame = GetCurrentFrame();
    if (!frame.IsValid())
    {
        *offset = 0;
        return E_FAIL;
    }

    *offset = frame.GetFP();
    return S_OK;
}
Example #12
/* Get CD-ROM status */
static CDstatus SDL_SYS_CDStatus (SDL_CD *cdrom, int *position)
{
    if (position) {
        int trackFrame;
        
        Lock ();
        trackFrame = GetCurrentFrame ();
        Unlock ();
    
        *position = cdrom->track[currentTrack].offset + trackFrame;
    }
    
    return status;
}
Example #13
HRESULT 
DebugClient::GetStackOffset(
    PULONG64 offset)
{
    lldb::SBFrame frame = GetCurrentFrame();
    if (!frame.IsValid())
    {
        *offset = 0;
        return E_FAIL;
    }

    *offset = frame.GetSP();
    return S_OK;
}
Example #14
// CreateFrame() is used for both simple and animated images
void nsPNGDecoder::CreateFrame(png_uint_32 x_offset, png_uint_32 y_offset,
                               int32_t width, int32_t height,
                               gfx::SurfaceFormat format)
{
  // Our first full frame is automatically created by the image decoding
  // infrastructure. Just use it as long as it matches up.
  MOZ_ASSERT(HasSize());
  if (mNumFrames != 0 ||
      !GetCurrentFrame()->GetRect().IsEqualEdges(nsIntRect(x_offset, y_offset, width, height))) {
    NeedNewFrame(mNumFrames, x_offset, y_offset, width, height, format);
  } else if (mNumFrames == 0) {
    // Our preallocated frame matches up, with the possible exception of alpha.
    if (format == gfx::SurfaceFormat::B8G8R8X8) {
      GetCurrentFrame()->SetHasNoAlpha();
    }
  }

  mFrameRect.x = x_offset;
  mFrameRect.y = y_offset;
  mFrameRect.width = width;
  mFrameRect.height = height;

  PR_LOG(GetPNGDecoderAccountingLog(), PR_LOG_DEBUG,
         ("PNGDecoderAccounting: nsPNGDecoder::CreateFrame -- created "
          "image frame with %dx%d pixels in container %p",
          width, height,
          &mImage));

  mFrameHasNoAlpha = true;

#ifdef PNG_APNG_SUPPORTED
  if (png_get_valid(mPNG, mInfo, PNG_INFO_acTL)) {
    mAnimInfo = AnimFrameInfo(mPNG, mInfo);
  }
#endif
}
Example #15
void MorphanView::DeleteSelection()
{
    if (!modifyTool) return;
    if (!modifyTool->HasSelection()) return;

    const std::set<PrimitiveSelection>& selection = modifyTool->GetSelection();
    for (const PrimitiveSelection& item : selection)
    {
        Primitive* primitive = item.primitive;
        MorphanKeyFrame& frame = GetCurrentFrame();
        frame.Delete(primitive);
    }
    modifyTool->Clear();
    panel->Refresh();
}
Example #16
  bool __stdcall Sts_UpdatePlayClockCurrent(
    StsPlayClock_t hPlayClock,
    __deref_out int* frame,
    __deref_out BOOL* bUpdate,
    __deref_out BOOL* bFinished)
  {
    auto p = reinterpret_cast<PlayClock*>(hPlayClock);
    if (p==nullptr) { PKY_ERROR("p==nullptr"); return false; }

    bool update = false;
    bool finish = false;
    *frame = p->GetCurrentFrame(&update, &finish);
    if (bUpdate!=nullptr)*bUpdate = update;
    if (bFinished!=nullptr)*bFinished = finish;
    return true;
  }
Example #17
// Draws the animation and progress bar (if any) on the screen.
// Does not flip pages.
// Should only be called with updateMutex locked.
void ScreenRecoveryUI::draw_foreground_locked() {
    if (currentIcon != NONE) {
        GRSurface* frame = GetCurrentFrame();
        int frame_width = gr_get_width(frame);
        int frame_height = gr_get_height(frame);
        int frame_x = (gr_fb_width() - frame_width) / 2;
        int frame_y = GetAnimationBaseline();
        gr_blit(frame, 0, 0, frame_width, frame_height, frame_x, frame_y);
    }

    if (progressBarType != EMPTY) {
        int width = gr_get_width(progressBarEmpty);
        int height = gr_get_height(progressBarEmpty);

        int progress_x = (gr_fb_width() - width)/2;
        int progress_y = GetProgressBaseline();

        // Erase behind the progress bar (in case this was a progress-only update)
        gr_color(0, 0, 0, 255);
        gr_fill(progress_x, progress_y, width, height);

        if (progressBarType == DETERMINATE) {
            float p = progressScopeStart + progress * progressScopeSize;
            int pos = (int) (p * width);

            if (rtl_locale) {
                // Fill the progress bar from right to left.
                if (pos > 0) {
                    gr_blit(progressBarFill, width-pos, 0, pos, height,
                            progress_x+width-pos, progress_y);
                }
                if (pos < width-1) {
                    gr_blit(progressBarEmpty, 0, 0, width-pos, height, progress_x, progress_y);
                }
            } else {
                // Fill the progress bar from left to right.
                if (pos > 0) {
                    gr_blit(progressBarFill, 0, 0, pos, height, progress_x, progress_y);
                }
                if (pos < width-1) {
                    gr_blit(progressBarEmpty, pos, 0, width-pos, height,
                            progress_x+pos, progress_y);
                }
            }
        }
    }
}
Example #18
// CreateFrame() is used for both simple and animated images
void nsPNGDecoder::CreateFrame(png_uint_32 x_offset, png_uint_32 y_offset,
                               int32_t width, int32_t height,
                               gfx::SurfaceFormat format)
{
  MOZ_ASSERT(HasSize());

  if (format == gfx::SurfaceFormat::B8G8R8A8) {
    PostHasTransparency();
  }

  // Our first full frame is automatically created by the image decoding
  // infrastructure. Just use it as long as it matches up.
  nsIntRect neededRect(x_offset, y_offset, width, height);
  nsRefPtr<imgFrame> currentFrame = GetCurrentFrame();
  if (!currentFrame->GetRect().IsEqualEdges(neededRect)) {
    if (mNumFrames == 0) {
      // We need padding on the first frame, which means that we don't draw into
      // part of the image at all. Report that as transparency.
      PostHasTransparency();
    }

    NeedNewFrame(mNumFrames, x_offset, y_offset, width, height, format);
  } else if (mNumFrames != 0) {
    NeedNewFrame(mNumFrames, x_offset, y_offset, width, height, format);
  }

  mFrameRect = neededRect;

  MOZ_LOG(GetPNGDecoderAccountingLog(), LogLevel::Debug,
         ("PNGDecoderAccounting: nsPNGDecoder::CreateFrame -- created "
          "image frame with %dx%d pixels in container %p",
          width, height,
          &mImage));

#ifdef PNG_APNG_SUPPORTED
  if (png_get_valid(mPNG, mInfo, PNG_INFO_acTL)) {
    mAnimInfo = AnimFrameInfo(mPNG, mInfo);

    if (mAnimInfo.mDispose == DisposalMethod::CLEAR) {
      // We may have to display the background under this image during
      // animation playback, so we regard it as transparent.
      PostHasTransparency();
    }
  }
#endif
}
Example #19
	void CBasicAnimation::FrameChanged(void)
	{
		if (!m_animationSheet)
			return;

		ColRow cr = GetColRow(GetCols(), GetCurrentFrame());
		size_t x((cr.col - 1) * m_frameW);
		size_t y((cr.row - 1) * m_frameH);

		if (m_frame)
		{
			m_frame.Reset();
			//al_destroy_bitmap(m_frame);
			//m_frame = nullptr;
		}

		m_frame = CBitmap(m_animationSheet, x, y, m_frameW, m_frameH);
		//m_frame = al_create_sub_bitmap(m_animationSheet, x, y, m_frameW, m_frameH);
	}
Example #20
//******************************************************************************
void
nsGIFDecoder2::BeginImageFrame(uint16_t aDepth)
{
  MOZ_ASSERT(HasSize());

  gfx::SurfaceFormat format;
  if (mGIFStruct.is_transparent) {
    format = gfx::SurfaceFormat::B8G8R8A8;
    PostHasTransparency();
  } else {
    format = gfx::SurfaceFormat::B8G8R8X8;
  }

  // Use correct format, RGB for first frame, PAL for following frames
  // and include transparency to allow for optimization of opaque images
  if (mGIFStruct.images_decoded) {
    // Image data is stored with original depth and palette
    NeedNewFrame(mGIFStruct.images_decoded, mGIFStruct.x_offset,
                 mGIFStruct.y_offset, mGIFStruct.width, mGIFStruct.height,
                 format, aDepth);
  } else {
    nsRefPtr<imgFrame> currentFrame = GetCurrentFrame();

    // Our first full frame is automatically created by the image decoding
    // infrastructure. Just use it as long as it matches up.
    if (!currentFrame->GetRect().IsEqualEdges(nsIntRect(mGIFStruct.x_offset,
                                                        mGIFStruct.y_offset,
                                                        mGIFStruct.width,
                                                        mGIFStruct.height))) {

      // We need padding on the first frame, which means that we don't draw into
      // part of the image at all. Report that as transparency.
      PostHasTransparency();

      // Regardless of depth of input, image is decoded into 24bit RGB
      NeedNewFrame(mGIFStruct.images_decoded, mGIFStruct.x_offset,
                   mGIFStruct.y_offset, mGIFStruct.width, mGIFStruct.height,
                   format);
    }
  }

  mCurrentFrameIndex = mGIFStruct.images_decoded;
}
Example #21
HRESULT CBSprite::Draw(int X, int Y, CBObject* Register, float ZoomX, float ZoomY, DWORD Alpha)
{
	GetCurrentFrame(ZoomX, ZoomY);
	if(m_CurrentFrame<0 || m_CurrentFrame>=m_Frames.GetSize()) 
	{
		return S_OK;
	}
	// move owner if allowed to
	if(m_Changed && m_Owner && m_Owner->m_Movable)
	{
		m_Owner->m_PosX += m_MoveX;
		m_Owner->m_PosY += m_MoveY;
		m_Owner->AfterMove();
		
		X = m_Owner->m_PosX;
		Y = m_Owner->m_PosY;
	}

	// draw frame
	return Display(X, Y, Register, ZoomX, ZoomY, Alpha);
}
Example #22
HRESULT
LLDBServices::GetValueByName(
    PCSTR name,
    PDWORD_PTR debugValue)
{
    lldb::SBFrame frame = GetCurrentFrame();
    if (!frame.IsValid())
    {
        *debugValue = 0;
        return E_FAIL;
    }

    lldb::SBValue value = frame.FindRegister(name);
    if (!value.IsValid())
    {
        *debugValue = 0;
        return E_FAIL;
    }

    *debugValue = value.GetValueAsUnsigned();
    return S_OK;
}
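A hedged usage sketch (the instance pointer and register name are illustrative, not from the original sources): the helper resolves a register by name in the currently selected frame and returns its unsigned value.

// Hypothetical caller; `services` is an LLDBServices instance.
DWORD_PTR ip = 0;
if (services->GetValueByName("rip", &ip) == S_OK) {
    // ip now holds the instruction pointer of the selected frame
}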
Example #23
/**
 * @brief Interface for getting Audio/Video data between processes
 *
 * @param   field    The operation to perform; one of the AV_OP_XXX_XXX values defined in Appro_interface.h
 * @param   serial   Frame serial number, used when locking frame data
 * @param   ptr      Frame information data structure defined in Appro_interface.h
 *
 * @return one of the RET_XXXXX values defined in Appro_interface.h
 *
 * @pre Must have called ApproDrvInit() and func_get_mem()
 */
int GetAVData( unsigned int field, int serial, AV_DATA * ptr )
{
	int ret=RET_SUCCESS;
	if(virptr == NULL)
		return RET_ERROR_OP;
	switch(field){
		case AV_OP_GET_MJPEG_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = GetCurrentFrame(FMT_MJPEG);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					int cnt = 0;
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
					ptr->timestamp = curframe.timestamp;
					for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
					{
						ptr->ref_serial[cnt] = curframe.ref_serial[cnt];
					}
				}
			}
			break;
		case AV_OP_WAIT_NEW_MJPEG_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = WaitNewFrame(FMT_MJPEG);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
				}
			}
			break;
		case AV_OP_LOCK_MJPEG:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					int cnt = 0;
					frame.serial_no = serial;
					frame.format = FMT_MJPEG;
					ret = LockFrame(&frame);
					switch(ret){
						case -98:
							ret = RET_OVERWRTE;
							break;
						case -99:
							ret = RET_NO_MEM;
							break;
						case 0:
							ptr->serial = serial;
							ptr->size = frame.size;
							ptr->width = frame.width;
							ptr->height = frame.height;
							ptr->quality = frame.quality;
							ptr->flags = frame.flags;
							ptr->timestamp = frame.timestamp;
							for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
							{
								ptr->ref_serial[cnt] = frame.ref_serial[cnt];
							}
							ptr->ptr = (unsigned char *)virptr + frame.offset;
							break;
						default:
							ret = RET_UNKNOWN_ERROR;
							break;
					}
				}
			}
			break;
		case AV_OP_UNLOCK_MJPEG:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					frame.serial_no = serial;
					frame.format = FMT_MJPEG;
					UnlockFrame(&frame);
				}
			}
			break;
		case AV_OP_GET_MPEG4_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = GetCurrentFrame(FMT_MPEG4);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					int cnt = 0;
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
					ptr->flags = curframe.flags;
					ptr->timestamp = curframe.timestamp;
					ptr->temporalId = curframe.temporalId;
					for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
					{
						ptr->ref_serial[cnt] = curframe.ref_serial[cnt];
					}
				}
			}
			break;
		case AV_OP_GET_MPEG4_CIF_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = GetCurrentFrame(FMT_MPEG4_EXT);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					int cnt = 0;
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
					ptr->flags = curframe.flags;
					ptr->timestamp = curframe.timestamp;
					ptr->temporalId = curframe.temporalId;
					for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
					{
						ptr->ref_serial[cnt] = curframe.ref_serial[cnt];
					}
				}
			}
			break;
		case AV_OP_WAIT_NEW_MPEG4_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = WaitNewFrame(FMT_MPEG4);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
					ptr->flags = curframe.flags;
					ptr->temporalId = curframe.temporalId;
				}
			}
			break;

		case AV_OP_WAIT_NEW_MPEG4_CIF_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = WaitNewFrame(FMT_MPEG4);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
					ptr->flags = curframe.flags;
					ptr->temporalId = curframe.temporalId;
				}
			}
			break;

		case AV_OP_LOCK_MP4:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					int cnt = 0;
					frame.serial_no = serial;
					frame.format = FMT_MPEG4;
					ret = LockFrame(&frame);
					switch(ret){
						case -99:
							ret = RET_OVERWRTE;
							printf("RET_OVERWRTE\n");
							break;
						case -100:
							ret = RET_NO_MEM;
							printf("RET_NO_MEM\n");
							break;
						case -101:
							ret = RET_ERROR_OP;
							printf("RET_ERROR_OP: serial: %d\n",serial);
							break;
						case -98:
							ret = RET_NO_VALID_DATA;
							//printf("RET_NO_VALID_DATA\n");
							break;
						case 0:
							ptr->serial = serial;
							ptr->size = frame.size;
							ptr->width = frame.width;
							ptr->height = frame.height;
							ptr->quality = frame.quality;
							ptr->flags = frame.flags;
							ptr->frameType = frame.frameType;
							ptr->timestamp = frame.timestamp;
							ptr->temporalId = frame.temporalId;
							for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
							{
								ptr->ref_serial[cnt] = frame.ref_serial[cnt];
							}
							ptr->ptr = (unsigned char *)virptr + frame.offset;
							break;
						default:
							ret = RET_UNKNOWN_ERROR;
							printf("RET_UNKNOWN_ERROR\n");
							break;
					}
				}
			}
			break;
			case AV_OP_LOCK_MP4_CIF:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					int cnt = 0;
					frame.serial_no = serial;
					frame.format = FMT_MPEG4_EXT;
					ret = LockFrame(&frame);
					switch(ret){
						case -99:
							ret = RET_OVERWRTE;
							printf("RET_OVERWRTE\n");
							break;
						case -100:
							ret = RET_NO_MEM;
							printf("RET_NO_MEM\n");
							break;
						case -101:
							ret = RET_ERROR_OP;
							printf("RET_ERROR_OP: serial: %d\n",serial);
							break;
						case -98:
							ret = RET_NO_VALID_DATA;
							//printf("RET_NO_VALID_DATA\n");
							break;
						case 0:
							ptr->serial = serial;
							ptr->size = frame.size;
							ptr->width = frame.width;
							ptr->height = frame.height;
							ptr->quality = frame.quality;
							ptr->flags = frame.flags;
							ptr->frameType = frame.frameType;
							ptr->timestamp = frame.timestamp;
							ptr->temporalId = frame.temporalId;
							for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
							{
								ptr->ref_serial[cnt] = frame.ref_serial[cnt];
							}
							ptr->ptr = (unsigned char *)virptr + frame.offset;
							break;
						default:
							ret = RET_UNKNOWN_ERROR;
							printf("RET_UNKNOWN_ERROR\n");
							break;
					}
				}
			}
			break;

		case AV_OP_UNLOCK_MP4:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					frame.serial_no = serial;
					frame.format = FMT_MPEG4;
					UnlockFrame(&frame);
				}
			}
			break;

		case AV_OP_UNLOCK_MP4_CIF:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					frame.serial_no = serial;
					frame.format = FMT_MPEG4_EXT;
					UnlockFrame(&frame);
				}
			}
			break;
		case AV_OP_LOCK_MP4_VOL:
		{
			FrameInfo_t frame;
			ret = GetVolInfo(&frame,FMT_MPEG4);
			if( ret > 0 )
			{
				memcpy( VolInfo,((unsigned char *)virptr + frame.offset),ret);
			}
			ptr->ptr = VolInfo;
			ptr->size = ret;
			ret = RET_SUCCESS;
			break;
		}
		case AV_OP_GET_MEDIA_INFO:
		{
			FrameInfo_t frame;
			ptr->flags = GetMediaInfo(&frame);
			ret = RET_SUCCESS;
			break;
		}
		case AV_OP_LOCK_MP4_CIF_VOL:
		{
			FrameInfo_t frame;
			ret = GetVolInfo(&frame,FMT_MPEG4_EXT);
			if( ret > 0 )
			{
				memcpy( VolInfo_cif,((unsigned char *)virptr + frame.offset),ret);
			}
			ptr->ptr = VolInfo_cif;
			ptr->size = ret;
			ret = RET_SUCCESS;
			break;
		}
		case AV_OP_UNLOCK_MP4_VOL:
			ret = RET_SUCCESS;
			break;

		case AV_OP_UNLOCK_MP4_CIF_VOL:
			ret = RET_SUCCESS;
			break;

		case AV_OP_LOCK_MP4_IFRAME:
		{
			FrameInfo_t curframe;
			LockMpeg4IFrame(&curframe, FMT_MPEG4);
			if(curframe.serial_no == -1){
				ret = RET_NO_VALID_DATA;
			} else if(curframe.serial_no == -2){
				ret = RET_NO_MEM;
			} else {
				int cnt = 0;
				ptr->serial = curframe.serial_no;
				ptr->size = curframe.size;
				ptr->width = curframe.width;
				ptr->height = curframe.height;
				ptr->quality = curframe.quality;
				ptr->flags = curframe.flags;
				ptr->frameType = curframe.frameType;
				ptr->timestamp = curframe.timestamp;
				ptr->temporalId = curframe.temporalId;
				for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
				{
					ptr->ref_serial[cnt] = curframe.ref_serial[cnt];
				}
				ptr->ptr = (unsigned char *)virptr + curframe.offset;
				ret = RET_SUCCESS;
			}
			break;
		}

		case AV_OP_LOCK_MP4_CIF_IFRAME:
		{
			FrameInfo_t curframe;
			LockMpeg4IFrame(&curframe,FMT_MPEG4_EXT);
			if(curframe.serial_no == -1){
				ret = RET_NO_VALID_DATA;
			} else if(curframe.serial_no == -2){
				ret = RET_NO_MEM;
			} else {
				int cnt = 0;
				ptr->serial = curframe.serial_no;
				ptr->size = curframe.size;
				ptr->width = curframe.width;
				ptr->height = curframe.height;
				ptr->quality = curframe.quality;
				ptr->flags = curframe.flags;
				ptr->frameType = curframe.frameType;
				ptr->timestamp = curframe.timestamp;
				ptr->temporalId = curframe.temporalId;
				for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
				{
					ptr->ref_serial[cnt] = curframe.ref_serial[cnt];
				}
				ptr->ptr = (unsigned char *)virptr + curframe.offset;
				ret = RET_SUCCESS;
			}
			break;
		}
		case AV_OP_LOCK_ULAW:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					int cnt = 0;
					frame.serial_no = serial;
					frame.format = FMT_AUDIO;
					ret = LockFrame(&frame);
					switch(ret){
						case -97:
						case -98:
							ret = RET_NO_VALID_DATA;
							break;
						case -99:
							ret = RET_NO_MEM;
							break;
						case 0:
							ptr->serial = serial;
							ptr->size = frame.size;
							ptr->width = frame.width;
							ptr->height = frame.height;
							ptr->quality = frame.quality;
							ptr->flags = frame.flags;
							ptr->timestamp = frame.timestamp;
							ptr->temporalId = frame.temporalId;
							for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
							{
								ptr->ref_serial[cnt] = frame.ref_serial[cnt];
							}
							ptr->ptr = (unsigned char *)virptr + frame.offset;
							break;
						default:
							ret = RET_UNKNOWN_ERROR;
							break;
					}
				}
			}
			break;
		case AV_OP_UNLOCK_ULAW:
			if(serial == -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t frame;
				if(serial < 0){
					ret = RET_INVALID_PRM;
				} else {
					frame.serial_no = serial;
					frame.format = FMT_AUDIO;
					UnlockFrame(&frame);
				}
			}
			break;
		case AV_OP_GET_ULAW_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = GetCurrentFrame(FMT_AUDIO);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					int cnt = 0;
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
					ptr->timestamp = curframe.timestamp;
					ptr->temporalId = curframe.temporalId;
					for (cnt = 0; cnt < FMT_MAX_NUM; cnt++ )
					{
						ptr->ref_serial[cnt] = curframe.ref_serial[cnt];
					}
				}
			}
			break;

		case AV_OP_WAIT_NEW_ULAW_SERIAL:
			if(serial != -1){
				ret = RET_INVALID_PRM;
			} else {
				FrameInfo_t curframe = WaitNewFrame(FMT_AUDIO);
				if(curframe.serial_no < 0){
					ret = RET_NO_VALID_DATA;
				} else {
					ptr->serial = curframe.serial_no;
					ptr->size = curframe.size;
					ptr->width = curframe.width;
					ptr->height = curframe.height;
				}
			}
			break;
		default:
			ret = RET_INVALID_COMMAND;
			dbg("Command field = %d\n", field);
			break;
	}
	return ret;
}
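Putting the MJPEG branches of this switch together, a typical consumer first asks for the latest serial, locks that frame, reads the mapped buffer, then unlocks it. The sketch below assumes the Appro_interface.h declarations referenced in the doc comment and that ApproDrvInit()/func_get_mem() have already been called; the helper name and field types are illustrative.

#include <cstdio>
#include "Appro_interface.h"   // assumed to declare GetAVData(), AV_DATA, AV_OP_* and RET_* as referenced above

// Grab the newest MJPEG frame, use it, and release it again.
static int GrabLatestMjpeg()
{
    AV_DATA latest;
    if (GetAVData(AV_OP_GET_MJPEG_SERIAL, -1, &latest) != RET_SUCCESS)
        return -1;                                   // no frame produced yet

    AV_DATA frame;
    if (GetAVData(AV_OP_LOCK_MJPEG, latest.serial, &frame) != RET_SUCCESS)
        return -1;                                   // frame already overwritten, or out of lock memory

    std::printf("MJPEG frame %d: %dx%d, %d bytes\n",
                (int)frame.serial, (int)frame.width, (int)frame.height, (int)frame.size);
    // ... consume frame.ptr (frame.size bytes) here ...

    GetAVData(AV_OP_UNLOCK_MJPEG, frame.serial, &frame);
    return 0;
}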
Example #24
void Player::Update(float elapsed) {
	// this is an override.  do the base class update
	Character::Update(elapsed);

	// get ducking, jumping and movement key input
	const Uint8 *state = SDL_GetKeyState(NULL);
	ducking = state[SDLK_s];
	if (state[SDLK_a]) {
		vx -= speed * elapsed;
	}
	if (state[SDLK_d]) {
		vx += speed * elapsed;
	}
	if ((state[SDLK_w] || state[SDLK_SPACE]) && onGround) {
		vy -= 7.5f;
	}
	
	// determine the facing direction
	if (vx != 0.0f)
		isFlipped = vx < 0;

	// can't move while ducking unless in the air
	if (ducking && onGround) {
		vx = 0.0f;
	}

	// apply gravity
	vy += Uber::I().gravity * elapsed;

	// do movement with collision checks.  y, then x
	if (MoveWithCollisionCheckY(vy)) {
		onGround = vy > 0.0f;
		vy = 0.0f;
	}
	else {
		onGround = false;
	}
	if (MoveWithCollisionCheckX(vx)) {
		vx = 0.0f;
	}

	// fall death
	if (Uber::I().level->EntityAt(x, y).compare("death") == 0) {
		Respawn();
	}
	
	// determine the animation based on the player state
	SetAnimation(ducking ? "p3_duck" : (!onGround ? "p3_jump" : (vx ? "p3_walk" : "p3_stand")), isFlipped);

	// check if the player intersected an enemy
	for (auto enemy : Uber::I().enemies) {

		// skip checking against dead enemies
		if (!enemy->isAlive) continue;

		// taking the address of a temporary is invalid; keep the rects in locals instead
		SDL_Rect p = Utils::MakeRect(x + offsetX, y + offsetY, GetCurrentFrame().w, GetCurrentFrame().h);
		SDL_Rect e = Utils::MakeRect(enemy->x + enemy->offsetX, enemy->y + enemy->offsetY, enemy->GetCurrentFrame().w, enemy->GetCurrentFrame().h);
		// check if the sprites overlap
		if (Utils::intersection(&p, &e)) {
			// if the player's feet are higher than the enemy's midpoint, kill the enemy
			if (p.y + p.h <= e.y + e.h / 2.0f) {
				enemy->GotHit(this);
				// bounce off
				vy = (state[SDLK_w] || state[SDLK_SPACE]) ? -8.0f : -4.0f;
			}
			// otherwise, the player got hit
			else {
				GotHit(enemy);
			}
		}
	}

	// always zero out the x velocity because that's how platformers work!
	vx = 0.0f;
}
Example #25
INT32 Cps2Frame()
{
	INT32 nDisplayEnd, nNext;									// variables to keep track of executed 68K cyles
	INT32 i;

	if (CpsReset) {
		DrvReset();
	}

//	extern INT32 prevline;
//	prevline = -1;

	SekNewFrame();
	if (!Cps2DisableQSnd) QsndNewFrame();

	nCpsCycles = (INT32)(((INT64)nCPS68KClockspeed * nBurnCPUSpeedAdjust) / 0x0100);
	SekOpen(0);
	SekSetCyclesScanline(nCpsCycles / nCpsNumScanlines);

	CpsRwGetInp();											// Update the input port values
	
	// Check the volumes every 5 frames or so
#if 0
	if (GetCurrentFrame() % 5 == 0) {
		if (Cps2VolUp) Cps2Volume++;
		if (Cps2VolDwn) Cps2Volume--;
		
		if (Cps2Volume > 39) Cps2Volume = 39;
		if (Cps2Volume < 0) Cps2Volume = 0;
		
		QscSetRoute(BURN_SND_QSND_OUTPUT_1, Cps2Volume / 39.0, BURN_SND_ROUTE_LEFT);
		QscSetRoute(BURN_SND_QSND_OUTPUT_2, Cps2Volume / 39.0, BURN_SND_ROUTE_RIGHT);
	}
#endif
	
	nDisplayEnd = nCpsCycles * (nFirstLine + 224) / nCpsNumScanlines;	// Account for VBlank

	nInterrupt = 0;
	memset(nRasterline, 0, (MAX_RASTER + 2) * sizeof(nRasterline[0]));

	// Determine which (if any) of the line counters generates the first IRQ
	bEnableAutoIrq50 = bEnableAutoIrq52 = false;
	nIrqLine50 = nIrqLine52 = nCpsNumScanlines;
	if (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x50))) & 0x8000) {
		bEnableAutoIrq50 = true;
	}
	if (bEnableAutoIrq50 || (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x4E))) & 0x0200) == 0) {
		nIrqLine50 = (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x50))) & 0x01FF);
	}
	if (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x52))) & 0x8000) {
		bEnableAutoIrq52 = true;
	}
	if (bEnableAutoIrq52 || (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x4E))) & 0x0200) == 0) {
		nIrqLine52 = (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x52))) & 0x01FF);
	}
	ScheduleIRQ();

	SekIdle(nCpsCyclesExtra);

	if (nIrqCycles < nCpsCycles * nFirstLine / nCpsNumScanlines) {
		SekRun(nIrqCycles);
		DoIRQ();
	}
	nNext = nCpsCycles * nFirstLine / nCpsNumScanlines;
	if (SekTotalCycles() < nNext) {
		SekRun(nNext - SekTotalCycles());
	}

	CopyCpsReg(0);										// Get inititial copy of registers
	CopyCpsFrg(0);										//

	if (nIrqLine >= nCpsNumScanlines && (BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x4E))) & 0x0200) == 0) {
		nIrqLine50 = BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x50))) & 0x01FF;
		nIrqLine52 = BURN_ENDIAN_SWAP_INT16(*((UINT16*)(CpsReg + 0x52))) & 0x01FF;
		ScheduleIRQ();
	}

	{
		nNext = (nDisplayEnd) / 3;			// find out next cycle count to run to

		while (nNext > nIrqCycles && nInterrupt < MAX_RASTER) {
			SekRun(nIrqCycles - SekTotalCycles());
			DoIRQ();
		}
		SekRun(nNext - SekTotalCycles());				// run cpu

		nNext = (2 * nDisplayEnd) / 3;			// find out next cycle count to run to

		while (nNext > nIrqCycles && nInterrupt < MAX_RASTER) {
			SekRun(nIrqCycles - SekTotalCycles());
			DoIRQ();
		}
		SekRun(nNext - SekTotalCycles());				// run cpu

		nNext = (3 * nDisplayEnd) / 3;			// find out next cycle count to run to

		while (nNext > nIrqCycles && nInterrupt < MAX_RASTER) {
			SekRun(nIrqCycles - SekTotalCycles());
			DoIRQ();
		}
		SekRun(nNext - SekTotalCycles());				// run cpu
	}
	
	CpsObjGet();										// Get objects

//	nCpsCyclesSegment[0] = (nCpsCycles * nVBlank) / nCpsNumScanlines;
//	nDone += SekRun(nCpsCyclesSegment[0] - nDone);

	SekSetIRQLine(2, SEK_IRQSTATUS_AUTO);				// VBlank
	//if (pBurnDraw)
		CpsDraw();
	SekRun(nCpsCycles - SekTotalCycles());	

	nCpsCyclesExtra = SekTotalCycles() - nCpsCycles;

	if (!Cps2DisableQSnd) QsndEndFrame();

	SekClose();

//	bprintf(PRINT_NORMAL, _T("    -\n"));

#if 0 && defined FBA_DEBUG
	if (nInterrupt) {
		bprintf(PRINT_IMPORTANT, _T("Beam synchronized interrupt at line %2X.\r"), nRasterline[nInterrupt]);
	} else {
		bprintf(PRINT_NORMAL, _T("Beam synchronized interrupt disabled.   \r"));
	}

	extern INT32 counter;
	if (counter) {
		bprintf(PRINT_NORMAL, _T("\n\nSlices start at: "));
		for (i = 0; i < MAX_RASTER + 2; i++) {
			bprintf(PRINT_NORMAL, _T("%2X "), nRasterline[i]);
		}
		bprintf(PRINT_NORMAL, _T("\n"));
		for (i = 0; i < 0x80; i++) {
			if (*((UINT16*)(CpsSaveReg[0] + i * 2)) != *((UINT16*)(CpsSaveReg[nInterrupt] + i * 2))) {
				bprintf(PRINT_NORMAL, _T("Register %2X: %4X -> %4X\n"), i * 2, *((UINT16*)(CpsSaveReg[0] + i * 2)), *((UINT16*)(CpsSaveReg[nInterrupt] + i * 2)));
			}
		}
		bprintf(PRINT_NORMAL, _T("\n"));
		for (i = 0; i < 0x010; i++) {
			if (CpsSaveFrg[0][i] != CpsSaveFrg[nInterrupt][i]) {
				bprintf(PRINT_NORMAL, _T("FRG %X: %02X -> %02X\n"), i, CpsSaveFrg[0][i], CpsSaveFrg[nInterrupt][i]);
			}
		}
		bprintf(PRINT_NORMAL, _T("\n"));
		if (((CpsSaveFrg[0][4] << 8) | CpsSaveFrg[0][5]) != ((CpsSaveFrg[nInterrupt][4] << 8) | CpsSaveFrg[nInterrupt][5])) {
			bprintf(PRINT_NORMAL, _T("Layer-sprite priority: %04X -> %04X\n"), ((CpsSaveFrg[0][4] << 8) | CpsSaveFrg[0][5]), ((CpsSaveFrg[nInterrupt][4] << 8) | CpsSaveFrg[nInterrupt][5]));
		}

		bprintf(PRINT_NORMAL, _T("\n"));
		for (INT32 j = 0; j <= nInterrupt; j++) {
			if (j) {
				bprintf(PRINT_NORMAL, _T("IRQ : %i (triggered at line %3i)\n\n"), j, nRasterline[j]);
			} else {
				bprintf(PRINT_NORMAL, _T("Initial register status\n\n"));
			}

			for (i = 0; i < 0x080; i+= 8) {
				bprintf(PRINT_NORMAL, _T("%2X: %4X %4X %4X %4X %4X %4X %4X %4X\n"), i * 2, *((UINT16*)(CpsSaveReg[j] + 0 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 2 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 4 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 6 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 8 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 10 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 12 + i * 2)), *((UINT16*)(CpsSaveReg[j] + 14 + i * 2)));
			}

			bprintf(PRINT_NORMAL, _T("\nFRG: "));
			for (i = 0; i < 0x010; i++) {
				bprintf(PRINT_NORMAL, _T("%02X "), CpsSaveFrg[j][i]);
			}
			bprintf(PRINT_NORMAL, _T("\n\n"));

		}

		extern INT32 bRunPause;
		bRunPause = 1;
		counter = 0;
	}
#endif

	return 0;
}
Example #26
CFrameSelection CFrameEditorModel::GetActiveSelection() const {
	return IsSelecting() ? m_selection : MakeFrameSelection(GetCurrentFrame());
}
Example #27
void HiscoreApply()
{
#if defined FBA_DEBUG
	if (!Debug_HiscoreInitted) bprintf(PRINT_ERROR, _T("HiscoreApply called without init\n"));
#endif

	if (!CheckHiscoreAllowed() || !HiscoresInUse) return;
	
	if (nCpuType == -1) set_cpu_type();
	
	for (UINT32 i = 0; i < nHiscoreNumRanges; i++) {
		if (HiscoreMemRange[i].Loaded && HiscoreMemRange[i].Applied == APPLIED_STATE_ATTEMPTED) {
			INT32 Confirmed = 1;
			cpu_open(HiscoreMemRange[i].nCpu);
			for (UINT32 j = 0; j < HiscoreMemRange[i].NumBytes; j++) {
				if (cpu_read_byte(HiscoreMemRange[i].Address + j) != HiscoreMemRange[i].Data[j]) {
					Confirmed = 0;
				}
			}
			cpu_close();
			
			if (Confirmed == 1) {
				HiscoreMemRange[i].Applied = APPLIED_STATE_CONFIRMED;
#if 1 && defined FBA_DEBUG
				bprintf(PRINT_IMPORTANT, _T("Applied Hi Score Memory Range %i on frame number %i\n"), i, GetCurrentFrame());
#endif
			} else {
				HiscoreMemRange[i].Applied = APPLIED_STATE_NONE;
				HiscoreMemRange[i].ApplyNextFrame = 1;
#if 1 && defined FBA_DEBUG
				bprintf(PRINT_IMPORTANT, _T("Failed attempt to apply Hi Score Memory Range %i on frame number %i\n"), i, GetCurrentFrame());
#endif
			}
		}
		
		if (HiscoreMemRange[i].Loaded && HiscoreMemRange[i].Applied == APPLIED_STATE_NONE && HiscoreMemRange[i].ApplyNextFrame) {			
			cpu_open(HiscoreMemRange[i].nCpu);
			for (UINT32 j = 0; j < HiscoreMemRange[i].NumBytes; j++) {
				cpu_write_byte(HiscoreMemRange[i].Address + j, HiscoreMemRange[i].Data[j]);				
			}
			cpu_close();
			
			HiscoreMemRange[i].Applied = APPLIED_STATE_ATTEMPTED;
			HiscoreMemRange[i].ApplyNextFrame = 0;
		}
		
		if (HiscoreMemRange[i].Loaded && HiscoreMemRange[i].Applied == APPLIED_STATE_NONE) {
			cpu_open(HiscoreMemRange[i].nCpu);
			if (cpu_read_byte(HiscoreMemRange[i].Address) == HiscoreMemRange[i].StartValue && cpu_read_byte(HiscoreMemRange[i].Address + HiscoreMemRange[i].NumBytes - 1) == HiscoreMemRange[i].EndValue) {
				HiscoreMemRange[i].ApplyNextFrame = 1;
			}
			cpu_close();
		}
	}
}
Example #28
int32_t WrapperDLL::MediaPlayer_GetCurrentFrame(void* self){
	auto self_ = (MediaPlayer*)self;
	auto ret = self_->GetCurrentFrame();
	return ret;
}
Example #29
// Update CpsPal with the new palette at pNewPal (length 0x1000 bytes)
int CpsPalUpdate(unsigned char* pNewPal, int bRecalcAll)
{
	int i;
	unsigned short *ps, *pn;

	// If we are recalculating the whole palette, just copy to CpsPalSrc
	// and recalculate it all
	if (bRecalcAll) {
		ps = (unsigned short*)CpsPalSrc;
		pn = (unsigned short*)pNewPal;

		if (nLagObjectPalettes) {
			int nBuffer = 0x0C00 + ((GetCurrentFrame() & 1) << 9);

			memcpy(ps + 0x0200, pn + 0x0200, 0x0600 << 1);
			memcpy(ps + nBuffer, pn, 0x0200 << 1);
			memcpy(ps + 0x0E00, pn, 0x0200 << 1);

			CpsObjPal = CpsPal + nBuffer;
		} else {
			memcpy(ps, pn, 0x0800 << 1);
		}

		CalcAll();

		return 0;
	}

	if (Cps == 2) {
		if (nLagObjectPalettes) {
			int nBuffer = 0x0C00 + ((GetCurrentFrame() & 1) << 9);

			ps = (unsigned short*)CpsPalSrc + (nBuffer ^ 0x0200);
			pn = (unsigned short*)pNewPal;
			CpsObjPal = CpsPal + (nBuffer ^ 0x0200);

			for (i = 0; i < 0x0200; i++, ps++, pn++) {
				unsigned short n;
				n = *pn;
				if (*ps == n) {
					continue;								// Colour hasn't changed - great!
				}

				*ps = n;									// Update our copy of the palette

				CpsObjPal[i ^ 15] = CalcColCPS2(n);
			}

			ps = (unsigned short*)CpsPalSrc + 0x0200;
			pn = (unsigned short*)pNewPal + 0x0200;

			for (i = 0x0200; i < 0x0800; i++, ps++, pn++) {
				unsigned short n;
				n = *pn;
				if (*ps == n) {
					continue;								// Colour hasn't changed - great!
				}

				*ps = n;									// Update our copy of the palette

				CpsPal[i ^ 15] = CalcColCPS2(n);
			}

			CpsObjPal = CpsPal + nBuffer;
		} else {
			ps = (unsigned short*)CpsPalSrc;
			pn = (unsigned short*)pNewPal;

			for (i = 0x0000; i < 0x0800; i++, ps++, pn++) {
				unsigned short n = *pn;
				if (*ps == n) {
					continue;								// Colour hasn't changed - great!
				}

				*ps = n;									// Update our copy of the palette

				CpsPal[i ^ 15] = CalcColCPS2(n);
			}
		}
	} else {
		ps = (unsigned short*)CpsPalSrc;
		pn = (unsigned short*)pNewPal;

		for (i = 0x0000; i < 0x0800; i++, ps++, pn++) {
			unsigned short n = *pn;
			if (*ps == n) {
                continue;								// Colour hasn't changed - great!
			}

			*ps = n;									// Update our copy of the palette

			CpsPal[i ^ 15] = CalcColCPS1(n);
		}
	}

	return 0;
}
Example #30
FGameFrame* FGearVR::GetFrame() const
{
	return static_cast<FGameFrame*>(GetCurrentFrame());
}