Example #1
void ARX_SPEECH_Update() {
	
	unsigned long tim = (unsigned long)(arxtime);

	if(cinematicBorder.isActive() || BLOCK_PLAYER_CONTROLS)
		ARX_CONVERSATION_CheckAcceleratedSpeech();

	for(size_t i = 0; i < MAX_ASPEECH; i++) {
		if(!aspeech[i].exist)
			continue;

		Entity * io = aspeech[i].io;

		// updates the speech sound position and the talk animation
		if(io) {
			if(aspeech[i].flags & ARX_SPEECH_FLAG_OFFVOICE)
				ARX_SOUND_RefreshSpeechPosition(aspeech[i].sample);
			else
				ARX_SOUND_RefreshSpeechPosition(aspeech[i].sample, io);

			if((io != entities.player() || EXTERNALVIEW) && ValidIOAddress(io)) {
				if(!io->anims[aspeech[i].mood])
					aspeech[i].mood = ANIM_TALK_NEUTRAL;
				
				ANIM_HANDLE * anim = io->anims[aspeech[i].mood];
				if(anim) {
					AnimLayer & layer2 = io->animlayer[2];
					if(layer2.cur_anim != anim || (layer2.flags & EA_ANIMEND)) {
						changeAnimation(io, 2, anim);
					}
				}
			}
		}

		// checks finished speech
		if(tim >= aspeech[i].time_creation + aspeech[i].duration) {
			EERIE_SCRIPT *es = aspeech[i].es;
			Entity *io = aspeech[i].ioscript;
			long scrpos = aspeech[i].scrpos;
			ARX_SPEECH_Release(i);

			if(es && ValidIOAddress(io))
				ScriptEvent::send(es, SM_EXECUTELINE, "", io, "", scrpos);
		}
	}

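	// Draw and scroll subtitle text for active speech entries while the cinematic border is shown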
	for(size_t i = 0; i < MAX_ASPEECH; i++) {
		ARX_SPEECH *speech = &aspeech[i];

		if(!speech->exist)
			continue;

		if(speech->text.empty())
			continue;
		
		if(!cinematicBorder.isActive())
			continue;

		if(CINEMA_DECAL < 100.f)
			continue;

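		// Lay out a subtitle band three text lines high near the bottom of the screen and clip drawing to it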
		Vec2i sSize = hFontInBook->getTextSize(speech->text);

		float fZoneClippHeight = static_cast<float>(sSize.y * 3);
		float fStartYY = 100 * g_sizeRatio.y;
		float fStartY = static_cast<float>(((int)fStartYY - (int)fZoneClippHeight) >> 1);
		float fDepY = ((float)g_size.height()) - fStartYY + fStartY - speech->fDeltaY + sSize.y;
		float fZoneClippY = fDepY + speech->fDeltaY;

		float fAdd = fZoneClippY + fZoneClippHeight;

		Rect::Num y = checked_range_cast<Rect::Num>(fZoneClippY);
		Rect::Num h = checked_range_cast<Rect::Num>(fAdd);
		
		Rect clippingRect(0, y+1, g_size.width(), h);
		if(config.video.limitSpeechWidth) {
			s32 w = std::min(g_size.width(), s32(640 * g_sizeRatio.y));
			clippingRect.left = (g_size.width() - w) / 2;
			clippingRect.right = (g_size.width() + w) / 2;
		}
		
		float height = (float)ARX_UNICODE_DrawTextInRect(
							hFontInBook,
							Vec2f(clippingRect.left + 10.f, fDepY + fZoneClippHeight),
							clippingRect.right - 10.f,
							speech->text,
							Color::white,
							&clippingRect);

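		// Fade the text in and out at the top and bottom edges of the band with gradient quads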
		GRenderer->SetBlendFunc(Renderer::BlendZero, Renderer::BlendInvSrcColor);
		GRenderer->SetRenderState(Renderer::AlphaBlending, true);
		GRenderer->SetRenderState(Renderer::DepthTest, false);

		EERIEDrawFill2DRectDegrad(Vec2f(0.f, fZoneClippY - 1.f),
		                          Vec2f(static_cast<float>(g_size.width()), fZoneClippY + (sSize.y * 3 / 4)),
		                          0.f, Color::white, Color::black);
		EERIEDrawFill2DRectDegrad(Vec2f(0.f, fZoneClippY + fZoneClippHeight - (sSize.y * 3 / 4)),
		                          Vec2f(static_cast<float>(g_size.width()), fZoneClippY + fZoneClippHeight),
		                          0.f, Color::black, Color::white);

		GRenderer->SetBlendFunc(Renderer::BlendOne, Renderer::BlendZero);
		GRenderer->SetRenderState(Renderer::DepthTest, true);
		GRenderer->SetRenderState(Renderer::AlphaBlending, false);

		height += fZoneClippHeight;

		if(speech->fDeltaY <= height) {
			// scroll speed
			float fDTime;

			if(speech->sample) {
				float duration = ARX_SOUND_GetDuration(speech->sample);
				if(duration == 0.0f) {
					duration = 4000.0f;
				}

				fDTime = (height * framedelay) / duration; //speech->duration;
				float fTimeOneLine = ((float)sSize.y) * fDTime;

				if(((float)speech->iTimeScroll) >= fTimeOneLine) {
					float fResteLine = (float)sSize.y - speech->fPixelScroll;
					float fTimePlus = (fResteLine * framedelay) / duration;
					fDTime -= fTimePlus;
					speech->fPixelScroll = 0.f;
					speech->iTimeScroll = 0;
				}
				speech->iTimeScroll += checked_range_cast<int>(framedelay);
			} else {
				fDTime = (height * framedelay) / 4000.0f;
			}

			speech->fDeltaY += fDTime;
			speech->fPixelScroll += fDTime;
		}
	}
}
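
Note: the scroll block above advances the subtitle by (height * framedelay) / duration pixels per frame, so the full text scrolls past exactly once over the length of the voice sample, with a 4000 ms fallback when no sample is playing. A minimal standalone sketch of that per-frame step, using made-up values in place of the engine's framedelay and ARX_SOUND_GetDuration():

#include <cstdio>

// Hypothetical stand-ins for the engine's frame delay and the value
// ARX_SOUND_GetDuration() would report, both in milliseconds.
static const float kFrameDelayMs = 16.f;
static const float kSampleDurationMs = 3200.f;

// Per-frame scroll increment: `height` pixels scroll past over `durationMs`,
// with the same 4000 ms fallback ARX_SPEECH_Update uses for missing samples.
static float scrollStep(float height, float frameDelayMs, float durationMs) {
	if(durationMs <= 0.f) {
		durationMs = 4000.f;
	}
	return (height * frameDelayMs) / durationMs;
}

int main() {
	float height = 240.f; // text height plus clipping zone, as computed above
	float fDeltaY = 0.f;
	int frames = 0;
	while(fDeltaY <= height) {
		fDeltaY += scrollStep(height, kFrameDelayMs, kSampleDurationMs);
		frames++;
	}
	printf("scrolled %.1f pixels over %d frames\n", fDeltaY, frames);
	return 0;
}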
Example #2
static void PlayerBookDrawRune(Rune rune) {
	
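	// Trace the rune symbol over the duration of its voice sample and play that sample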
	ARX_SPELLS_RequestSymbolDraw2(entities.player(), rune, ARX_SOUND_GetDuration(SND_SYMB[rune]));
	ARX_SOUND_PlayInterface(SND_SYMB[rune]);
}
Example #3
long ARX_SPEECH_AddSpeech(Entity * io, const std::string & data, long mood,
                          SpeechFlags flags) {
	
	if(data.empty()) {
		return -1;
	}
	
	ARX_SPEECH_ClearIOSpeech(io);
	
	long num = ARX_SPEECH_GetFree();
	if(num < 0) {
		return -1;
	}
	
	aspeech[num].exist = 1;
	aspeech[num].time_creation = arxtime.get_updated_ul();
	aspeech[num].io = io; // can be NULL
	aspeech[num].duration = 2000; // Minimum value
	aspeech[num].flags = flags;
	aspeech[num].sample = audio::INVALID_ID;
	aspeech[num].fDeltaY = 0.f;
	aspeech[num].iTimeScroll = 0;
	aspeech[num].fPixelScroll = 0.f;
	aspeech[num].mood = mood;

	LogDebug("speech \"" << data << '"');
	
	res::path sample;
	
	if(flags & ARX_SPEECH_FLAG_NOTEXT) {
		
		// For non-conversation speech choose a random variant
		
		long count = getLocalisedKeyCount(data);  
		long variant = 1;
		
		// TODO For some samples there are no corresponding entries
		// in the localization file  (utext_*.ini) -> count will be 0
		// We should probably just count the number of sample files
		
		if(count > 1) {
			do {
				variant = Random::get(1, count);
			} while(io->lastspeechflag == variant);
			io->lastspeechflag = checked_range_cast<short>(variant);
		}
		
		LogDebug(" -> " << variant << " / " << count);
		
		if(variant > 1) {
			sample = data + boost::lexical_cast<std::string>(variant);
		} else {
			sample = data;
		}
		
	} else {
		
		std::string _output = getLocalised(data);
		
		io->lastspeechflag = 0;
		aspeech[num].text = _output;
		aspeech[num].duration = std::max(aspeech[num].duration, (unsigned long)(_output.length() + 1) * 100);
		
		sample = data;
	}
	
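	// Off-voice speech is not positioned at the speaking entity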
	Entity * source = (aspeech[num].flags & ARX_SPEECH_FLAG_OFFVOICE) ? NULL : io;
	aspeech[num].sample = ARX_SOUND_PlaySpeech(sample, source);
	
	if(aspeech[num].sample == ARX_SOUND_TOO_FAR) {
		aspeech[num].sample = audio::INVALID_ID;
	}

	//Next lines must be removed (use callback instead)
	aspeech[num].duration = (unsigned long)ARX_SOUND_GetDuration(aspeech[num].sample);

	if(io && (io->ioflags & IO_NPC) && !(aspeech[num].flags & ARX_SPEECH_FLAG_OFFVOICE)) {
		// scale the duration by the NPC's speech pitch
		float fDiv = aspeech[num].duration / io->_npcdata->speakpitch;
		aspeech[num].duration = static_cast<unsigned long>(fDiv);
	}
	
	if(aspeech[num].duration < 500) {
		aspeech[num].duration = 2000;
	}
	
	return num;
}
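
Note: the duration handling above amounts to a small policy: a 2000 ms floor, raised to roughly 100 ms per character when subtitles are shown, then replaced by the actual sample length, scaled by the NPC's speech pitch, and reset to 2000 ms if the result drops below 500 ms. A distilled standalone sketch of that policy; speechDurationMs and its plain parameters are illustrative rather than engine API, and unlike the engine (which overrides unconditionally, per the "must be removed" comment) it only takes the sample length when a sample is actually playing:

#include <algorithm>
#include <string>

unsigned long speechDurationMs(const std::string & text, unsigned long sampleMs,
                               bool isNpc, bool offVoice, float speakPitch) {
	
	// 2000 ms floor, raised to ~100 ms per character when subtitle text is shown
	unsigned long duration = 2000;
	if(!text.empty()) {
		duration = std::max(duration, (unsigned long)(text.length() + 1) * 100);
	}
	
	// Prefer the real sample length when one is available
	if(sampleMs > 0) {
		duration = sampleMs;
	}
	
	// NPC voices are stretched or compressed by their speech pitch
	if(isNpc && !offVoice && speakPitch > 0.f) {
		duration = static_cast<unsigned long>(duration / speakPitch);
	}
	
	// Implausibly short results fall back to 2000 ms
	if(duration < 500) {
		duration = 2000;
	}
	
	return duration;
}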