//==============================================================================
    void run()
    {
        {
            // Allow the message thread to finish setting up the context before using it.
            MessageManagerLock mml (this);
            if (! mml.lockWasGained())
                return;
        }

        nativeContext->makeActive();
        initialiseOnThread();

       #if JUCE_USE_OPENGL_SHADERS && ! JUCE_OPENGL_ES
        shadersAvailable = OpenGLShaderProgram::getLanguageVersion() > 0;
       #endif

        while (! threadShouldExit())
        {
            const uint32 frameRenderStartTime = Time::getMillisecondCounter();

            if (renderFrame())
                waitForNextFrame (frameRenderStartTime);
        }

        shutdownOnThread();
    }
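
The waitForNextFrame() call above only has to sleep off whatever is left of the frame period once rendering is done. The helper below is a minimal sketch of that idea as a member of the same Thread subclass, assuming a fixed ~60 fps target; it is not JUCE's actual implementation, whose pacing logic may differ.

    void waitForNextFrame (uint32 frameRenderStartTime)
    {
        const uint32 targetPeriodMs = 1000 / 60;   // assumed frame period, not taken from JUCE
        const uint32 elapsed = Time::getMillisecondCounter() - frameRenderStartTime;

        if (elapsed < targetPeriodMs)
            wait ((int) (targetPeriodMs - elapsed));   // Thread::wait() returns early if notify() is called
    }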
Example #2
//======================================================================================================================
// exec                                                                                                                =
//======================================================================================================================
int Application::exec()
{
	while(1)
	{
		inputCallback();
		processInput();

		glMatrixMode(GL_MODELVIEW);
		glLoadIdentity();
		gluLookAt(camPos.getX(), camPos.getY(), camPos.getZ(),
		          camPos.getX() - camZ.getX(), camPos.getY() - camZ.getY(), camPos.getZ() - camZ.getZ(),
		          camY.getX(), camY.getY(), camY.getZ());

		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		renderGrid();

		mainLoop();

		SDL_GL_SwapBuffers();
		waitForNextFrame();
	}
	
	return 0;
}
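
The waitForNextFrame() at the bottom of this loop is not shown. A fixed-rate limiter in the same SDL 1.2 style could look like the sketch below; the 60 Hz target and the static schedule variable are assumptions for illustration, not the application's actual code.

#include <SDL.h>

static void waitForNextFrame()
{
	const Uint32 framePeriodMs = 1000 / 60;    // assumed target frame rate
	static Uint32 nextFrameTime = SDL_GetTicks();

	nextFrameTime += framePeriodMs;
	Uint32 now = SDL_GetTicks();

	if (nextFrameTime > now)
		SDL_Delay(nextFrameTime - now);        // sleep off the rest of the frame period
	else
		nextFrameTime = now;                   // running behind: re-base the schedule
}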
Example #3
void FlcPlayer::decodeVideo(bool skipLastFrame)
{
	bool videoFrameFound = false;
	
	while (!videoFrameFound)
	{
		if (!isValidFrame(_videoFrameData, _videoFrameSize, _videoFrameType))
		{
			_playingState = FINISHED;
			break;
		}

		switch (_videoFrameType)
		{
		case FRAME_TYPE:

			Uint32 delay;

			readU16(_frameChunks, _videoFrameData + 6);
			readU16(_delayOverride, _videoFrameData + 8);

			if (_headerType == FLI_TYPE)
			{
				delay = _delayOverride > 0 ? _delayOverride : _headerSpeed * (1000.0 / 70.0);
			}
			else
			{
				delay = _videoDelay;
			}

			waitForNextFrame(delay);

			// Skip the frame header; we are not interested in the rest of it
			_chunkData = _videoFrameData + 16;

			_videoFrameData += _videoFrameSize;
			// If this frame is the last one, don't play it
			if(isEndOfFile(_videoFrameData))
				_playingState = FINISHED;

			if(!shouldQuit() || !skipLastFrame)
				playVideoFrame();

			videoFrameFound = true;

			break;
		case AUDIO_CHUNK:
			_videoFrameData += _videoFrameSize + 16;
			break;
		case PREFIX_CHUNK:
			// Just skip it
			_videoFrameData += _videoFrameSize;

			break;
		}
	}
}
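
The readU16() calls above pull 16-bit fields out of the FLI/FLC frame header, which is stored little-endian regardless of the host byte order. The helper below is a minimal sketch of such a reader using standard integer types; the signature is assumed, and OpenXcom's actual helper may use SDL's Uint16/Uint8 types instead.

#include <cstdint>

// Read a little-endian 16-bit value from a byte buffer.
static inline void readU16(uint16_t& dst, const uint8_t* src)
{
	dst = static_cast<uint16_t>(src[0] | (src[1] << 8));
}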
Example #4
void ImageSequenceMovieSaver::frameWritingThreadMethod(void)
	{
	/* Save frames until shut down: */
	unsigned int frameIndex=0;
	while(!done)
		{
		/* Add the most recent frame to the captured frame queue: */
		{
		Threads::MutexCond::Lock captureLock(captureCond);
		frames.lockNewValue();
		capturedFrames.push_back(frames.getLockedValue());
		captureCond.signal(captureLock);
		}
		
		/* Wait for the next frame: */
		int numSkippedFrames=waitForNextFrame();
		if(numSkippedFrames>0)
			{
			std::cerr<<"MovieSaver: Skipped frames "<<frameIndex<<" to "<<frameIndex+numSkippedFrames-1<<std::endl;
			frameIndex+=numSkippedFrames;
			}
		}
	}
void TheoraMovieSaver::frameWritingThreadMethod(void)
	{
	/* Get the first frame: */
	frames.lockNewValue();
	const FrameBuffer& frame=frames.getLockedValue();
	
	/* Create the Theora info structure: */
	Video::TheoraInfo theoraInfo;
	unsigned int imageSize[2];
	for(int i=0;i<2;++i)
		imageSize[i]=(unsigned int)frame.getFrameSize()[i];
	theoraInfo.setImageSize(imageSize);
	theoraInfo.colorspace=TH_CS_UNSPECIFIED;
	theoraInfo.pixel_fmt=TH_PF_420;
	theoraInfo.target_bitrate=theoraBitrate;
	theoraInfo.quality=theoraQuality;
	theoraInfo.setGopSize(theoraGopSize);
	theoraInfo.fps_numerator=theoraFrameRate;
	theoraInfo.fps_denominator=1;
	theoraInfo.aspect_numerator=1;
	theoraInfo.aspect_denominator=1;
	theoraEncoder.init(theoraInfo);
	if(!theoraEncoder.isValid())
		{
		std::cerr<<"MovieSaver: Could not initialize Theora encoder"<<std::endl;
		return;
		}
	
	/* Create the image extractor: */
	imageExtractor=new Video::ImageExtractorRGB8(imageSize);
	
	/* Create the Theora frame buffer: */
	theoraFrame.init420(theoraInfo);
	
	/*************************************************
	Write the Theora stream headers to the Ogg stream:
	*************************************************/
	
	/* Set up a comment structure: */
	Video::TheoraComment comments;
	comments.setVendorString("Virtual Reality User Interface (Vrui) MovieSaver");
	
	/* Write the first stream header packet to the movie file: */
	Video::TheoraPacket packet;
	if(theoraEncoder.emitHeader(comments,packet))
		{
		/* Write the packet to the movie file: */
		oggStream.packetIn(packet);
		Video::OggPage page;
		while(oggStream.flush(page))
			page.write(*movieFile);
		}
	
	/* Write all remaining stream header packets to the movie file: */
	while(theoraEncoder.emitHeader(comments,packet))
		{
		oggStream.packetIn(packet);
		Video::OggPage page;
		while(oggStream.pageOut(page))
			page.write(*movieFile);
		}
	
	/* Flush the Ogg stream: */
	Video::OggPage page;
	while(oggStream.flush(page))
		page.write(*movieFile);
	
	/* Encode and save frames until shut down: */
	unsigned int frameIndex=0;
	while(true)
		{
		/* Get the most recent frame and check whether it's new: */
		bool newFrame=frames.lockNewValue();
		FrameBuffer& frame=frames.getLockedValue();
		if(newFrame)
			{
			/* Check if it's still the same size: */
			if(imageSize[0]!=(unsigned int)frame.getFrameSize()[0]||imageSize[1]!=(unsigned int)frame.getFrameSize()[1])
				{
				/* Theora cannot handle changing frame sizes; bail out with an error: */
				std::cerr<<"MovieSaver: Terminating due to changed frame size"<<std::endl;
				return;
				}
			
			/* Convert the new raw RGB frame to Y'CbCr 4:2:0: */
			Video::FrameBuffer tempFrame;
			tempFrame.start=frame.getBuffer();
			imageExtractor->extractYpCbCr420(&tempFrame,theoraFrame.planes[0].data,theoraFrame.planes[0].stride,theoraFrame.planes[1].data,theoraFrame.planes[1].stride,theoraFrame.planes[2].data,theoraFrame.planes[2].stride);
			}
		
		/* Feed the last converted Y'CbCr 4:2:0 frame to the Theora encoder: */
		theoraEncoder.encodeFrame(theoraFrame);
		
		/* Write all encoded Theora packets to the movie file: */
		Video::TheoraPacket packet;
		while(theoraEncoder.emitPacket(packet))
			{
			/* Add the packet to the Ogg stream: */
			oggStream.packetIn(packet);
			
			/* Write any generated pages to the movie file: */
			Video::OggPage page;
			while(oggStream.pageOut(page))
				page.write(*movieFile);
			}
		++frameIndex;
		
		/* Wait for the next frame: */
		int numSkippedFrames=waitForNextFrame();
		if(numSkippedFrames>0)
			{
			std::cerr<<"MovieSaver: Skipped frames "<<frameIndex<<" to "<<frameIndex+numSkippedFrames-1<<std::endl;
			frameIndex+=numSkippedFrames;
			}
		}
	}
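
Both thread methods above depend on waitForNextFrame() blocking until the next scheduled frame and returning how many frame intervals were missed in the meantime. The sketch below illustrates that contract with std::chrono; the explicit nextFrameTime and frameInterval parameters are assumptions made for a self-contained example, whereas Vrui's MovieSaver keeps this state in its own frame timer.

#include <chrono>
#include <thread>

static int waitForNextFrame(std::chrono::steady_clock::time_point& nextFrameTime,std::chrono::milliseconds frameInterval)
	{
	/* Count the frame intervals missed while the caller was busy: */
	int numSkippedFrames=0;
	std::chrono::steady_clock::time_point now=std::chrono::steady_clock::now();
	while(nextFrameTime+frameInterval<=now)
		{
		nextFrameTime+=frameInterval;
		++numSkippedFrames;
		}
	
	/* Sleep off the remainder of the current interval: */
	nextFrameTime+=frameInterval;
	std::this_thread::sleep_until(nextFrameTime);
	
	return numSkippedFrames;
	}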