Example #1
void
MPEGCodec::decompress(const DataChunk &data)
{
	const int channels = 3;
	const size_t bytes_per_channel = sizeof(unsigned char);
	
	const UInt32 width = _descriptor.getStoredWidth();
	const UInt32 height = _descriptor.getStoredHeight();
	
	const ptrdiff_t stride = bytes_per_channel * channels;
	const size_t rowbytes = width * stride;
	const size_t data_size = rowbytes * height;
	
	DataChunkPtr buf_data = new DataChunk(data_size);
	
	char *data_origin = (char *)buf_data->Data;
	
	
	
	FrameBufferPtr buf = new FrameBuffer(width, height);
	
	buf->insert("R", Slice(MoxFiles::UINT8, data_origin + (bytes_per_channel * 0), stride, rowbytes));
	buf->insert("G", Slice(MoxFiles::UINT8, data_origin + (bytes_per_channel * 1), stride, rowbytes));
	buf->insert("B", Slice(MoxFiles::UINT8, data_origin + (bytes_per_channel * 2), stride, rowbytes));
	
	memset(data_origin, 0, data_size);
	
	buf->attachData(buf_data);
	
	
	storeFrame(buf);
}
Example #2
    void reshape( const Vector2ui& frameSize )
    {
        CameraPtr camera = _engine->getCamera();
        FrameBufferPtr frameBuffer = _engine->getFrameBuffer();

        if( frameBuffer->getSize() == frameSize )
            return;

        frameBuffer->resize( frameSize );
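        // Keep the camera aspect ratio in sync with the new frame buffer size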
        camera->setAspectRatio(
            static_cast< float >( frameSize.x()) /
            static_cast< float >( frameSize.y()));
    }
Example #3
// Set the current render target
/////////////////////////////////////////////////////////////////////////////////
void RenderEngine::BindFrameBuffer(FrameBufferPtr const & fb)
{
    FrameBufferPtr new_fb;
    if (fb)
    {
        new_fb = fb;
    }
    else
    {
        new_fb = this->DefaultFrameBuffer();
    }

    if ((fb != new_fb) || (fb && fb->Dirty()))
    {
        if (cur_frame_buffer_)
        {
            cur_frame_buffer_->OnUnbind();
        }

        cur_frame_buffer_ = new_fb;
        cur_frame_buffer_->OnBind();

        this->DoBindFrameBuffer(cur_frame_buffer_);
    }
}
Example #4
    void render( const RenderInput& renderInput,
                 RenderOutput& renderOutput )
    {
        reshape( renderInput.windowSize );

        _engine->preRender();

        _engine->getCamera()->set(
            renderInput.position, renderInput.target, renderInput.up );

#if(BRAYNS_USE_DEFLECT || BRAYNS_USE_REST)
        if( !_extensionPluginFactory )
            _intializeExtensionPluginFactory( );
        _extensionPluginFactory->execute( );
#endif

        ScenePtr scene = _engine->getScene();
        CameraPtr camera = _engine->getCamera();
        FrameBufferPtr frameBuffer = _engine->getFrameBuffer();
        const Vector2i& frameSize = frameBuffer->getSize();

        if( _parametersManager->getRenderingParameters().getSunOnCamera() )
        {
            LightPtr sunLight = scene->getLight( 0 );
            DirectionalLight* sun =
                dynamic_cast< DirectionalLight* > ( sunLight.get() );
            if( sun )
            {
                sun->setDirection( camera->getTarget() - camera->getPosition() );
                scene->commitLights();
            }
        }

        camera->commit();
        _render( );

        uint8_t* colorBuffer = frameBuffer->getColorBuffer( );
        size_t size =
            frameSize.x( ) * frameSize.y( ) * frameBuffer->getColorDepth( );
        renderOutput.colorBuffer.assign( colorBuffer, colorBuffer + size );

        float* depthBuffer = frameBuffer->getDepthBuffer( );
        size = frameSize.x( ) * frameSize.y( );
        renderOutput.depthBuffer.assign( depthBuffer, depthBuffer + size );

        _engine->postRender();
    }
Example #5
    void _setDefaultCamera()
    {
        ScenePtr scene = _engine->getScene();
        CameraPtr camera = _engine->getCamera();
        FrameBufferPtr frameBuffer = _engine->getFrameBuffer();
        const Vector2i& frameSize = frameBuffer->getSize();

        const Boxf& worldBounds = scene->getWorldBounds();
        const Vector3f& target = worldBounds.getCenter();
        const Vector3f& diag = worldBounds.getSize();
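        // Position the camera at the bounds center, pulled back along -Z by the size of the world bounds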
        Vector3f position = target;
        position.z() -= diag.z();

        const Vector3f up = Vector3f( 0.f, 1.f, 0.f );
        camera->setInitialState( position, target, up );
        camera->setAspectRatio(
            static_cast< float >( frameSize.x()) /
            static_cast< float >( frameSize.y()));
    }
Example #6
		void endIDRender()
		{
			glUseProgram( m_prevProgram );
			glViewport( m_prevViewport[0], m_prevViewport[1], m_prevViewport[2], m_prevViewport[3] );
			m_frameBufferBinding.reset();

			IECore::ImagePrimitivePtr idsImage = m_frameBuffer->getColor()->imagePrimitive();
			const IECore::UIntVectorData *idsData = static_cast<const IECore::UIntVectorData *>( idsImage->variables["Y"].data.get() );
			const std::vector<unsigned int> ids = idsData->readable();

			IECore::ImagePrimitivePtr zImage = m_frameBuffer->getDepth()->imagePrimitive();
			const IECore::FloatVectorData *zData = static_cast<const IECore::FloatVectorData *>( zImage->variables["Z"].data.get() );
			const std::vector<float> z = zData->readable();

			std::map<unsigned int, HitRecord> idRecords;	
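			// Record the minimum and maximum depth seen for each non-zero object ID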
			for( size_t i = 0, e = ids.size(); i < e; i++ )
			{
				if( ids[i] == 0 )
				{
					continue;
				}
				std::map<unsigned int, HitRecord>::iterator it = idRecords.find( ids[i] );
				if( it == idRecords.end() )
				{
					HitRecord r( Imath::limits<float>::max(), Imath::limits<float>::min(), ids[i] );
					it = idRecords.insert( std::pair<unsigned int, HitRecord>( ids[i], r ) ).first;
				}
				it->second.depthMin = std::min( it->second.depthMin, z[i] );
				it->second.depthMax = std::max( it->second.depthMax, z[i] );		
			}

			m_hits.clear();
			m_hits.reserve( idRecords.size() );
			for( std::map<unsigned int, HitRecord>::const_iterator it = idRecords.begin(), eIt = idRecords.end(); it != eIt; it++ )
			{
				m_hits.push_back( it->second );
			}
		}
Example #7
    void render()
    {
        ScenePtr scene = _engine->getScene();
        CameraPtr camera = _engine->getCamera();
        FrameBufferPtr frameBuffer = _engine->getFrameBuffer();
        const Vector2i& frameSize = frameBuffer->getSize();

        _engine->preRender();

#if(BRAYNS_USE_DEFLECT || BRAYNS_USE_REST)
        if( !_extensionPluginFactory )
            _intializeExtensionPluginFactory( );
        _extensionPluginFactory->execute( );
#endif
        if( _parametersManager->getRenderingParameters().getSunOnCamera() )
        {
            LightPtr sunLight = scene->getLight( 0 );
            DirectionalLight* sun =
                dynamic_cast< DirectionalLight* > ( sunLight.get() );
            if( sun )
            {
                sun->setDirection( camera->getTarget() - camera->getPosition() );
                scene->commitLights();
            }
        }

        camera->commit();
        _render( );

        _engine->postRender();

        const Vector2ui windowSize = _parametersManager
            ->getApplicationParameters()
            .getWindowSize();
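        // Resize the frame buffer when the application window size changed during this frame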
        if( windowSize != frameSize )
            reshape(windowSize);
    }
Example #8
	// Set the current render target
	/////////////////////////////////////////////////////////////////////////////////
	void OGLESRenderEngine::DoBindFrameBuffer(FrameBufferPtr const & fb)
	{
		BOOST_ASSERT(fb);

		Viewport const & vp = *fb->GetViewport();
		if ((vp_x_ != vp.left) || (vp_y_ != vp.top) || (vp_width_ != vp.width) || (vp_height_ != vp.height))
		{
			glViewport(vp.left, vp.top, vp.width, vp.height);

			vp_x_ = vp.left;
			vp_y_ = vp.top;
			vp_width_ = vp.width;
			vp_height_ = vp.height;
		}
	}
Example #9
		void beginIDRender()
		{
			m_frameBuffer = new FrameBuffer();
			m_frameBuffer->setColor( new UIntTexture( 128, 128 ) );
			m_frameBuffer->setDepth( new DepthTexture( 128, 128 ) );
			m_frameBuffer->validate();
			m_frameBufferBinding = boost::shared_ptr<FrameBuffer::ScopedBinding>( new FrameBuffer::ScopedBinding( *m_frameBuffer ) );
			
			glGetIntegerv( GL_VIEWPORT, m_prevViewport );
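			// Render IDs into a fixed 128x128 viewport; the saved viewport is restored in endIDRender()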
			glViewport( 0, 0, 128, 128 );
			
			glClearColor( 0.0, 0.0, 0.0, 1.0 );
			glClearDepth( 1.0 );
			glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
			
			const std::vector<StateComponentPtr> &stateComponents = idStateComponents();
			for( std::vector<StateComponentPtr>::const_iterator it = stateComponents.begin(), eIt = stateComponents.end(); it != eIt; it++ )
			{
				m_baseState->add( *it, true /* override */ );
			}
			
			glGetIntegerv( GL_CURRENT_PROGRAM, &m_prevProgram );
			pushIDShader( m_baseState->get<ShaderStateComponent>()->shaderSetup()->shader() );	
		}
Example #10
	// Create the render window
	/////////////////////////////////////////////////////////////////////////////////
	void OGLESRenderEngine::DoCreateRenderWindow(std::string const & name,
		RenderSettings const & settings)
	{
		motion_frames_ = settings.motion_frames;

		FrameBufferPtr win = MakeSharedPtr<OGLESRenderWindow>(name, settings);

		this->FillRenderDeviceCaps();
		this->InitRenderStates();

		win->Attach(FrameBuffer::ATT_Color0,
			MakeSharedPtr<OGLESScreenColorRenderView>(win->Width(), win->Height(), settings.color_fmt));
		if (NumDepthBits(settings.depth_stencil_fmt) > 0)
		{
			win->Attach(FrameBuffer::ATT_DepthStencil,
				MakeSharedPtr<OGLESScreenDepthStencilRenderView>(win->Width(), win->Height(), settings.depth_stencil_fmt));
		}

		this->BindFrameBuffer(win);

		glGenFramebuffers(1, &fbo_blit_src_);
		glGenFramebuffers(1, &fbo_blit_dst_);
	}
Example #11
void
JPEGLSCodec::decompress(const DataChunk &data)
{
	ByteStreamInfo inStream = FromByteArray(data.Data, data.Size);
	
	
	struct JlsParameters info;
	
	JLS_ERROR err = JpegLsReadHeaderStream(inStream, &info);
	
	if(err == OK)
	{
		const int width = info.width;
		const int height = info.height;
		
		assert(info.components == (_channels == JPEGLS_RGBA ? 4 : 3));
		assert(info.colorTransform == COLORXFORM_NONE);
		
		const PixelType pixType = (_depth == JPEGLS_8 ? UINT8 :
									_depth == JPEGLS_10 ? UINT10 :
									_depth == JPEGLS_12 ? UINT12 :
									_depth == JPEGLS_16 ? UINT16 :
									UINT8);
		
		const size_t pixSize = PixelSize(pixType);
		const unsigned int bitDepth = PixelBits(pixType);
		
		assert(info.bitspersample == bitDepth);
									
		const size_t pixelSize = (info.components * PixelSize(pixType));
		const size_t rowBytes = (width * pixelSize);
		const size_t bufSize = (height * rowBytes);
		
		DataChunkPtr frameData = new DataChunk(bufSize);
		
		char *buf = (char *)frameData->Data;
		
		const Box2i dataW = dataWindow();
		
		assert(width == (dataW.max.x - dataW.min.x + 1));
		assert(height == (dataW.max.y - dataW.min.y + 1));
		assert(dataW.min.x == 0);
		assert(dataW.min.y == 0);
			
		FrameBufferPtr frameBuffer = new FrameBuffer(dataW);
		
		frameBuffer->insert("R", Slice(pixType, &buf[0 * pixSize], pixelSize, rowBytes));
		frameBuffer->insert("G", Slice(pixType, &buf[1 * pixSize], pixelSize, rowBytes));
		frameBuffer->insert("B", Slice(pixType, &buf[2 * pixSize], pixelSize, rowBytes));
		
		if(info.components >= 4)
			frameBuffer->insert("A", Slice(pixType, &buf[3 * pixSize], pixelSize, rowBytes));
		
		frameBuffer->attachData(frameData);
		
		
		ByteStreamInfo outStream = FromByteArray(frameData->Data, frameData->Size);
		
		
		err = JpegLsDecodeStream(outStream, inStream, &info);
		
		if(err == OK)
		{
			storeFrame(frameBuffer);
		}
	}
	
	if(err != OK)
		throw MoxMxf::ArgExc("JPEG-LS decompression error");
}
Example #12
	void MultiResLayer::BindBuffers(TexturePtr const & rt0_tex, TexturePtr const & rt1_tex, TexturePtr const & depth_tex,
			TexturePtr const & multi_res_tex)
	{
		UNREF_PARAM(rt1_tex);

		RenderFactory& rf = Context::Instance().RenderFactoryInstance();
		RenderEngine& re = rf.RenderEngineInstance();
		RenderDeviceCaps const & caps = re.DeviceCaps();

		g_buffer_rt0_tex_ = rt0_tex;
		g_buffer_depth_tex_ = depth_tex;

		ElementFormat fmt8;
		if (caps.rendertarget_format_support(EF_ABGR8, 1, 0))
		{
			fmt8 = EF_ABGR8;
		}
		else
		{
			BOOST_ASSERT(caps.rendertarget_format_support(EF_ARGB8, 1, 0));

			fmt8 = EF_ARGB8;
		}

		ElementFormat depth_fmt;
		if (caps.pack_to_rgba_required)
		{
			if (caps.rendertarget_format_support(EF_ABGR8, 1, 0))
			{
				depth_fmt = EF_ABGR8;
			}
			else
			{
				BOOST_ASSERT(caps.rendertarget_format_support(EF_ARGB8, 1, 0));
				depth_fmt = EF_ARGB8;
			}
		}
		else
		{
			if (caps.rendertarget_format_support(EF_R16F, 1, 0))
			{
				depth_fmt = EF_R16F;
			}
			else
			{
				BOOST_ASSERT(caps.rendertarget_format_support(EF_R32F, 1, 0));
				depth_fmt = EF_R32F;
			}
		}

		multi_res_tex_ = multi_res_tex;
		if (multi_res_tex->NumMipMaps() > 1)
		{
			depth_deriative_tex_ = rf.MakeTexture2D(multi_res_tex->Width(0), multi_res_tex->Height(0),
				multi_res_tex->NumMipMaps(), 1, depth_fmt, 1, 0, EAH_GPU_Read | EAH_GPU_Write, nullptr);
			normal_cone_tex_ = rf.MakeTexture2D(multi_res_tex->Width(0), multi_res_tex->Height(0),
				multi_res_tex->NumMipMaps(), 1, fmt8, 1, 0, EAH_GPU_Read | EAH_GPU_Write, nullptr);

			depth_deriative_small_tex_ = rf.MakeTexture2D(multi_res_tex->Width(1), multi_res_tex->Height(1),
				multi_res_tex->NumMipMaps() - 1, 1, EF_R16F, 1, 0, EAH_GPU_Write, nullptr);
			normal_cone_small_tex_ = rf.MakeTexture2D(multi_res_tex->Width(1), multi_res_tex->Height(1),
				multi_res_tex->NumMipMaps() - 1, 1, fmt8, 1, 0, EAH_GPU_Write, nullptr);
	
			multi_res_pingpong_tex_ = rf.MakeTexture2D(multi_res_tex->Width(0), multi_res_tex->Height(0),
				multi_res_tex->NumMipMaps() - 1, 1, multi_res_tex_->Format(), 1, 0, EAH_GPU_Write, nullptr);
		}
		multi_res_fbs_.resize(multi_res_tex->NumMipMaps());
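		// One frame buffer per mip level, each rendering into the matching mip of multi_res_tex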
		for (uint32_t i = 0; i < multi_res_tex->NumMipMaps(); ++ i)
		{
			RenderViewPtr subsplat_ds_view = rf.Make2DDepthStencilRenderView(multi_res_tex->Width(i),
				multi_res_tex->Height(i), EF_D24S8, 1, 0);

			FrameBufferPtr fb = rf.MakeFrameBuffer();
			fb->Attach(FrameBuffer::ATT_Color0, rf.Make2DRenderView(*multi_res_tex, 0, 1, i));
			fb->Attach(FrameBuffer::ATT_DepthStencil, subsplat_ds_view);
			multi_res_fbs_[i] = fb;
		}
	}
Example #13
void
JPEGCodec::decompress(const DataChunk &data)
{
	struct jpeg_error_mgr jerr;
	
	jerr.error_exit = my_error_exit;
	jerr.emit_message = my_emit_message;
	jerr.output_message = my_output_message;
	jerr.format_message = my_format_message;
	jerr.reset_error_mgr = my_reset_error_mgr;
	
	jerr.trace_level = 0;
	jerr.num_warnings = 0;
	jerr.msg_code = 0;
	
	jerr.jpeg_message_table = jpeg_std_message_table;
	jerr.last_jpeg_message = (int) JMSG_LASTMSGCODE - 1;
	
	jerr.addon_message_table = NULL;
	jerr.first_addon_message = 0;
	jerr.last_addon_message = 0;
	
	
	my_source_mgr mgr;
	
	mgr.infile = new MemoryFile(data);
	mgr.buffer = (JOCTET *)malloc(4096 * sizeof(JOCTET));
	mgr.bufferSize = 4096;
	mgr.pub.bytes_in_buffer = 0;
	mgr.pub.next_input_byte = NULL;
	
	if(mgr.buffer == NULL)
		throw MoxMxf::NullExc("out of memory");
	
	mgr.pub.init_source = my_init_source;
	mgr.pub.fill_input_buffer = my_fill_input_buffer;
	mgr.pub.skip_input_data = my_skip_input_data;
	mgr.pub.resync_to_restart = jpeg_resync_to_restart;
	mgr.pub.term_source = my_term_source;
	
	
	struct jpeg_decompress_struct cinfo;
	
	cinfo.err = &jerr;
	
	jpeg_create_decompress(&cinfo);
	
	
	cinfo.src = (jpeg_source_mgr *)&mgr;
	
	
	bool success = true;
	
	try
	{
		const int status = jpeg_read_header(&cinfo, TRUE);
		
		if(status == JPEG_HEADER_OK)
		{
			jpeg_start_decompress(&cinfo);
			
			const JDIMENSION width = cinfo.image_width;
			const JDIMENSION height = cinfo.image_height;
			
			assert(cinfo.num_components == 3);
			assert(cinfo.out_color_space == JCS_RGB);
			
			
			const size_t pixelSize = (3 * PixelSize(UINT8));
			const size_t rowBytes = (width * pixelSize);
			const size_t bufSize = (height * rowBytes);
			
			DataChunkPtr frameData = new DataChunk(bufSize);
			
			char *buf = (char *)frameData->Data;
			
			const Box2i dataW = dataWindow();
			
			assert(width == (dataW.max.x - dataW.min.x + 1));
			assert(height == (dataW.max.y - dataW.min.y + 1));
			assert(dataW.min.x == 0);
			assert(dataW.min.y == 0);
				
			FrameBufferPtr frameBuffer = new FrameBuffer(dataW);
			
			frameBuffer->insert("R", Slice(UINT8, &buf[0], pixelSize, rowBytes));
			frameBuffer->insert("G", Slice(UINT8, &buf[1], pixelSize, rowBytes));
			frameBuffer->insert("B", Slice(UINT8, &buf[2], pixelSize, rowBytes));
			
			frameBuffer->attachData(frameData);
			
			
			JSAMPARRAY scanlines = (JSAMPARRAY)malloc(height * sizeof(JSAMPROW));
			
			if(scanlines == NULL)
				throw MoxMxf::NullExc("out of memory");
			
			for(int y=0; y < height; y++)
			{
				scanlines[y] = (JSAMPROW)(buf + (y * rowBytes));
			}
			
			
			JDIMENSION linesRead = 0;
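			// jpeg_read_scanlines() may return fewer rows than requested, so keep calling until every row is read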
			
			while(linesRead < height)
			{
				linesRead += jpeg_read_scanlines(&cinfo, &scanlines[linesRead], height - linesRead);
			}
			
			
			free(scanlines);
			
			jpeg_finish_decompress(&cinfo);
			
			
			storeFrame(frameBuffer);
		}
		else
			throw MoxMxf::ArgExc("Error reading header");
	}
	catch(...)
	{
		success = false;
	}
	
	
	jpeg_destroy_decompress(&cinfo);
	
	
	free(mgr.buffer);
	
	delete mgr.infile;
	
	
	if(!success)
		throw MoxMxf::ArgExc("JPEG decompression error");
}
Example #14
	void MultiResSILLayer::GBuffer(TexturePtr const & rt0_tex, TexturePtr const & rt1_tex, TexturePtr const & depth_tex)
	{
		RenderFactory& rf = Context::Instance().RenderFactoryInstance();
		RenderEngine& re = rf.RenderEngineInstance();
		RenderDeviceCaps const & caps = re.DeviceCaps();

		g_buffer_texs_[0] = rt0_tex;
		g_buffer_texs_[1] = rt1_tex;
		g_buffer_depth_tex_ = depth_tex;

		ElementFormat fmt8;
		if (caps.rendertarget_format_support(EF_ABGR8, 1, 0))
		{
			fmt8 = EF_ABGR8;
		}
		else
		{
			BOOST_ASSERT(caps.rendertarget_format_support(EF_ARGB8, 1, 0));

			fmt8 = EF_ARGB8;
		}

		ElementFormat depth_fmt;
		if (caps.rendertarget_format_support(EF_R16F, 1, 0))
		{
			depth_fmt = EF_R16F;
		}
		else
		{
			if (caps.rendertarget_format_support(EF_R32F, 1, 0))
			{
				depth_fmt = EF_R32F;
			}
			else
			{
				BOOST_ASSERT(caps.rendertarget_format_support(EF_ABGR16F, 1, 0));

				depth_fmt = EF_ABGR16F;
			}
		}

		uint32_t const width = rt0_tex->Width(0);
		uint32_t const height = rt0_tex->Height(0);

		int const MAX_IL_MIPMAP_LEVELS = 3;

		depth_deriative_tex_ = rf.MakeTexture2D(width / 2, height / 2, MAX_IL_MIPMAP_LEVELS, 1, depth_fmt, 1, 0, EAH_GPU_Read | EAH_GPU_Write, nullptr);
		normal_cone_tex_ = rf.MakeTexture2D(width / 2, height / 2, MAX_IL_MIPMAP_LEVELS, 1, fmt8, 1, 0, EAH_GPU_Read | EAH_GPU_Write, nullptr);
		if (depth_deriative_tex_->NumMipMaps() > 1)
		{
			depth_deriative_small_tex_ = rf.MakeTexture2D(width / 4, height / 4, MAX_IL_MIPMAP_LEVELS - 1, 1, EF_R16F, 1, 0, EAH_GPU_Write, nullptr);
			normal_cone_small_tex_ = rf.MakeTexture2D(width / 4, height / 4, MAX_IL_MIPMAP_LEVELS - 1, 1, fmt8, 1, 0, EAH_GPU_Write, nullptr);
		}
		indirect_lighting_tex_ = rf.MakeTexture2D(width / 2, height / 2, MAX_IL_MIPMAP_LEVELS, 1, EF_ABGR16F, 1, 0,  EAH_GPU_Read | EAH_GPU_Write, nullptr);
		indirect_lighting_pingpong_tex_ = rf.MakeTexture2D(width / 2, height / 2, MAX_IL_MIPMAP_LEVELS - 1, 1, EF_ABGR16F, 1, 0, EAH_GPU_Write, nullptr);
		vpls_lighting_fbs_.resize(MAX_IL_MIPMAP_LEVELS);
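		// One frame buffer per indirect-lighting mip level, each targeting the matching mip of indirect_lighting_tex_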
		for (uint32_t i = 0; i < indirect_lighting_tex_->NumMipMaps(); ++ i)
		{
			RenderViewPtr subsplat_ds_view = rf.Make2DDepthStencilRenderView(indirect_lighting_tex_->Width(i), indirect_lighting_tex_->Height(i),
				EF_D24S8, 1, 0);

			FrameBufferPtr fb = rf.MakeFrameBuffer();
			fb->Attach(FrameBuffer::ATT_Color0, rf.Make2DRenderView(*indirect_lighting_tex_, 0, 1, i));
			fb->Attach(FrameBuffer::ATT_DepthStencil, subsplat_ds_view);
			vpls_lighting_fbs_[i] = fb;
		}
	}