Esempio n. 1
0
// Renders the child's shadow map, converts it into a variance shadow map (VSM),
// then runs two blur passes, ping-ponging between this renderer's frame buffer
// and a temporary RG32F buffer borrowed from the frame-buffer pool.
void VarianceShadowRenderer::render(void *ptr) {
	// Let the wrapped renderer produce the raw shadow map first.
	child->render(ptr);

	// Pass 1: run the VSM shader over the child's shadow map into our frame buffer.
	getFrameBuffer().bind();
	ShaderInstance *sh = getVSMShader();
	sh->setValue(SVTexture0, child->getShadowMap());
	sh->bind();
	GLContext::getContext()->getScreen().draw(MaterialRenderData());

	// Borrow a temporary two-channel float buffer for the blur ping-pong.
	FrameBuffer *temp = GLContext::getContext()->getFrameBufferPool().get(getSize(), false, ImageFormat::RG32F);
	// Wire the inputs: blurs[0] reads our buffer, blurs[1] reads the temp buffer
	// (presumably the two halves of a separable blur — TODO confirm).
	blurs[1]->setValue(SVTexture0, temp->getAttachment(0));
	blurs[0]->setValue(SVTexture0, getFrameBuffer().getAttachment(0));

	// Blur pass A: our frame buffer -> temp.
	temp->bind();
	blurs[0]->bind();
	GLContext::getContext()->getScreen().draw(MaterialRenderData());

	// Blur pass B: temp -> our frame buffer (final result lives here).
	getFrameBuffer().bind();
	blurs[1]->bind();
	GLContext::getContext()->getScreen().draw(MaterialRenderData());

	blurs[1]->unbind();
	// Return the temporary buffer to the pool and let the child drop its map.
	GLContext::getContext()->getFrameBufferPool().add(temp);
	child->discardBuffer();
}
// Copies every pixel of the current frame buffer into the screen-capture buffer.
void captureScreen()
{
	uint* framePointer = getFrameBuffer();
	uint* screenCapture = getScreenCaptureFramePointer();
	// Both buffers share the same row-major layout, so one flat walk over
	// all SCREENBUFFER_HEIGHT * SCREENBUFFER_WIDTH pixels is equivalent to
	// the nested row/column traversal.
	const int pixelCount = SCREENBUFFER_HEIGHT * SCREENBUFFER_WIDTH;
	for (int index = 0; index < pixelCount; index++)
		screenCapture[index] = framePointer[index];
}
Esempio n. 3
0
// Converts one raw input frame into the source's target format.
// The input is treated as a tightly packed 32-bit XRGB image of
// mWidth x mHeight; it is blitted into the target-format frame buffer,
// the frame is recorded as cached, and the frame buffer is returned.
// (is_preroll, data_len and target_sample are unused here.)
const void *VDVideoSourceFLM::streamGetFrame(const void *inputBuffer, uint32 data_len, bool is_preroll, VDPosition frame_num, VDPosition target_sample) {
	VDPixmap srcbm = {0};

	// Describe the incoming buffer: XRGB8888, pitch = width * 4 bytes (no padding).
	srcbm.data		= (void *)inputBuffer;
	srcbm.pitch		= mWidth * 4;
	srcbm.w			= mWidth;
	srcbm.h			= mHeight;
	srcbm.format	= nsVDPixmap::kPixFormat_XRGB8888;

	// Convert/copy into the target format buffer.
	VDPixmapBlt(mTargetFormat, srcbm);

	// Remember which frame the buffer now holds.
	mCachedFrame = frame_num;

	return getFrameBuffer();
}
Esempio n. 4
0
// Selects the decode target format for this image source.
// A format of 0 defaults to 32-bit XRGB. Only XRGB1555, RGB888 and
// XRGB8888 are accepted; anything else is rejected. On success the
// VBitmap wrapper around the frame buffer is re-initialized to match.
bool VideoSourceImages::setTargetFormat(int format) {
	if (!format)
		format = nsVDPixmap::kPixFormat_XRGB8888;

	// Guard clause instead of a fall-through switch: bail out on any
	// format we cannot decode into.
	const bool supported = format == nsVDPixmap::kPixFormat_XRGB1555
	                    || format == nsVDPixmap::kPixFormat_RGB888
	                    || format == nsVDPixmap::kPixFormat_XRGB8888;
	if (!supported)
		return false;

	if (!VideoSource::setTargetFormat(format))
		return false;

	invalidateFrameBuffer();

	mvbFrameBuffer.init((void *)getFrameBuffer(), mpTargetFormatHeader->biWidth, mpTargetFormatHeader->biHeight, mpTargetFormatHeader->biBitCount);
	mvbFrameBuffer.AlignTo4();

	return true;
}
	// Software-renders every registered collision object of the dynamics world
	// into the TGA color buffer via TinyRenderer, and dumps every 8th frame to
	// framebufN.tga for debugging.
	virtual void render(const btDiscreteDynamicsWorld* rbWorld) 
	{
		// Clear the color buffer to opaque white.
		TGAColor clearColor;
		clearColor.bgra[0] = 255;
		clearColor.bgra[1] = 255;
		clearColor.bgra[2] = 255;
		clearColor.bgra[3] = 255;

		clearBuffers(clearColor);

		ATTRIBUTE_ALIGNED16(btScalar modelMat[16]);
		ATTRIBUTE_ALIGNED16(float viewMat[16]);
		ATTRIBUTE_ALIGNED16(float projMat[16]);

		m_camera.getCameraProjectionMatrix(projMat);
		m_camera.getCameraViewMatrix(viewMat);

		// Pick a light direction based on the world's up axis.
		btVector3 lightDirWorld(-5, 200, -40);
		switch (m_upAxis)
		{
		case 1:
			lightDirWorld = btVector3(-50.f, 100, 30);
			break;
		case 2:
			lightDirWorld = btVector3(-50.f, 30, 100);
			break;
		default:
			break;
		}

		lightDirWorld.normalize();

		for (int i = 0; i < rbWorld->getNumCollisionObjects(); i++)
		{
			btCollisionObject* colObj = rbWorld->getCollisionObjectArray()[i];
			int colObjIndex = colObj->getUserIndex();
			int shapeIndex = colObj->getCollisionShape()->getUserIndex();
			if (colObjIndex >= 0 && shapeIndex >= 0)
			{
				TinyRenderObjectData* renderObj = 0;

				int* cptr = m_swInstances[colObjIndex];
				if (cptr)
				{
					int c = *cptr;
					TinyRenderObjectData** sptr = m_swRenderObjects[c];
					if (sptr)
					{
						renderObj = *sptr;
						// Sync the object transform.
						const btTransform& tr = colObj->getWorldTransform();
						tr.getOpenGLMatrix(modelMat);

						// Copy the 4x4 matrices with transposed indexing.
						// Loop indices renamed (row/col) so the outer object
						// index 'i' is no longer shadowed.
						for (int row = 0; row < 4; row++)
						{
							for (int col = 0; col < 4; col++)
							{
								renderObj->m_projectionMatrix[row][col] = projMat[row + 4 * col];
								renderObj->m_modelMatrix[row][col] = modelMat[row + 4 * col];
								renderObj->m_viewMatrix[row][col] = viewMat[row + 4 * col];
							}
						}
						// Loop-invariant per-object state, hoisted out of the
						// 4x4 loop (was previously re-assigned 16 times).
						renderObj->m_localScaling = colObj->getCollisionShape()->getLocalScaling();
						renderObj->m_lightDirWorld = lightDirWorld;
						TinyRenderer::renderObject(*renderObj);
					}
				}
			}
		}

		// Periodically dump the frame buffer to disk for debugging.
		static int counter = 0;
		counter++;
		if ((counter & 7) == 0)
		{
			char filename[1024];
			// snprintf: bounded formatting instead of sprintf.
			snprintf(filename, sizeof(filename), "framebuf%d.tga", counter);
			m_rgbColorBuffer.flip_vertically();
			getFrameBuffer().write_tga_file(filename, true);
		}
	}
	virtual void render(const btDiscreteDynamicsWorld* rbWorld)
	{
		OpenGLGuiHelper::render(rbWorld);

		//clear the color buffer
		TGAColor clearColor;
		clearColor.bgra[0] = 255;
		clearColor.bgra[1] = 255;
		clearColor.bgra[2] = 255;
		clearColor.bgra[3] = 255;

		clearBuffers(clearColor);

		ATTRIBUTE_ALIGNED16(btScalar modelMat[16]);
		ATTRIBUTE_ALIGNED16(float viewMat[16]);
		ATTRIBUTE_ALIGNED16(float projMat[16]);

		CommonRenderInterface* render = getRenderInterface();

		render->getActiveCamera()->getCameraProjectionMatrix(projMat);
		render->getActiveCamera()->getCameraViewMatrix(viewMat);

		btVector3 lightDirWorld(-5, 200, -40);
		switch (1)  //app->getUpAxis())
		{
			case 1:
				lightDirWorld = btVector3(-50.f, 100, 30);
				break;
			case 2:
				lightDirWorld = btVector3(-50.f, 30, 100);
				break;
			default:
			{
			}
		};

		lightDirWorld.normalize();

		for (int i = 0; i < rbWorld->getNumCollisionObjects(); i++)
		{
			btCollisionObject* colObj = rbWorld->getCollisionObjectArray()[i];
			int colObjIndex = colObj->getUserIndex();
			int shapeIndex = colObj->getCollisionShape()->getUserIndex();
			if (colObjIndex >= 0 && shapeIndex >= 0)
			{
				TinyRenderObjectData* renderObj = 0;

				int* cptr = m_swInstances[colObjIndex];
				if (cptr)
				{
					int c = *cptr;
					TinyRenderObjectData** sptr = m_swRenderObjects[c];
					if (sptr)
					{
						renderObj = *sptr;
						//sync the object transform
						const btTransform& tr = colObj->getWorldTransform();
						tr.getOpenGLMatrix(modelMat);

						for (int i = 0; i < 4; i++)
						{
							for (int j = 0; j < 4; j++)
							{
								renderObj->m_projectionMatrix[i][j] = projMat[i + 4 * j];
								renderObj->m_modelMatrix[i][j] = modelMat[i + 4 * j];
								renderObj->m_viewMatrix[i][j] = viewMat[i + 4 * j];
							}
						}
						renderObj->m_localScaling = colObj->getCollisionShape()->getLocalScaling();
						renderObj->m_lightDirWorld = lightDirWorld;
						renderObj->m_lightAmbientCoeff = 0.6;
						renderObj->m_lightDiffuseCoeff = 0.35;
						renderObj->m_lightSpecularCoeff = 0.05;
						TinyRenderer::renderObject(*renderObj);
					}
				}
			}
		}

		for (int y = 0; y < m_swHeight; ++y)
		{
			unsigned char* pi = m_image + (y)*m_swWidth * 3;
			for (int x = 0; x < m_swWidth; ++x)
			{
				const TGAColor& color = getFrameBuffer().get(x, y);
				pi[0] = color.bgra[2];
				pi[1] = color.bgra[1];
				pi[2] = color.bgra[0];
				pi += 3;
			}
		}
		render->activateTexture(m_textureHandle);
		render->updateTexture(m_textureHandle, m_image);

		static int counter = 0;
		counter++;
		if ((counter & 7) == 0)
		{
			char filename[1024];
			sprintf(filename, "framebuf%d.tga", counter);
			getFrameBuffer().write_tga_file(filename, true);
		}
		float color[4] = {1, 1, 1, 1};
		m_primRenderer->drawTexturedRect(0, 0, m_swWidth, m_swHeight, color, 0, 0, 1, 1, true);
	}
Esempio n. 7
0
//static bool first = true;	// TODO: this is a hack for testing.
void V4L2CaptureStream::threadMain()// throws CaptureException;
{
	int res;

	//if (first)
	{	printf("V4L2CaptureStream::threadMain()\n");
		fflush(stdout);
		streamThrottle.waitUntilStart();
		//first = false;
		printf("V4L2CaptureStream streamThrottle.waitUntilStart completed\n");
		fflush(stdout);
	}




	res = fg2_startCapture(fg);
	if (res != 0)
		FailWithException("fg2_startCapture failed", res);

	while (!disposing)
	{
	struct my_buffer* frame = NULL;

	// TODO: support double-buffering.
//	printf("V4L2CaptureStream::fg2_grab...\n");
	//fflush(stdout);	
    frame = getFrameBuffer( fg );
//	printf("V4L2CaptureStream::fg2_grab: %lx\n", (unsigned long) frame);
	//fflush(stdout);	
    if (frame == 0)
    	FailWithException("getFrameBuffer failed", -1);	// TODO: notify observer instead.


	void *data = frame->start;
	int width = format.getWidth();
	int height = format.getHeight();

	switch (formatTypeV4L2)
	{
		// TODO: other formats
       case V4L2_PIX_FMT_RGB24:
			if (observer != 0)
			{	
				Image image = Image(format, (unsigned char *) data, width * height * 3);
				observer->onNewImage(this, &image);	
			}			
			break;
       case V4L2_PIX_FMT_YUYV:
			if (observer != 0)
			{	
				// 4 bytes = 2 pixels
				Image image = Image(format, yuyvToRGB((unsigned char *) data, width, height), width * height * 3, true);
				observer->onNewImage(this, &image);	
			}			
			break;
       case V4L2_PIX_FMT_RGB32:
			if (observer != 0)
			{	
				Image image = Image(format, (unsigned char *) data, width * height * 4);
				observer->onNewImage(this, &image);	
			}			
			break;
 		case V4L2_PIX_FMT_YUV420:
	 		{	if (rgbbufsize == 0)
	 			{	rgbbufsize = width * height * 3;
	 				rgbbuf = new unsigned char[rgbbufsize];
	 			}
				yuv2rgb_buf((unsigned char *) data, width, height, rgbbuf);
				if (observer != 0)
				{	
					Image image = Image(format, rgbbuf, rgbbufsize);
					observer->onNewImage(this, &image);	
				}			
				
			}
			break;
		default:
			printf("unknown or unsupported: %i\n", formatTypeV4L2);
			//FailWithException("unknown or unsupported format", formatTypeV4L2);
	}
	giveBackFrameBuffer(fg, frame);
	}
	res = fg2_stopCapture(fg);
	if (res != 0)
		FailWithException("fg2_stopCapture failed", res);

	fg2_delete(&fg);
	if (rgbbuf != 0)
	{	delete[] rgbbuf;
		rgbbuf = 0;
	}
	disposed = true;
	

}
Esempio n. 8
0
	/**
	 * @brief Loads the pipeline setup (target textures, frame buffers and
	 * render targets) from the given XML element.
	 *
	 * Each malformed child element is reported through the manager's log and
	 * skipped; processing then continues with the next element.
	 * @param xml Element whose <Texture>, <FrameBuffer> and <RenderTarget>
	 * children describe the setup.
	 * @return Always true (per-element errors are logged, not propagated).
	 */
	bool Pipeline::loadSetup(TiXmlElement *xml)
	{
		// Load textures
		for (TiXmlElement *element = xml->FirstChildElement("Texture");
		     element != 0;
		     element = element->NextSiblingElement("Texture"))
		{
			// Get texture name
			const char *name = element->Attribute("name");
			if (!name)
			{
				getManager()->getLog()->error("%s: Texture name missing.",
				                              getName().c_str());
				continue;
			}
			// Get texture format (defaults to RGBA8 when absent)
			const char *formatstr = element->Attribute("format");
			TextureFormat::List format = TextureFormat::RGBA8;
			if (formatstr)
			{
				format = TextureFormat::fromString(formatstr);
				if (format == TextureFormat::Invalid)
				{
					getManager()->getLog()->error("%s: Invalid texture format \"%s\".",
					                              getName().c_str(),
					                              formatstr);
					continue;
				}
			}
			TargetTextureInfo texture;
			texture.name = name;
			parseSize(element, texture.relsize, texture.abssize);
			// Create texture resource; final size is relative-to-target size
			// plus an absolute offset.
			unsigned int texturesize[2];
			texturesize[0] = (unsigned int)(texture.relsize[0] * targetsize[0])
			               + texture.abssize[0];
			texturesize[1] = (unsigned int)(texture.relsize[1] * targetsize[1])
			               + texture.abssize[1];
			Texture::Ptr texres = getManager()->createResource<Texture>("Texture");
			texres->set2D(texturesize[0], texturesize[1], format);
			texres->setMipmapsEnabled(false);
			// TODO: Configurable filtering
			texres->setFiltering(TextureFiltering::Nearest);
			texture.texture = texres;
			// Add the texture to the texture list
			targettextures.push_back(texture);
		}
		// Load frame buffer resources
		for (TiXmlElement *element = xml->FirstChildElement("FrameBuffer");
		     element != 0;
		     element = element->NextSiblingElement("FrameBuffer"))
		{
			// Get texture name
			const char *name = element->Attribute("name");
			if (!name)
			{
				getManager()->getLog()->error("%s: Texture name missing.",
				                              getName().c_str());
				continue;
			}
			FrameBufferInfo fb;
			fb.name = name;
			parseSize(element, fb.relsize, fb.abssize);
			// Get depthbuffer (attribute "depthbuffer" == "true" enables it)
			bool depthbuffer = false;
			const char *depthbufferstr = element->Attribute("depthbuffer");
			if (depthbufferstr && !strcmp(depthbufferstr, "true"))
			{
				depthbuffer = true;
			}
			// Create framebuffer, sized like textures above
			fb.fb = getManager()->createResource<FrameBuffer>("FrameBuffer");
			unsigned int fbsize[2];
			fbsize[0] = (unsigned int)(fb.relsize[0] * targetsize[0])
			          + fb.abssize[0];
			fbsize[1] = (unsigned int)(fb.relsize[1] * targetsize[1])
			          + fb.abssize[1];
			fb.fb->setSize(fbsize[0], fbsize[1], depthbuffer);
			framebuffers.push_back(fb);
		}
		// Load render targets
		for (TiXmlElement *element = xml->FirstChildElement("RenderTarget");
		     element != 0;
		     element = element->NextSiblingElement("RenderTarget"))
		{
			// Get target name
			const char *name = element->Attribute("name");
			if (!name)
			{
				getManager()->getLog()->error("%s: RenderTarget name missing.",
				                              getName().c_str());
				continue;
			}
			// Get framebuffer (must reference one loaded above)
			const char *fbname = element->Attribute("framebuffer");
			if (!fbname)
			{
				getManager()->getLog()->error("%s: RenderTarget framebuffer name missing.",
				                              getName().c_str());
				continue;
			}
			FrameBuffer::Ptr fb = getFrameBuffer(fbname);
			if (!fb)
			{
				getManager()->getLog()->error("%s: RenderTarget framebuffer not found.",
				                              getName().c_str());
				continue;
			}
			// Create render target
			RenderTargetInfo target;
			target.target = getManager()->createResource<RenderTarget>("RenderTarget");
			target.name = name;
			target.target->setFrameBuffer(fb);
			// Depth buffer (optional single <DepthBuffer> child)
			// NOTE(review): a failed DepthBuffer texture lookup 'continue's the
			// outer loop, discarding the whole RenderTarget — confirm intended.
			TiXmlElement *depthbufferelem = element->FirstChildElement("DepthBuffer");
			if (depthbufferelem)
			{
				const char *texname = depthbufferelem->Attribute("texture");
				if (!texname)
				{
					getManager()->getLog()->error("%s: DepthBuffer texture missing.",
					                              getName().c_str());
					continue;
				}
				Texture::Ptr texture = getTargetTexture(texname);
				if (!texture)
				{
					getManager()->getLog()->error("%s: DepthBuffer texture not found.",
					                              getName().c_str());
					continue;
				}
				target.target->setDepthBuffer(texture);
			}
			// Color buffers (zero or more <ColorBuffer> children; a bad one
			// only skips itself, since 'continue' targets this inner loop)
			for (TiXmlElement *colorbufferelem = element->FirstChildElement("ColorBuffer");
			     colorbufferelem != 0;
			     colorbufferelem = colorbufferelem->NextSiblingElement("ColorBuffer"))
			{
				const char *texname = colorbufferelem->Attribute("texture");
				if (!texname)
				{
					getManager()->getLog()->error("%s: ColorBuffer texture missing.",
					                              getName().c_str());
					continue;
				}
				Texture::Ptr texture = getTargetTexture(texname);
				if (!texture)
				{
					getManager()->getLog()->error("%s: ColorBuffer texture not found.",
					                              getName().c_str());
					continue;
				}
				target.target->addColorBuffer(texture);
			}
			rendertargets.push_back(target);
		}
		return true;
	}
Esempio n. 9
0
/**
 @brief Update the device data (the data going back to the client).

 Grabs one frame from the frame grabber, fills in the Player camera data
 header, copies the pixels (optionally swapping the R and B channels),
 publishes the result, and returns the buffer to the grabber.
*/
void CameraV4L2::WriteData()
{
    size_t image_size, size;
    unsigned char * ptr1, * ptr2;

    struct my_buffer *v4lBuffer = getFrameBuffer(fg);
    if (v4lBuffer==NULL)
        exit(1);    // no buffer available: treated as fatal (NOTE(review): consider reporting instead)
    ptr1 = (unsigned char *)v4lBuffer->start;
    ptr2 = this->data.image;

  // Compute size of image
    image_size = this->width * this->height * this->depth / 8;

  // Set the image properties (multi-byte fields go out in network byte order)
    this->data.width = htons(this->width);
    this->data.height = htons(this->height);
    this->data.bpp = this->depth;
    this->data.compression = PLAYER_CAMERA_COMPRESS_RAW;
    this->data.image_size = htonl(image_size);

    if (image_size > sizeof(this->data.image)){
        // Both values are size_t; %d would read them as int (wrong on
        // platforms where sizeof(size_t) != sizeof(int)), so cast to
        // unsigned long and print with %lu.
        PLAYER_ERROR2("image_size <= sizeof(this->data.image) failed: %lu > %lu",
               (unsigned long) image_size, (unsigned long) sizeof(this->data.image));
    }
    assert(image_size <= sizeof(this->data.image));
    if (image_size > (size_t) v4lBuffer->length){
        PLAYER_WARN("Frame size is smaller than expected");
        image_size = (size_t) v4lBuffer->length;
    }
    //assert(image_size <= (size_t) v4lBuffer->length);


    if (!flip_rb) {
        memcpy(ptr2, ptr1, image_size);
    } else {
        // Swap the R and B channels pixel by pixel.
        int imgSize = ((this->width) * (this->height));
        int i;
        switch (v4l2_type_id)
        {
            case V4L2_PIX_FMT_RGB24:
            case V4L2_PIX_FMT_BGR24:
                for (i = 0; i < imgSize; i++)
                {
                    ptr2[0] = ptr1[2];
                    ptr2[1] = ptr1[1];
                    ptr2[2] = ptr1[0];
                    ptr1 += 3;
                    ptr2 += 3;
                }
                break;
            case V4L2_PIX_FMT_RGB32:
            case V4L2_PIX_FMT_BGR32:
                for (i = 0; i < imgSize; i++)
                {
                    ptr2[0] = ptr1[2];
                    ptr2[1] = ptr1[1];
                    ptr2[2] = ptr1[0];
                    ptr2[3] = ptr1[3];
                    ptr1 += 4;
                    ptr2 += 4;
                }
                break;
            default:
                // Unknown layout: copy unchanged.
                memcpy(ptr2, ptr1, image_size);
        }
    }

  // Copy data to server: header plus only the image bytes actually used
    size = sizeof(this->data) - sizeof(this->data.image) + image_size;

    struct timeval timestamp;
    timestamp.tv_sec = this->tsec;
    timestamp.tv_usec = this->tusec;
    PutData((void*) &this->data, size, &timestamp);

    giveBackFrameBuffer(fg, v4lBuffer);

    return;
}
Esempio n. 10
0
// Decodes one image file's bytes (PNG, JPEG, BMP, Maya IFF or TGA) into the
// frame buffer and returns it. A zero-length request returns the already
// cached buffer. On the very first frame the frame buffer is allocated and
// the format header initialized, and NULL is returned (constructor probe).
// Throws MyError for unknown formats, mismatched dimensions or decode errors.
const void *VideoSourceImages::streamGetFrame(const void *inputBuffer, uint32 data_len, bool is_preroll, VDPosition frame_num, VDPosition target_sample) {
	// We may get a zero-byte frame if we already have the image.

	if (!data_len)
		return getFrameBuffer();

	int w, h;
	bool bHasAlpha;

	// Probe the header of each supported format in turn; first match wins.
	bool bIsPNG = false;
	bool bIsJPG = false;
	bool bIsBMP = false;
	bool bIsIFF = false;
	bool bIsTGA = false;

	bIsPNG = VDDecodePNGHeader(inputBuffer, data_len, w, h, bHasAlpha);
	if (!bIsPNG) {
		bIsJPG = VDIsJPEGHeader(inputBuffer, data_len);
		if (!bIsJPG) {
			bIsBMP = DecodeBMPHeader(inputBuffer, data_len, w, h, bHasAlpha);
			if (!bIsBMP) {
				bIsIFF = VDIsMayaIFFHeader(inputBuffer, data_len);
				if (!bIsIFF)
					bIsTGA = DecodeTGAHeader(inputBuffer, data_len, w, h, bHasAlpha);
			}
		}
	}

	if (!bIsBMP && !bIsTGA && !bIsJPG && !bIsPNG && !bIsIFF)
		throw MyError("Image file must be in PNG, Windows BMP, truecolor TARGA format, MayaIFF, or sequential JPEG format.");

	// JPEG and IFF need the decoder to learn the dimensions.
	if (bIsJPG) {
		if (!mpJPEGDecoder)
			mpJPEGDecoder = VDCreateJPEGDecoder();
		mpJPEGDecoder->Begin(inputBuffer, data_len);
		mpJPEGDecoder->DecodeHeader(w, h);
	}

	VDPixmap pxIFF;
	if (bIsIFF) {
		if (!mpIFFDecoder)
			mpIFFDecoder = VDCreateImageDecoderIFF();
		pxIFF = mpIFFDecoder->Decode(inputBuffer, data_len);
		w = pxIFF.w;
		h = pxIFF.h;
	}

	// Check image header.

	VDAVIBitmapInfoHeader *pFormat = getImageFormat();

	if (getFrameBuffer()) {
		// All images in the sequence must share the first image's dimensions.
		if (w != pFormat->biWidth || h != pFormat->biHeight) {
			vdfastvector<wchar_t> errBuf;

			throw MyError("Image \"%ls\" (%dx%d) doesn't match the image dimensions of the first image (%dx%d)."
					, mpParent->ComputeFilename(errBuf, frame_num), w, h, pFormat->biWidth, pFormat->biHeight);
		}

	} else {
		// First image: allocate the frame buffer and fill in the header.
		if (!AllocFrameBuffer(w * h * 4))
			throw MyMemoryError();

		pFormat->biSize				= sizeof(BITMAPINFOHEADER);
		pFormat->biWidth			= w;
		pFormat->biHeight			= h;
		pFormat->biPlanes			= 1;
		pFormat->biCompression		= 0xFFFFFFFFUL;
		pFormat->biBitCount			= 0;
		pFormat->biSizeImage		= 0;
		pFormat->biXPelsPerMeter	= 0;
		pFormat->biYPelsPerMeter	= 0;
		pFormat->biClrUsed			= 0;
		pFormat->biClrImportant		= 0;

		// special case for initial read in constructor

		return NULL;
	}

	if (bIsJPG) {
		int format;

		switch(mvbFrameBuffer.depth) {
		case 16:	format = IVDJPEGDecoder::kFormatXRGB1555;	break;
		case 24:	format = IVDJPEGDecoder::kFormatRGB888;		break;
		case 32:	format = IVDJPEGDecoder::kFormatXRGB8888;	break;
		default:
			// Previously 'format' was left uninitialized for any other depth
			// (undefined behavior); fail explicitly instead.
			throw MyError("Unsupported frame buffer depth for JPEG decoding.");
		}

		// Negative pitch + last-row pointer flips the image vertically.
		mpJPEGDecoder->DecodeImage((char *)mvbFrameBuffer.data + mvbFrameBuffer.pitch * (mvbFrameBuffer.h - 1), -mvbFrameBuffer.pitch, format);
		mpJPEGDecoder->End();
	}

	if (bIsIFF)
		VDPixmapBlt(getTargetFormat(), pxIFF);

	if (bIsBMP)
		DecodeBMP(inputBuffer, data_len, mvbFrameBuffer);
	if (bIsTGA)
		DecodeTGA(inputBuffer, data_len, mvbFrameBuffer);
	if (bIsPNG) {
		if (!mpPNGDecoder)
			mpPNGDecoder = VDCreateImageDecoderPNG();

		PNGDecodeError err = mpPNGDecoder->Decode(inputBuffer, data_len);

		if (err) {
			if (err == kPNGDecodeOutOfMemory)
				throw MyMemoryError();

			vdfastvector<wchar_t> errBuf;

			throw MyError("Error decoding \"%ls\": %ls\n", mpParent->ComputeFilename(errBuf, frame_num), VDLoadString(0, kVDST_PNGDecodeErrors, err));
		}

		VDPixmapBlt(VDAsPixmap(mvbFrameBuffer), mpPNGDecoder->GetFrameBuffer());
	}

	// Remember which frame the buffer now holds.
	mCachedFrame = frame_num;

	return mpFrameBuffer;
}