VideoCamera* VideoCamera::create()
{
    // two-step construction: allocate, initialize, then hand ownership to the autorelease pool
    VideoCamera* pRet = new VideoCamera();
    pRet->init();
    pRet->autorelease();
    return pRet;
}
Example #2
/*! Close a video camera stream
	@name M:close
*/
int lua_video_camera_close(lua_State *L) {
	VideoCamera *s = Glue<VideoCamera>::checkto(L, 1);
	if(s) {
		s->close();
	}
	else {
		luaL_error(L, "VideoCamera.close: invalid arguments");
	}
	// the camera userdata (argument 1) is still on the stack, so it is returned to Lua
	return 1;
}
Example #3
void bordaiApp::keyDown( KeyEvent event ) {
	char c = event.getChar();
	if(c == 'f') {
		setFullScreen( !isFullScreen() );
	}
	if(c == 'p' || c == 'P') {
		mCamera.togglePause();
	}
	if(c == ' ') {
		// restart the capture so a new camera resolution takes effect
		mCamera.stopCapturing();
		mCamera.startCapturing(mCameraLensSize.x, mCameraLensSize.y);
	}
}
Example #4
/*! Set the pixel format
	Possible values are video.RGBA and video.RGB.
	@name M.format
	@LuaMethod GETSET
*/
int lua_video_camera_format(lua_State *L) {
	VideoCamera *s = Glue<VideoCamera>::checkto(L, 1);
	if(s) {
		if(lua::is<int>(L, 2)) {
			// setter path: a value (video.RGBA or video.RGB) was passed in
			s->set_pixel_format((video::PixelFormat)lua::to<int>(L, 2));
		}
		else {
			// getter path: push the current pixel format and return it
			lua::push<int>(L, (int)s->get_pixel_format());
			return 1;
		}
	}
	else {
		luaL_error(L, "VideoCamera.format: invalid arguments");
	}
	return 0;
}
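
Since the binding is declared @LuaMethod GETSET, the value is normally read and written as a property on the camera object rather than called as a method. A minimal Lua sketch, assuming a video module and an already-created camera object cam (both placeholder names):

-- hypothetical usage; 'video' and 'cam' are assumed names
cam.format = video.RGBA   -- setter path: calls set_pixel_format()
print(cam.format)         -- getter path: returns the current pixel format as an integer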
Example #5
/*! Get the internal array data
	@ret The array
	@name M:array
*/
int lua_video_camera_array(lua_State *L) {
	VideoCamera *s = Glue<VideoCamera>::checkto(L, 1);
	if(s) {
		// grab the latest frame before handing the array back
		s->next_frame();

		// fetch the array stored in the userdata's environment table
		lua_getfenv(L, 1);                          // push the environment table
		lua_pushstring(L, LUA_VIDEO_ARRAY_FIELD);   // push the field name
		lua_rawget(L, -2);                          // pop the name, push env[field]
		lua_insert(L, -2);                          // move the array below the env table
		lua_pop(L, 1);                              // pop the env table, leaving the array on top
	}
	else {
		luaL_error(L, "VideoCamera.array: invalid arguments");
	}
	return 1;
}
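
A minimal Lua sketch of pulling frames, assuming an already-opened camera object cam (placeholder name):

-- hypothetical usage; 'cam' is an assumed name
local frame = cam:array()   -- advances to the next frame and returns the internal array
-- 'frame' can then be handed to whatever consumes pixel data (e.g. a texture upload)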
Example #6
void bordaiApp::update() {
	setFrameRate(mFrameRate);
	mWindowSize = getWindowSize();
	// hand the latest captured frame to both detectors
	mCamera.bufferCaptured(mHaarDetector, mStoryCardDetector);
	mScreenView.setAspectRatio(getWindowAspectRatio());
	mScreenView.setOrtho(0., mCameraLensSize.x, mCameraLensSize.y, 0., 0., 1.);
}
Example #7
/*! Open a video camera stream
	@param [uid] Optional device unique ID (string)
	@param [dim] The dimensions (defaults to 720x480)
	@name M:open
*/
int lua_video_camera_open(lua_State *L) {
	VideoCamera *s = Glue<VideoCamera>::checkto(L, 1);
	if(s) {
		if(lua_type(L, 2) == LUA_TSTRING) {
			// open a specific device by UID, with optional {w, h} dimensions at argument 3
			const char *uid = lua_tostring(L, 2);
			int dim[] = {720, 480};
			lua::to_vec(L, 3, 2, dim);
			s->open(uid, dim[0], dim[1]);
		}
		else {
			// open the default device, with optional {w, h} dimensions at argument 2
			int dim[] = {720, 480};
			lua::to_vec(L, 2, 2, dim);
			s->open(dim[0], dim[1]);
		}
	}
	else {
		luaL_error(L, "VideoCamera.open: invalid arguments");
	}
	return 1;
}
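
A minimal Lua sketch of the open/close lifecycle; the video module name and constructor are assumptions here, only the method signatures come from the bindings above:

-- hypothetical usage; the 'video' module and constructor name are assumed
local cam = video.VideoCamera()
cam:open("<device-uid>", {640, 480})  -- open a specific device with explicit dimensions
-- cam:open()                         -- or: open the default device at 720x480
-- ... read frames with cam:array() ...
cam:close()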
Example #8
void bordaiApp::setup() {
	mParams = params::InterfaceGl("bordai", Vec2i(300, 175));
	mParams.addParam("Screen width", &mWindowSize.x, "", true);
	mParams.addParam("Screen height", &mWindowSize.y, "", true);
	mParams.addParam("Camera width", &mCameraLensSize.x, "min=128 max=1024 step=64 keyIncr=W keyDecr=w");
	mParams.addParam("Camera height", &mCameraLensSize.y, "min=128 max=1024 step=64 keyIncr=H keyDecr=h");
	mParams.addSeparator();
	mParams.addText("Press space to apply new camera resolution");
	mParams.addText("Press 'p' to pause/play camera");
	mParams.addSeparator();
	mParams.addParam("Framerate", &mFrameRate, "min=5.0 max=70.0 step=5.0 keyIncr=+ keyDecr=-");
	
	mHaarDetector = HaarDetector( getResourcePath( "haarcascade_frontalface_alt2.xml" ) );
	mCamera.startCapturing(mCameraLensSize.x, mCameraLensSize.y);
}
Example #9
VideoCamera *VideoCamera::parseLine(Ogre::SceneManager *mSceneMgr, Ogre::Camera *camera, SerializedRig *truck, parsecontext_t &c)
{
	try
	{
		int nz=-1, ny=-1, nref=-1, ncam=-1, lookto=-1, texx=256, texy=256, crole=-1, cmode=-1;
		float fov=-1.0f, minclip=-1.0f, maxclip=-1.0f, offx=0.0f, offy=0.0f, offz=0.0f, rotx=0.0f, roty=0.0f, rotz=0.0f;
		char materialname[256] = "";
		char vidCamName[256] = "";
		
		Ogre::StringVector args;
		// argument order: nref, nz, ny, ncam, lookto, offx, offy, offz, rotx, roty, rotz, fov, texx, texy, minclip, maxclip, crole, cmode, materialname[, name]
		int n = truck->parse_args(c, args, 19);
		nref    = truck->parse_node_number(c, args[0]);
		nz      = truck->parse_node_number(c, args[1]);
		ny      = truck->parse_node_number(c, args[2]);
		ncam    = PARSEINT(args[3]);
		lookto  = PARSEINT(args[4]);
		offx    = PARSEREAL(args[5]);
		offy    = PARSEREAL(args[6]);
		offz    = PARSEREAL(args[7]);
		rotx    = PARSEREAL(args[8]);
		roty    = PARSEREAL(args[9]);
		rotz    = PARSEREAL(args[10]);
		fov     = PARSEREAL(args[11]);
		texx    = PARSEINT (args[12]);
		texy    = PARSEINT (args[13]);
		minclip = PARSEREAL(args[14]);
		maxclip = PARSEREAL(args[15]);
		crole   = PARSEINT (args[16]);
		cmode   = PARSEINT (args[17]);
		strncpy(materialname, args[18].c_str(), 255);
		if(n > 19)
			strncpy(vidCamName, args[19].c_str(), 255);
		else
			strncpy(vidCamName, materialname, 255); // fallback, use materialname

		//if (texx <= 0 || !isPowerOfTwo(texx) || texy <= 0 || !isPowerOfTwo(texy))
		// disabled isPowerOfTwo, as it can be a renderwindow now with custom resolution
		if (texx <= 0 || texy <= 0)
		{
			truck->parser_warning(c, "Wrong texture size definition. trying to continue ...");
			return 0;
		}

		if (minclip < 0 || minclip > maxclip || maxclip < 0)
		{
			truck->parser_warning(c, "Wrong clipping definition. trying to continue ...");
			return 0;
		}

		if(cmode < -2 )
		{
			truck->parser_warning(c, "Camera Mode setting incorrect, trying to continue ...");
			return 0;
		}

		if(crole < -1 || crole >1)
		{
			truck->parser_warning(c, "Camera Role (camera, trace, mirror) setting incorrect, trying to continue ...");
			return 0;
		}

		MaterialPtr mat = MaterialManager::getSingleton().getByName(materialname);
		if(mat.isNull())
		{
			truck->parser_warning(c, "unknown material: '"+String(materialname)+"', trying to continue ...");
			return 0;
		}

		// clone the material to stay unique
		String newMaterialName = String(truck->truckname) + materialname + "_" + TOSTRING(counter++);
		MaterialPtr matNew = mat->clone(newMaterialName);

		// we need to find and replace any materials that could come afterwards
		if(truck && truck->materialReplacer)
			truck->materialReplacer->addMaterialReplace(mat->getName(), newMaterialName);

		VideoCamera *v  = new VideoCamera(mSceneMgr, camera, truck);
		v->fov          = fov;
		v->minclip      = minclip;
		v->maxclip      = maxclip;
		v->nz           = nz;
		v->ny           = ny;
		v->nref         = nref;
		v->offset       = Vector3(offx, offy, offz);
		v->switchoff    = cmode;            // performance switch-off mode -> needs fix, only "always on" is supported so far
		v->materialName = newMaterialName;
		v->vidCamName   = vidCamName;
		v->mirrorSize   = Vector2(texx, texy);

		if(crole != 1)                     //rotate camera picture 180°, skip for mirrors
			rotz += 180;

		v->rotation     = Quaternion(Degree(rotz), Vector3::UNIT_Z) * Quaternion(Degree(roty), Vector3::UNIT_Y) * Quaternion(Degree(rotx), Vector3::UNIT_X);

		if (ncam >= 0)                     // set alternative camposition (optional)
			v->camNode  = ncam;
		else
			v->camNode  = nref;

		if (lookto >= 0)                   // set alternative lookat position (optional)
		{
			v->lookat   = lookto;
			crole       = 0;               // this is a tracecam, overwrite mode setting
		}
		else
			v->lookat   = -1;

		v->camRole      = crole;            // -1 = camera, 0 = trackcam, 1 = mirror

		v->init();

		return v;
	} catch(ParseException &)
	{
		return 0;
	}
	return 0;
}
Example #10
void bordaiApp::draw() {
	gl::enable( GL_TEXTURE_2D );
	gl::enableAlphaBlending();
	
	gl::clear( Color::black() );
	
	if( mCamera.hasSomething() ) {
		
		gl::color( Color::white() );
		gl::setMatricesWindow( getWindowWidth(), getWindowHeight() );
		
		gl::pushMatrices();
		gl::setMatrices( mScreenView );
		gl::scale(Vec3f(1, 2./3., 1));

		mCamera.draw();
//		mHaarDetector.drawTrackings();
		mStoryCardDetector.drawTrackings();
		
		gl::popMatrices();
		
		vector<gl::Texture> storyTextures = mStoryCardDetector.mHistogramTextures;
		vector<gl::Texture> haarTextures = mHaarDetector.mHistogramTextures;
		
		const float amountOfHistogramTextures = storyTextures.size() + haarTextures.size();
		
		Vec3f viewTranslation(0, (float)mCameraLensSize.y * 2./3., 0);
		Vec3f storyHistogramTextureScale(1./amountOfHistogramTextures, 1./3., 1);
		
		// lay the story-card histogram textures out side by side along the bottom strip
		for (vector<gl::Texture>::const_iterator aTexture = storyTextures.begin(); aTexture != storyTextures.end(); ++aTexture) {
			glPushMatrix();
			gl::setMatrices( mScreenView );
			gl::translate( viewTranslation );
			gl::scale( storyHistogramTextureScale );
			gl::color( Color::white() );
			gl::draw( *aTexture );
			aTexture->disable();
			
			mStoryCardDetector.drawTrackings();
			
			glPopMatrix();
			viewTranslation.x += mCameraLensSize.x / amountOfHistogramTextures;
		}
		
		Vec3f haarHistogramTextureScale(2./amountOfHistogramTextures, 2./3., 1);
		
		for (vector<gl::Texture>::const_iterator aTexture = haarTextures.begin(); aTexture != haarTextures.end(); ++aTexture) {
			glPushMatrix();
			gl::setMatrices( mScreenView );
			gl::translate( viewTranslation );
			gl::scale( haarHistogramTextureScale );
			gl::color( Color::white() );
			gl::draw( *aTexture );
			aTexture->disable();
			
			mHaarDetector.drawTrackings();
			
			glPopMatrix();
			viewTranslation.x += mCameraLensSize.x / amountOfHistogramTextures;
		}
	}
	
	params::InterfaceGl::draw();
}