Example #1
void VideoPanel::draw(){

	if(!visible){
		return;
	}

	// Disable texturing on units 8 down to 0, leaving unit 0 as the active unit.
	for(int unit=8; unit>=0; unit--){
		glActiveTextureARB(GL_TEXTURE0_ARB+unit);
		glDisable(GL_TEXTURE_2D);
	}

	glColor4f(color.x,color.y,color.z,color.w);


	if(shaderId==SHADER_NONE){
		shader.reset();
	}else{
		shader.set(shaderId);

		// Upload each fragment-program local parameter to the active shader.
		for(int p=0; p<shaderParameters.size(); p++){
			shader.localParameterFragment(p,shaderParameters[p]);
		}
	}

	VideoSource* vs;

	float px,py,pz,sx,sy;

	pz=pos.z;

	if(percent){
		px=pos.x*conf->sizeX;
		py=pos.y*conf->sizeY;
		sx=scale.x*conf->sizeX;
		sy=scale.y*conf->sizeY;
	}else{
		px=pos.x;
		py=pos.y;
		sx=scale.x;
		sy=scale.y;
	}

	for(int i=0; i<layers.size(); i++){

		if(layers[i].substr(0,8)=="picture:"){
			GLuint texid=remapPictures[layers[i]];

			// Bind each layer to its own texture unit (layers past 3 reuse the last selected unit).
			if(i<4){
				glActiveTextureARB(GL_TEXTURE0_ARB+i);
			}



			glEnable(GL_TEXTURE_2D);
			glBindTexture(GL_TEXTURE_2D,texid);

		}else{

			vs=remap[layers[i]];

			if(vs!=NULL){
				if(i<4){
					glActiveTextureARB(GL_TEXTURE0_ARB+i);
				}

				glEnable(GL_TEXTURE_2D);
				glBindTexture(GL_TEXTURE_2D,vs->texid);

				// Grab the next frame and upload it to the bound texture.
				vs->getFrame();

				#ifdef VID_SOFTWARE_RESIZE
				// Update the pre-allocated power-of-two texture in place.
				glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, vs->widthp2, vs->heightp2, GL_BGR_EXT, GL_UNSIGNED_BYTE, vs->data);
				#else
				// Rebuild the full mipmap chain from the new frame (slower per frame).
				gluBuild2DMipmaps(GL_TEXTURE_2D, 3, vs->width, vs->height, GL_BGR_EXT, GL_UNSIGNED_BYTE, vs->data);
				#endif
			}
		}
	
	}

	// Draw one textured quad covering the panel rectangle.
	float tx=1;

	glBegin(GL_QUADS);
		glTexCoord2d(0,0); glVertex3f(px		,py,pz);
		glTexCoord2d(tx,0); glVertex3f(px+sx	,py,pz);
		glTexCoord2d(tx,-tx); glVertex3f(px+sx	,py-sy,pz);
		glTexCoord2d(0,-tx); glVertex3f(px		,py-sy,pz);
	glEnd();

}
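
For context, a minimal caller sketch combining the two members shown in these examples; the panel instance, layer contents, and render-loop placement are assumptions for illustration, not taken from the original project:

// Hypothetical usage sketch for VideoPanel (only load() and draw() come from the examples on this page).
VideoPanel panel;
panel.load(1, "camera:1");             // layer 1: capture driver 0
panel.load(2, "picture:overlay.png");  // layer 2: static image (placeholder file name)

// ...then once per rendered frame, with the panel's GL state already set up:
panel.draw();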
Example #2
bool VideoPanel::load(int layer, String source){
	//VideoSource
	
	while(layer>layers.size()){
		layers.pushBack("");
	}

	if(source==""){
		layers[layer-1]="";
		return true;

	}

	layers[layer-1]=source;

	if(source.substr(0,7)=="camera:"){
		VideoSource* vs;

		if(!remap.find(source)){
			remap[source]=new VideoSource;
			vs=remap[source];
		}else{
			return true;
		}

		// "camera:1" selects capture driver 0, "camera:2" selects driver 1.
		short driver=0;

		if(source.substr(7,-1)=="1"){
			driver=0;
		}else if(source.substr(7,-1)=="2"){
			driver=1;
		}


		#ifdef VID_SOFTWARE_RESIZE
		vs->powerOf2=true;
		#endif

		if(!vs->sourceCapture(driver)){

			console().write("error, could not access camera");
			return false;
		}else{
			//console().write("successfully connected to camera");
		}

		glGenTextures(1, (GLuint*)&vs->texid);
		glBindTexture(GL_TEXTURE_2D,vs->texid);
		
		vs->getFrame();


		#ifdef VID_SOFTWARE_RESIZE
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, vs->widthp2,vs->heightp2, 0,  GL_BGR_EXT, GL_UNSIGNED_BYTE, blank);
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);	// Linear Filtered
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		#else
			gluBuild2DMipmaps(GL_TEXTURE_2D, 3, vs->width,vs->height, GL_BGR_EXT , GL_UNSIGNED_BYTE, vs->data);
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		#endif


		
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_REPEAT);

	}else if(source.substr(0,5)=="file:"){
		VideoSource* vs;

		if(!remap.find(source)){
			vs=new VideoSource;
			remap[source]=vs;
		}else{
			return true;
		}


		#ifdef VID_SOFTWARE_RESIZE
		vs->powerOf2=true;
		#endif

		String x=source.substr(5,-1);

		if(  !vs->sourceAvi(x.c_str())  ){
			console().write("error loading video");
			return false;
		}else{
			//console().write("video loaded successfully");
		}

		

		glGenTextures(1, (GLuint*)&vs->texid);
		glBindTexture(GL_TEXTURE_2D,vs->texid);
		
		vs->getFrame();

		#ifdef VID_SOFTWARE_RESIZE
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, vs->widthp2, vs->heightp2, 0, GL_BGR_EXT, GL_UNSIGNED_BYTE, blank);
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);	// Linear Filtered
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		#else
			gluBuild2DMipmaps(GL_TEXTURE_2D, 3, vs->width, vs->height, GL_BGR_EXT, GL_UNSIGNED_BYTE, vs->data);
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
			glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
		#endif

		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
		glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_REPEAT);

	}else if(source.substr(0,8)=="picture:"){
		String file=source.substr(8,-1);

		int g=texture.load(file);

		ImageData im=texture.info(g);

		if(g!=-1){

			remapPictures[source]=g;

			console().write("picture '"+file+"', loaded successfully");
		}else{

			console().write("Error loading file '"+file+"', "+im.error);
			return false;
		}
		
	}else{

		console().write("error, unknown video source type '"+source+"', should be file:, picture: or camera:");
		return false;
	}


	return true;

}
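
Both load() and draw() switch between two texture-upload strategies: with VID_SOFTWARE_RESIZE the frame is padded to a power-of-two size, allocated once with glTexImage2D, and then streamed into the existing texture each frame with glTexSubImage2D; otherwise gluBuild2DMipmaps rebuilds the whole mipmap chain from every new frame. A standalone sketch of the streaming pattern, where tex, texW, texH and framePixels are placeholder names rather than identifiers from the source:

// Streaming-video texture sketch (illustrative only; variable names are placeholders).
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);

// Allocate the full-size texture once, with no mipmaps and linear filtering.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texW, texH, 0, GL_BGR_EXT, GL_UNSIGNED_BYTE, NULL);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

// Per frame: overwrite the existing pixels instead of re-allocating the texture.
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, texW, texH, GL_BGR_EXT, GL_UNSIGNED_BYTE, framePixels);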
Example #3
int main( int argc, char* argv[])
{
    // Declare our VideoSource pointer
    VideoSource *src;
    CvVideoWriter *writer = 0;
    int isColor = 1;
    int fps     = 30;  // frames per second
    int frameW  = 640; // 744 for firewire cameras
    int frameH  = 480; // 480 for firewire cameras
    writer=cvCreateVideoWriter("out.avi",CV_FOURCC('P','I','M','1'),
                                   fps,cvSize(frameW,frameH),isColor);


    if( argc > 1 ) {
        // Initialization: video file
        string path(argv[1]);
        src = new VideoFile( path, (argc > 2) );
    }
    else {
        // Initialization: webcam
        src = new Camera( 0 );
    }

    // Open the video stream
    try {
        src->open();
    }
    catch( Exception &e ) {
        // Si une exception se produit, on l'affiche et on quitte.
        cout << e.what() << endl;
        delete src;
        return 10;
    }

    // If everything is fine, print the video stream information.
    cout << src->getInfos() << endl;

    cvNamedWindow( "video", CV_WINDOW_AUTOSIZE );
    Image img;
    char key = 'a';

    // Start measuring the frame rate
    debut_mesure = getTimeMillis();

    while( key != 'q' ) {
        try {
            src->getFrame( img );
        }
        catch(Exception &e) {
            cout << "\n" << e.what() << endl;
            break;
        }
        /*CvScalar scalaire;
        scalaire.val[0] = 120;
        scalaire.val[1] = scalaire.val[2] = 0;
        img.colorFilter(scalaire);*/

        img.colorPaint2(top_left,bottom_right);
        if (bottom_right.x < 720) {
        	bottom_right.x++;
        }
        if (bottom_right.y < 576) {
        	bottom_right.y++;
        }
        if (top_left.x > 0) {
        	top_left.x--;
        }
        if (top_left.y > 0) {
        	top_left.y--;
        }

        //img.colorBlacknWhite();

        cvShowImage( "video", img );
        cvWriteFrame(writer,img);
        key = cvWaitKey( 10 );

        // Print the frame rate
        cout << "\rFrame Rate : " << setw(5);
        cout << left << setprecision(4);
        cout << calculFrameRate() << " FPS" << flush;
    }

    cout << endl;
    cvDestroyWindow( "video" );
    delete src;
    return 0;
}
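
The same grab-and-record structure, distilled to the plain OpenCV 1.x C API without the project's wrapper classes; this is an illustrative sketch (camera index, frame count, and header path are assumptions), not part of the original program:

#include <opencv/highgui.h>

int main()
{
    CvCapture* cap = cvCaptureFromCAM( 0 );              // first available camera
    if( !cap ) return 1;

    IplImage* frame = cvQueryFrame( cap );               // grab one frame to learn the size
    CvVideoWriter* writer = cvCreateVideoWriter( "out.avi",
        CV_FOURCC('P','I','M','1'), 30, cvGetSize(frame), 1 );

    for( int i = 0; i < 300 && frame; ++i ) {            // record roughly 10 seconds at 30 FPS
        cvWriteFrame( writer, frame );
        frame = cvQueryFrame( cap );                     // buffer is owned by the capture; do not free it
    }

    cvReleaseVideoWriter( &writer );                     // finalizes the AVI file
    cvReleaseCapture( &cap );
    return 0;
}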