// Render the four video feeds as a 2x2 grid; each cell is 400x300 pixels.
void draw() {
    static const int originX[4] = { 0, 400, 0,   400 };
    static const int originY[4] = { 0, 0,   300, 300 };
    for (int feed = 0; feed < 4; ++feed) {
        drawVideo(originX[feed], originY[feed], textures[feed], clips[feed]);
    }
}
//-------------------------------------------------------------- void testApp::draw(){ if(streamer.wantsNewFrame()) { streamer.beginGrab(); drawVideo(); streamer.endGrab(); } drawVideo(); }
void CAR::ARDraw(unsigned char * dat,int len) { if(!bAR)return; //控制AR是否显示 if(len>1200)return; for(int i=0; i<AR_WIDTH && i<len;i++) { video_data[i]=dat[i]>gate?dat[i]:gate; } drawVideo(ARcolor);//画线 hasLine1 = true; }
// Builds one on-screen trace from the latest captured video line.
// Waits until `oncevn` frames have accumulated, then resamples the raw
// samples in dispdat->videodata down to AR_WIDTH points (each clamped to
// the noise gate) and draws the result.
void CAR::ARDraw() {
    VIDEODATATYPE * p;
    //fprintf(debugfp,"bAR=%s\n",(bAR? "true":"false"));
    //fprintf(debugfp,"gate=%d\n",gate);
    if(!bAR)return; // AR display is switched off
    videoCount += 1;
    if(videoCount<oncevn)return;// not enough video frames accumulated yet, return
    videoCount %= oncevn; // restart the frame counter for the next batch
    p = dispdat->videodata;
    float tmp = 0;
    /* if(distance == range) {*/
    for(int i=0;i<AR_WIDTH;i++) {
        // Resample: read the source at offset (int)tmp, clamping to `gate`.
        video_data[i]=*(p+(int)tmp)>gate?*(p+(int)tmp):gate;
        // Advance through the source proportionally to distance/range; the
        // factor 2 skips every other sample (decimation of interleaved data).
        tmp += 2 * (float)distance/range;
        // p+=int(tmp);// decimate
    }
    //} 
    //else
    //{
    //    int tmp[SAMPLE_NUM];
    //    for(int i=0;i<AR_WIDTH;i++)
    //    {
    //        tmp[i]=*p;
    //        p+=2;// decimate
    //    }
    //    for(int i=0;i<AR_WIDTH;i++)
    //    {
    //        video_data[i]=tmp[int((float)i/range*distance)];
    //    }
    //}
    drawVideo(ARcolor);// draw the trace
    hasLine1 = true;
}
// OpenGL callback which updates all video images on the screen void display(void) { glClear(GL_COLOR_BUFFER_BIT); glPushMatrix(); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glOrtho(0.0, 640.0, 0.0, 241.0, 1.0, -1.0); // Window size is 640x241 glMatrixMode(GL_MODELVIEW); glLoadIdentity(); // Update each video stream on the display for (size_t i = 0; i < streamVector.size(); ++i) { drawVideo(streamVector[i]); } glPopMatrix(); glutSwapBuffers(); glutPostRedisplay(); }
void NavigationGUI::display() { glClearColor(1, 1, 1, 0); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); drawVideo(); if (displayOverlay) { drawFeedStatus(); drawGPS(); if(displayTilt) { drawTilt(); } drawBattery(); drawSignal(); drawUltrasonic(); drawVolts(); drawADC(); drawLidarTilt(); } glutSwapBuffers(); }
void MyGlWindow::draw() { // the draw method must be private glClearColor(0,0,0,0); // clear the window to black glClear(GL_COLOR_BUFFER_BIT); // clear the window showAndHide(); glDisable(GL_LIGHTING); switch (_windowMode) { case MODE_VIDEO: drawVideo(false); break; case MODE_3D_MAP: glEnable(GL_LIGHTING); draw3DMap(); break; case MODE_2D_MAP: draw2DMap(); break; case MODE_SPHERE: drawSphere(); break; case MODE_CALIBRATION: drawCalibration(); break; case MODE_ROTATIONS: drawRotations(); break; default: exit(0); } //if ( snapshot->value() ) // drawSnapshotScreen(); //redraw(); };
// Draw the video frame when a clip is loaded, then the measurement overlay.
void Galvanic::draw(){
    const bool clipReady = video.isLoaded();
    if (clipReady) {
        drawVideo();
    }
    drawMeasures();
}
// Plays an MPEG file via SMPEG, uploading each decoded frame as an OpenGL
// texture and routing the audio through SDL_mixer. Blocks until the clip
// finishes or the user presses SPACE; an SDL_QUIT event exits the program.
void playVideo(const string &file) {
    //myFillRect(screen, NULL, 0);
    //SDL_GL_SwapBuffers();
    SMPEG *mpeg;
    SMPEG_Info info;
    mpeg = SMPEG_new(file.c_str(), &info, false);
    bool hasAudio = (info.has_audio > 0);
    if ( SMPEG_error(mpeg) ) {
        dout << "MPEG error: " << SMPEG_error(mpeg) << endl;
        exit(1);
    }
    int done = 0;
    // Surface sides are rounded up to powers of two so the surface can be
    // uploaded as a texture on hardware without NPOT support.
    SDL_Surface *videoSurface = SDL_AllocSurface( SDL_SWSURFACE,
        nearestPow2(info.width), nearestPow2(info.height), 32,
        0x000000FF, 0x0000FF00, 0x00FF0000, 0xFF000000 );
    if ( !videoSurface ) {
        dout << "Failed to allocate memory for video playback" << endl;
        exit(1);
    }
    SMPEG_enablevideo(mpeg, 1);
    // The mutex serializes SMPEG's decoder (which writes videoSurface via the
    // videoUpdate callback) against the texture upload in the loop below.
    SDL_mutex *mutex = SDL_CreateMutex();
    SMPEG_setdisplay(mpeg, videoSurface, mutex, videoUpdate );
    SMPEG_scaleXY(mpeg, info.width, info.height);
    //SMPEG_setdisplayregion(mpeg, 0, 0, info.width, info.height);
    //SMPEG_setdisplay(mpeg, screen, NULL, update);
    if(hasAudio) {
        SDL_AudioSpec audiofmt;
        Uint16 format;
        int freq, channels;
        /* Tell SMPEG what the audio format is */
        Mix_QuerySpec(&freq, &format, &channels);
        audiofmt.format = format;
        audiofmt.freq = freq;
        audiofmt.channels = channels;
        SMPEG_actualSpec(mpeg, &audiofmt);
        /* Hook in the MPEG music mixer */
        Mix_HookMusic(SMPEG_playAudioSDL, mpeg);
        SMPEG_enableaudio(mpeg, 1);
        SMPEG_setvolume(mpeg, 100);
    } else {
        // No audio track: silence background music and disable SMPEG audio.
        Mix_PauseMusic();
        SMPEG_enableaudio(mpeg, 0);
    }
    glBlendFunc(GL_ONE, GL_ZERO); // opaque blit: source replaces destination
    glEnable(GL_TEXTURE_2D);
    SMPEG_play(mpeg);
    while( !done && SMPEG_status( mpeg ) == SMPEG_PLAYING ) {
        SDL_Event event;
        while ( SDL_PollEvent(&event) ) {
            switch (event.type) {
                case SDL_KEYDOWN: {
                    if ( event.key.keysym.sym == SDLK_SPACE ) {
                        done = 1; // SPACE skips the rest of the video
                    }
                    break;
                }
                case SDL_QUIT: {
                    exit(1);
                }
                default:
                    break;
            }
        }
        // drawVideoFrame is raised by the videoUpdate callback when a fresh
        // frame landed in videoSurface; only re-upload and present then.
        if(drawVideoFrame) {
            SDL_mutexP(mutex);
            drawVideoFrame = false;
            drawVideo(videoSurface, info.width, info.height);
            //printf("draw in %i\n", time(NULL));
            SDL_mutexV(mutex);
            SDL_GL_SwapBuffers();
        }
    }
    SMPEG_stop(mpeg);
    if(hasAudio) {
        Mix_HookMusic(NULL, NULL); // unhook so SDL_mixer regains the device
    } else {
        Mix_ResumeMusic();
    }
    // NOTE(review): `mutex` is never SDL_DestroyMutex'd — one mutex leaks per
    // playback; confirm and destroy it here.
    SDL_FreeSurface(videoSurface);
    myFillRect(screen, NULL, 0); // clear the screen back to black
    SDL_GL_SwapBuffers();
    SMPEG_delete(mpeg);
}
// Qt/OpenGL paint callback for the AR scene: draws the live camera image as a
// full-screen background quad, then renders an overlay on every detected
// marker (models, textured boxes, video, web images/music, credential text),
// dispatching on the marker id.
void Scene::paintGL() {
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    glOrtho( 0, RESOLUTION_WIDTH, 0, RESOLUTION_HEIGHT, 1, 1000 );
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();
    // Begin: camera image — textured quad pushed to the far plane (z = -999)
    glEnable( GL_TEXTURE_2D );
    glColor3f( 1, 1, 1 );
    glBindTexture( GL_TEXTURE_2D, textures->at( 0 )->id );
    glBegin( GL_QUADS );
    glTexCoord2f( 0, 0 ); glVertex3f( 0, RESOLUTION_HEIGHT, -999 );
    glTexCoord2f( 1, 0 ); glVertex3f( RESOLUTION_WIDTH, RESOLUTION_HEIGHT, -999 );
    glTexCoord2f( 1, 1 ); glVertex3f( RESOLUTION_WIDTH, 0, -999 );
    glTexCoord2f( 0, 1 ); glVertex3f( 0, 0, -999 );
    glEnd();
    glDisable( GL_TEXTURE_2D );
    // End: camera image
    // Load the projection derived from the camera calibration so the 3D
    // overlays line up with the physical markers.
    glMatrixMode( GL_PROJECTION );
    double projectionMatrix[16];
    cv::Size2i sceneSize( RESOLUTION_WIDTH, RESOLUTION_HEIGHT );
    cv::Size2i openGlSize( RESOLUTION_WIDTH, RESOLUTION_HEIGHT );
    cameraParameters->glGetProjectionMatrix( sceneSize, openGlSize, projectionMatrix, 0.05, 10 );
    glLoadMatrixd( projectionMatrix );
    glMatrixMode( GL_MODELVIEW );
    double modelview_matrix[16];
    // Begin: per-marker graphics
    bool musicDetected = false;
    ThreeDWriter textoParaCredencial(80, "Arial", -0.04f, 0.045f, -0.001f, 0.3f, 0.02f, 0.0001f);
    QString comando_sql;
    QString textoParaMostrar;
    QVector<QStringList> vDatos;
    for( int i = 0 ; i < detectedMarkers.size() ; i++ ) {
        // Load this marker's pose, then apply the user-configured offsets.
        detectedMarkers.operator []( i ).glGetModelViewMatrix( modelview_matrix );
        glLoadMatrixd( modelview_matrix );
        glTranslatef( xTranslationOption, yTranslationOption, zTranslationOption );
        if( rotateOption ) {
            emit message( "Rotando" );
            zRotationOption += rotationVelocityOption; // accumulate spin each frame
        }
        glRotatef( xRotationOption, 1, 0, 0 );
        glRotatef( yRotationOption, 0, 1, 0 );
        glRotatef( zRotationOption, 0, 0, 1 );
        /*
         * Helpers ready to use on markers:
         *   DrawSheet - DrawBox - DrawCamera - DrawCameraBox - DrawModel -
         *   DrawVideo - DrawWebImage - ListenWebMusic
         * Always add "+ scaleOption" to the percentage parameter so the scale
         * can be adjusted at runtime.
         * REMEMBER: tune the scale passed as the percentage parameter.
         * IMPORTANT: pre-rotations needed so models/images/sound look right:
         *   DrawBox:        glRotatef( -90, 0, 1, 0 );
         *   DrawCamera:     glRotatef( 90, 0, 1, 0 );
         *   DrawCameraBox:  glRotatef( 90, 0, 1, 0 );
         *   DrawSheet:      glRotatef( 90, 0, 1, 0 );
         *   DrawWebImage:   glRotatef( 180, 0, 0, 1 ); glRotatef( -90, 0, 1, 0 );
         *   DrawVideo:      glRotatef( 90, 0, 1, 0 ); glRotatef( 180, 0, 0, 1 );
         *   ListenWebMusic: glRotatef( 180, 0, 0, 1 ); drawWebImage();
         *                   musicDetected = true;
         *                   webMusicPlayer->setVolume( detectedMarkers.at( i ).getArea() / 1000 );
         * Model-specific fixups:
         *   House:  glRotatef( -90, 0, 1, 0 );
         *   IPhone: glTranslatef( 0.032, 0, -0.025 ); glRotatef( 90, 1, 0, 0 );
         *   Man:    glTranslatef( 0, 0, -0.012 ); glRotatef( -90, 1, 0, 0 ); glRotatef( 90, 0, 0, 1 );
         *   Oil:    glRotatef( 90, 1, 0, 0 );
         *   Wagen:  glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 0, 1 );
         *   Woman:  glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 0, 1 );
         */
        switch( detectedMarkers.at( i ).id ) {
            case 0: { // 3D camera model
                glRotatef( 270, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawCamera( 130 + scaleOption );
                emit message( "Dibujando Cámara" );
                break;
            }
            case 1: { // cube textured with the live camera feed
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawCameraBox( 130 + scaleOption );
                emit message( "Dibujando Cubo de la Cámara" );
                break;
            }
            case 2: { // image fetched from the web
                glRotatef( 90+180, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawWebImage( 150 + scaleOption );
                break;
            }
            case 3: { // streamed music; volume follows the marker's on-screen area
                glRotatef( -90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawWebMusicImage( 150 + scaleOption );
                musicDetected = true;
                int volume = detectedMarkers.at( i ).getArea() / 150;
                if( volume > 100 ) volume = 100; // clamp to the player's max
                webMusicPlayer->setVolume( volume );
                emit message( "Volumen audio: " + QString::number( volume ) );
                listenWebMusic();
                break;
            }
            case 4: { // textured cube
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawBox( "Danger.jpg", 100 + scaleOption );
                emit message( "Dibujando la Famosa Caja Danger" );
                break;
            }
            case 5: { // 3D model: IPhone
                glRotatef( 180, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                glTranslatef( -0.03, 0.05, 0);
                drawModel( "IPhone.3ds", 70 + scaleOption );
                emit message( "Dibujando Modelo 3d: IPhone" );
                break;}
            case 6: { // 3D model: Man
                glTranslatef( 0, 0.02, 0 );
                glRotatef( -90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 ); glRotatef( 90+180, 1, 0, 0 );
                drawModel( "Man.3ds", 3 + scaleOption );
                emit message( "Dibujando Modelo 3d: Man" );
                break;
            }
            case 7: { // 3D model: Oil
                glRotatef( 90, 0, 1, 0 ); glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawModel( "Oil.3ds", 150 + scaleOption );
                emit message( "Dibujando Modelo 3d: Oil" );
                break;
            }
            case 8: { // video playback; volume follows the marker's area
                glRotatef( 270, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                int volume = detectedMarkers.at( i ).getArea() / 150;
                if( volume > 100 ) volume = 100;
                emit message( "Volumen video: " + QString::number( volume ) );
                drawVideo( "trailer-RF7.mp4", volume, 200 + 2*scaleOption );
                break;
            }
            case 9: { // textured cube
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawBox( "Ubp.png", 150 + scaleOption );
                emit message( "Dibujando Cubo de la UBP" );
                break;
            }
            case 10: { // servo control: steer toward the marker centre, draw axes
                if ( ! seguirRostroOption)
                    actualizaServoSiCorresponde(detectedMarkers.at( i ).getCenter().x, 50);
                glBegin(GL_LINES);
                glColor3f (1, 0, 0); glVertex3f( 0, 0, 0); glVertex3f( 0, 100, 0);
                glColor3f (0, 0, 1); glVertex3f( 0, 0, 0); glVertex3f( -100, 0, 0);
                glEnd();
                emit message( "Control de Servo" );
                break;
            }
            case 11: { // video playback (second clip)
                glRotatef( 270, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                int volume = detectedMarkers.at( i ).getArea() / 150;
                if( volume > 100 ) volume = 100;
                emit message( "Volumen video: " + QString::number( volume ) );
                drawVideo( "stic.mp4", volume, 200 + 2*scaleOption );
                break;
            }
            case 12: { // video playback (third clip)
                glRotatef( 270, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                int volume = detectedMarkers.at( i ).getArea() / 150;
                if( volume > 100 ) volume = 100;
                emit message( "Volumen video: " + QString::number( volume ) );
                drawVideo( "trailer-relato.mp4", volume, 200 + 2*scaleOption );
                break;
            }
            case 13: // credential card: photo plus raffle text looked up in the DB
                comando_sql = "SELECT sorteo FROM invitados WHERE apellido = 'House'";
                vDatos = adminDB.ejecutarComandoSelect(comando_sql);
                // NOTE(review): inner `i` shadows the marker-loop index; harmless
                // here (scoped to this loop) but worth renaming.
                for (int i=0 ; i<vDatos.size() ; i++) {
                    textoParaMostrar = vDatos.at(i).at(0);
                }
                glRotatef( 90, 1, 0, 0 ); glRotatef( 270, 0, 1, 0 );
                glTranslatef( 0, 0, 0.005 );
                drawSheet( "drhouse.jpg", 130 + scaleOption );
                glLoadMatrixd( modelview_matrix ); // reload the marker's original pose
                glRotatef( 90, 0, 0, 1 );
                glRotated(180, 0, 0, 5);
                textoParaCredencial.writeText(textoParaMostrar);
                break;
            case 14: // credential card (second guest)
                comando_sql = "SELECT sorteo FROM invitados WHERE apellido = 'Messi'";
                vDatos = adminDB.ejecutarComandoSelect(comando_sql);
                for (int i=0 ; i<vDatos.size() ; i++) {
                    textoParaMostrar = vDatos.at(i).at(0);
                }
                glRotatef( 90, 1, 0, 0 ); glRotatef( 270, 0, 1, 0 );
                glTranslatef( 0, 0, 0.005 );
                drawSheet( "messi.png", 130 + scaleOption );
                glLoadMatrixd( modelview_matrix ); // reload the marker's original pose
                glRotatef( 90, 0, 0, 1 );
                glRotated(180, 0, 0, 5);
                textoParaCredencial.writeText(textoParaMostrar);
                break;
            case 17: { // 3D model: House
                glRotatef( -90, 0, 0, 1 ); glRotatef( 90, 1, 0, 0 );
                drawModel( "House.3ds", 40 + scaleOption );
                emit message( "Dibujando Modelo 3d: Casa" );
                break;
            }
            case 15: { // textured cube
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawBox( "caja.bmp", 100 + scaleOption );
                emit message( "Dibujando Cubo de la UBP" );
                break;
            }
            case 16: { // textured cube
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawBox( "caja.bmp", 100 + scaleOption );
                emit message( "Dibujando Cubo de la UBP" );
                break;
            }
            case 18: { // textured cube
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawBox( "caja.bmp", 100 + scaleOption );
                emit message( "Dibujando Cubo de la UBP" );
                break;
            }
            case 19: { // textured cube
                glRotatef( 90, 1, 0, 0 ); glRotatef( 90, 0, 1, 0 );
                drawBox( "caja.bmp", 100 + scaleOption );
                emit message( "Dibujando Cubo de la UBP" );
                break;
            }
            default: {
                break;
            }
        }
    }
    // Music marker lost: fade the volume down one step per frame, then pause.
    if( musicDetected )
        musicActive = true;
    else if( musicActive ) {
        emit message( "<div style=\"color:red;\">Marcador no detectado, la música se pausará</div>" );
        webMusicPlayer->setVolume( webMusicPlayer->volume() - 1 );
        if( webMusicPlayer->volume() <= 0 ) {
            emit message( "Musica pausada" );
            webMusicPlayer->pause();
            musicActive = false;
        }
    }
    // The next line runs unconditionally every frame; a better scheme for
    // lowering the volume would be needed to avoid that.
    decreaseVideosVolume();
    // End: per-marker graphics
    glFlush();
}
int main(int argc, char *argv[]) { AVCodecContext* video_dec_ctx = NULL; AVCodec* video_dec = NULL; AVPacket pkt; AVFrame *frame = NULL; int read_eos = 0; int decode_count = 0; int render_count = 0; int video_stream_index = -1, i; uint8_t *frame_copy = NULL; FILE *dump_yuv = NULL; // parse command line parameters process_cmdline(argc, argv); if (!input_file) { ERROR("no input file specified\n"); return -1; } // libav* init av_register_all(); // open input file AVFormatContext* pFormat = NULL; if (avformat_open_input(&pFormat, input_file, NULL, NULL) < 0) { ERROR("fail to open input file: %s by avformat\n", input_file); return -1; } if (avformat_find_stream_info(pFormat, NULL) < 0) { ERROR("fail to find out stream info\n"); return -1; } av_dump_format(pFormat,0,input_file,0); // find out video stream for (i = 0; i < pFormat->nb_streams; i++) { if (pFormat->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { video_dec_ctx = pFormat->streams[i]->codec; video_stream_index = i; break; } } ASSERT(video_dec_ctx && video_stream_index>=0); // open video codec video_dec = avcodec_find_decoder(video_dec_ctx->codec_id); video_dec_ctx->coder_type = render_mode ? 
render_mode -1 : render_mode; // specify output frame type if (avcodec_open2(video_dec_ctx, video_dec, NULL) < 0) { ERROR("fail to open codec\n"); return -1; } // decode frames one by one av_init_packet(&pkt); while (1) { if(read_eos == 0 && av_read_frame(pFormat, &pkt) < 0) { read_eos = 1; } if (read_eos) { pkt.data = NULL; pkt.size = 0; } if (pkt.stream_index == video_stream_index) { frame = av_frame_alloc(); int got_picture = 0,ret = 0; ret = avcodec_decode_video2(video_dec_ctx, frame, &got_picture, &pkt); if (ret < 0) { // decode fail (or decode finished) DEBUG("exit ...\n"); break; } if (read_eos && ret>=0 && !got_picture) { DEBUG("ret=%d, exit ...\n", ret); break; // eos has been processed } decode_count++; if (got_picture) { switch (render_mode) { case 0: // dump raw video frame to disk file case 1: { // draw raw frame data as texture // assumed I420 format int height[3] = {video_dec_ctx->height, video_dec_ctx->height/2, video_dec_ctx->height/2}; int width[3] = {video_dec_ctx->width, video_dec_ctx->width/2, video_dec_ctx->width/2}; int plane, row; if (render_mode == 0) { if (!dump_yuv) { char out_file[256]; sprintf(out_file, "./dump_%dx%d.I420", video_dec_ctx->width, video_dec_ctx->height); dump_yuv = fopen(out_file, "ab"); if (!dump_yuv) { ERROR("fail to create file for dumped yuv data\n"); return -1; } } for (plane=0; plane<3; plane++) { for (row = 0; row<height[plane]; row++) fwrite(frame->data[plane]+ row*frame->linesize[plane], width[plane], 1, dump_yuv); } } else { // glTexImage2D doesn't handle pitch, make a copy of video data frame_copy = malloc(video_dec_ctx->height * video_dec_ctx->width * 3 / 2); unsigned char* ptr = frame_copy; for (plane=0; plane<3; plane++) { for (row=0; row<height[plane]; row++) { memcpy(ptr, frame->data[plane]+row*frame->linesize[plane], width[plane]); ptr += width[plane]; } } drawVideo((uintptr_t)frame_copy, 0, video_dec_ctx->width, video_dec_ctx->height, 0); } } break; case 2: // draw video frame as texture with drm handle 
case 3: // draw video frame as texture with dma_buf handle drawVideo((uintptr_t)frame->data[0], render_mode -1, video_dec_ctx->width, video_dec_ctx->height, (uintptr_t)frame->data[1]); break; default: break; } render_count++; } } } if (frame) av_frame_free(&frame); if (frame_copy) free(frame_copy); if (dump_yuv) fclose(dump_yuv); deinit_egl(); PRINTF("decode %s ok, decode_count=%d, render_count=%d\n", input_file, decode_count, render_count); return 0; }
// Render pass: show the video when one is loaded, then overlay the measures.
void Optical::draw(){
    if (!video.isLoaded()) {
        drawMeasures();
        return;
    }
    drawVideo();
    drawMeasures();
}