void pix_filmOS :: openMess(t_symbol *filename, int format)
{
  //  if (filename==x_filename)return;
  x_filename = filename;
  if (format) m_colorspace = format;

  char buf[MAXPDSTRING];
  canvas_makefilename(const_cast<t_canvas*>(getCanvas()), filename->s_name, buf, MAXPDSTRING);

  // Clean up any open files
  closeMess();

  m_haveMovie = GEM_MOVIE_NONE;
  realOpen(buf);
  if (m_haveMovie == GEM_MOVIE_NONE) return;

#ifndef __APPLE__
  createBuffer();
  prepareTexture();
#endif

  t_atom ap[3];
  SETFLOAT(ap,   m_numFrames);
  SETFLOAT(ap+1, m_xsize);
  SETFLOAT(ap+2, m_ysize);

  m_newFilm = 1;
  post("loaded file: %s with %d frames (%dx%d)", buf, m_numFrames, m_xsize, m_ysize);
  outlet_list(m_outNumFrames, 0, 3, ap);
}
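// A minimal sketch of what the createBuffer()/prepareTexture() pair above
// typically has to do on non-Apple platforms: allocate a power-of-two pixel
// buffer for the decoded frames and derive the texture coordinates covering
// the actual frame inside it. Every name below (FilmBuffer, nextPow2,
// makeFilmBuffer) is a hypothetical illustration, not Gem's implementation.
#include <cstddef>
#include <vector>

struct FilmBuffer {
    int texWidth, texHeight;            // power-of-two texture dimensions
    float sMax, tMax;                   // tex coords covering the actual frame
    std::vector<unsigned char> pixels;  // backing store handed to GL
};

static int nextPow2(int v)
{
    int p = 1;
    while (p < v) p <<= 1;
    return p;
}

static FilmBuffer makeFilmBuffer(int xsize, int ysize, int csize)
{
    FilmBuffer b;
    b.texWidth  = nextPow2(xsize);
    b.texHeight = nextPow2(ysize);
    b.sMax = (float)xsize / (float)b.texWidth;   // the frame occupies only a
    b.tMax = (float)ysize / (float)b.texHeight;  // sub-region of the texture
    b.pixels.resize((size_t)b.texWidth * b.texHeight * csize);
    return b;
}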
void BloomEffect::apply(const sf::RenderTexture& input, sf::RenderTarget& output)
{
    prepareTexture(input.getSize());

    filterBright(input, mBrightnessTexture);

    downSample(mBrightnessTexture, mFirstPassTextures[0]);
    blurMultipass(mFirstPassTextures);

    downSample(mFirstPassTextures[0], mSecondPassTextures[0]);
    blurMultipass(mSecondPassTextures);

    add(mFirstPassTextures[0], mSecondPassTextures[0], mFirstPassTextures[1]);
    mFirstPassTextures[1].display();
    add(input, mFirstPassTextures[1], output);
}
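// A plausible sketch of the prepareTexture(size) helper called above, assuming
// SFML 2.x's sf::RenderTexture API: (re)create the intermediate render targets
// lazily whenever the input size changes, with the first blur chain at half
// resolution and the second at quarter resolution. This mirrors the member
// names used at the call site, but it is an illustration, not the project's
// actual implementation.
void BloomEffect::prepareTexture(sf::Vector2u size)
{
    if (mBrightnessTexture.getSize() != size)
    {
        mBrightnessTexture.create(size.x, size.y);
        mBrightnessTexture.setSmooth(true);

        // First blur chain: half resolution
        mFirstPassTextures[0].create(size.x / 2, size.y / 2);
        mFirstPassTextures[0].setSmooth(true);
        mFirstPassTextures[1].create(size.x / 2, size.y / 2);
        mFirstPassTextures[1].setSmooth(true);

        // Second blur chain: quarter resolution
        mSecondPassTextures[0].create(size.x / 4, size.y / 4);
        mSecondPassTextures[0].setSmooth(true);
        mSecondPassTextures[1].create(size.x / 4, size.y / 4);
        mSecondPassTextures[1].setSmooth(true);
    }
}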
int main(int argc, char** argv)
{
    printf("START\n");

    int w, h, id;
    unsigned char* data;

    printf("STAGE LoadImage\n");
    // load image first so that window opens with image size
    id = ilLoadImage("textures.jpg");
    // image not loaded
    if (id == 0) return(2);

    printf("STAGE BindImage\n");
    ilBindImage(id);
    w = ilGetInteger(IL_IMAGE_WIDTH);
    h = ilGetInteger(IL_IMAGE_HEIGHT);
    data = ilGetData();

    printf("STAGE GLUT 1/2\n");
    GLUTBackendInit(argc, argv);
    bool fullScreen = true;

    printf("STAGE GLUT 2/2\n");
    if (!GLUTBackendCreateWindow(WINDOW_WIDTH, WINDOW_HEIGHT, 60, fullScreen, "AngTest")) {
        return 0x11;
    }

    printf("STAGE AngTest\n");
    AngTest* aTest = new AngTest();

    printf("STAGE Init\n");
    if (!aTest->Init()) {
        return 0x12;
    }

    prepareTexture(w, h, data);
    //showAtt(); // just for showing off some pic data

    printf("STAGE Run\n");
    aTest->Run();

    printf("STAGE delete\n");
    delete aTest;

    printf("STOP\n");
    return 0;
}
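// A minimal sketch of what prepareTexture(w, h, data) might do with the pixels
// DevIL returned: upload them into a GL texture with linear filtering. The
// global texture id and the GL_RGB format are assumptions (DevIL delivers
// whatever layout the image decoded to; RGB is the common case for JPEGs),
// so this is illustrative rather than the project's actual helper.
#include <GL/gl.h>

static GLuint g_texture = 0;  // hypothetical global holding the texture name

void prepareTexture(int w, int h, unsigned char* data)
{
    glGenTextures(1, &g_texture);
    glBindTexture(GL_TEXTURE_2D, g_texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);  // JPEG rows are not 4-byte padded
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0,
                 GL_RGB, GL_UNSIGNED_BYTE, data);
}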
/** @brief Iterate Functional Test cases.
 *
 *  @return Iteration result.
 */
tcu::TestCase::IterateResult TextureCubeMapArrayETC2Support::iterate(void)
{
    prepareFramebuffer();
    prepareProgram();
    prepareVertexArrayObject();
    prepareTexture();
    draw();

    if (isRenderedImageValid())
        m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
    else
        m_testCtx.setTestResult(QP_TEST_RESULT_FAIL, "Fail");

    clean();
    return STOP;
}
EGLXTransportSurfaceClient::EGLXTransportSurfaceClient(const PlatformBufferHandle handle, const IntSize& size, bool hasAlpha)
    : GLTransportSurfaceClient()
    , m_image(0)
    , m_size(size)
    , m_totalBytes(0)
{
    if (!handle)
        return;

    m_handle = handle;
    XWindowAttributes attr;

    if (!XGetWindowAttributes(NativeWrapper::nativeDisplay(), m_handle, &attr))
        return;

    createTexture();

    GLPlatformSurface::SurfaceAttributes sharedSurfaceAttributes = GLPlatformSurface::Default;
    if (hasAlpha)
        sharedSurfaceAttributes = GLPlatformSurface::SupportAlpha;

    EGLConfigSelector configSelector(sharedSurfaceAttributes);
    EGLConfig config = configSelector.surfaceClientConfig(XVisualIDFromVisual(attr.visual));
    m_eglImage = adoptPtr(new EGLTextureFromPixmap(m_handle, hasAlpha, config));

    if (!m_eglImage->isValid() || eglGetError() != EGL_SUCCESS)
        destroy();

    // Zero-copy texture-from-pixmap succeeded; nothing more to set up.
    if (m_eglImage)
        return;

    // Otherwise fall back to a readback path into a plain texture.
    m_totalBytes = m_size.width() * m_size.height() * 4;

#if USE(OPENGL_ES_2)
    m_format = GraphicsContext3D::RGBA;
    static bool bgraSupported = GLPlatformContext::supportsGLExtension("GL_EXT_texture_format_BGRA8888");
    if (bgraSupported)
        m_format = GraphicsContext3D::BGRA;
#endif

    createTexture();
    prepareTexture();
}
void SimpleTextureScene::initialise()
{
    m_funcs = m_context->functions();
    m_funcs->initializeOpenGLFunctions();

    // mplayer's output is definitely easier to parse than ffmpeg's...
    QProcess movieFileParameters;
    movieFileParameters.start("mplayer",
                              QStringList() << "-identify"
                                            << "-vo" << "null"
                                            << "-ao" << "null"
                                            << "-frames" << "0"
                                            << "-vc" << "null"
                                            << "--" << m_movieFile,
                              QIODevice::ReadOnly);
    movieFileParameters.waitForFinished();
    QString mplayerOutput = QString::fromLocal8Bit(movieFileParameters.readAllStandardOutput());

    QRegularExpression widthRe("^ID_VIDEO_WIDTH=(.*)", QRegularExpression::MultilineOption);
    QRegularExpressionMatch widthMatch = widthRe.match(mplayerOutput);
    if (widthMatch.hasMatch())
        m_frameSize.setWidth(widthMatch.captured(1).toInt());

    QRegularExpression heightRe("^ID_VIDEO_HEIGHT=(.*)", QRegularExpression::MultilineOption);
    QRegularExpressionMatch heightMatch = heightRe.match(mplayerOutput);
    if (heightMatch.hasMatch())
        m_frameSize.setHeight(heightMatch.captured(1).toInt());

    if (m_frameSize.width() <= 0 || m_frameSize.height() <= 0)
        qFatal("Cannot determine the input file frame size!");
    qDebug() << "Got frame size:" << m_frameSize;

    // Set the clear color to black
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);

    // Prepare a complete shader program...
    prepareShaderProgram(":/shaders/simpletexture.vert", ":/shaders/simpletexture.frag");

    // Prepare the vertex, texture and index buffers
    prepareVertexBuffers();

    // Prepare the VAO
    prepareVertexArrayObject();

    // Prepare the texture data itself (textures and pixel unpack buffer objects)
    prepareTexture();

    // Link texture unit 0 to the "ySampler" variable in the fragment shader
    m_shader.setUniformValue("ySampler", 0);

    // Link texture unit 1 to the "uvSampler" variable in the fragment shader
    m_shader.setUniformValue("uvSampler", 1);

    m_videoDecoder.start("ffmpeg",
                         QStringList() << "-i" << m_movieFile
                                       << "-f" << "rawvideo"
                                       << "-vcodec" << "rawvideo"
                                       << "-pix_fmt" << "nv12"
                                       << "-an"
                                       << "-ss" << "180" // jump to 3m
                                       << "-",
                         QIODevice::ReadOnly);
    m_videoDecoder.closeWriteChannel();
    m_videoDecoder.closeReadChannel(QProcess::StandardError);
}
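// A sketch of the prepareTexture() step for the NV12 frames ffmpeg emits: one
// single-channel texture for the full-resolution Y plane (unit 0, "ySampler")
// and one two-channel texture for the half-width, half-height interleaved UV
// plane (unit 1, "uvSampler"). Written as a free function so it stands alone;
// GL_R8/GL_RG8 assume a desktop GL 3+ (or ES 3) context. Illustrative only,
// not the scene class's actual member function.
#include <QOpenGLFunctions>
#include <QSize>

void prepareNV12Textures(QOpenGLFunctions* f, const QSize& frameSize,
                         GLuint* yTex, GLuint* uvTex)
{
    const int w = frameSize.width();
    const int h = frameSize.height();

    // Y plane: full resolution, one byte per texel
    f->glGenTextures(1, yTex);
    f->glActiveTexture(GL_TEXTURE0);
    f->glBindTexture(GL_TEXTURE_2D, *yTex);
    f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    f->glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, w, h, 0,
                    GL_RED, GL_UNSIGNED_BYTE, nullptr);

    // Interleaved UV plane: half resolution on both axes, two bytes per texel
    f->glGenTextures(1, uvTex);
    f->glActiveTexture(GL_TEXTURE1);
    f->glBindTexture(GL_TEXTURE_2D, *uvTex);
    f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    f->glTexImage2D(GL_TEXTURE_2D, 0, GL_RG8, w / 2, h / 2, 0,
                    GL_RG, GL_UNSIGNED_BYTE, nullptr);
}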
// The MAIN function, from here we start the application and run the game loop
int main()
{
    // start glfw and glew with default settings
    assert(start_gl());

    // Build and compile our shader program
    Shader sphereShader("shaders/shader.vs", "shaders/shader.frag");
    Shader lampShader("shaders/shader.vs", "shaders/lamp.frag");

    /////// Sphere vertices, normals and indices generation //////////////////////////////////////////
    std::vector<GLfloat> sphere_verts, q2Verts, cone_verts;
    std::vector<GLint> sphere_idx;
    generateCone(&cone_verts, stacks, slices);
    generateSphere(&sphere_verts, &q2Verts, &sphere_idx, stacks, slices, radius);
    std::vector<GLint> cone_idx(sphere_idx);

    ///////////////// DECLARATIONS ////////////////////////
    GLuint sphere_VBO, sphere_VAO, sphere_EBO, normal_VAO, normal_VBO, cone_VAO, cone_VBO, cone_EBO;

    ///////////////// GET VAO READY FOR CONE ////////////////////////////////////////////////////////
    GLuint aLoc[3] = {0};
    GLint size[3] = {3};
    GLsizei vStride[3] = {3 * sizeof(GLfloat)};
    const void* vOffset[3] = {(GLvoid*)0};
    prepareVAO(&cone_VAO, &cone_VBO, &cone_EBO, cone_verts, cone_idx, 1, aLoc, size, vStride, vOffset);

    ///////////////// GET VAO READY FOR SPHERE //////////////////////////////////////////////////////
    aLoc[0] = 0; aLoc[1] = 1; aLoc[2] = 2;
    size[0] = size[1] = 3; size[2] = 2;
    vStride[0] = vStride[1] = vStride[2] = 8 * sizeof(GLfloat);
    vOffset[0] = (GLvoid*)0;
    vOffset[1] = (GLvoid*)(3 * sizeof(GLfloat));
    vOffset[2] = (GLvoid*)(6 * sizeof(GLfloat));
    prepareVAO(&sphere_VAO, &sphere_VBO, &sphere_EBO, sphere_verts, sphere_idx, 3, aLoc, size, vStride, vOffset);

    ///////////////// GET VAO READY FOR NORMALS (Q2) ////////////////////////////////////////////////
    aLoc[0] = 0;
    size[0] = 3;
    vStride[0] = 3 * sizeof(GLfloat);
    vOffset[0] = (GLvoid*)0;
    prepareVAO(&normal_VAO, &normal_VBO, nullptr, q2Verts, std::vector<GLint>(), 1, aLoc, size, vStride, vOffset);

    ///////////////// GET Textures ready ////////////////////////////////////////////////////////////
    GLuint texture1;
    int width, height, comp;
    prepareTexture(&texture1, "images/earth.jpg", &width, &height, &comp);

    ///////////////// The positions for the spheres in q4 ////////////////////////////////////////////
    // where the cubes will appear in the world space
    glm::vec3 cubePositions[] = {
        glm::vec3(1.5f, 0.0f, 0.0f),
        glm::vec3(1.0f, 0.0f, 0.0f)
    };

    ///////////////// Uniform variables for MVP in VS /////////////////////////////////////////////////
    GLint modelLoc = glGetUniformLocation(sphereShader.Program, "model");
    GLint viewLoc  = glGetUniformLocation(sphereShader.Program, "view");
    GLint projLoc  = glGetUniformLocation(sphereShader.Program, "projection");
    // The question number to switch
    GLint q = glGetUniformLocation(sphereShader.Program, "q");
    // uniforms for lighting
    GLint objectColorLoc = glGetUniformLocation(sphereShader.Program, "objectColor");
    GLint lightColorLoc  = glGetUniformLocation(sphereShader.Program, "lightColor");
    GLint lightPosLoc    = glGetUniformLocation(sphereShader.Program, "lightPos");
    GLint viewPosLoc     = glGetUniformLocation(sphereShader.Program, "viewPos");

    // Main loop
    while (!glfwWindowShouldClose(window))
    {
        GLfloat currentFrame = glfwGetTime();
        deltaTime = currentFrame - lastFrame;
        lastFrame = currentFrame;

        // Check if any events have been activated (key pressed, mouse moved)
        glfwPollEvents();
        do_movement();

        // Clear the color buffer
        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        lightPos.x = sin(glfwGetTime()) * 0.1;
        lightPos.y = cos(glfwGetTime()) * 0.1;
        drawSphere(&sphereShader, &sphere_VAO, &sphere_idx, &normal_VAO, &sphere_verts,
                   &cone_VAO, &cone_verts, &cone_idx, &lampShader,
                   &objectColorLoc, &lightColorLoc, &lightPosLoc, &viewPosLoc,
                   &q, &texture1, 2, cubePositions, &modelLoc, &viewLoc, &projLoc);

        // Swap the screen buffers
        glfwSwapBuffers(window);
    }

    // Deallocate
    glDeleteVertexArrays(1, &sphere_VAO);
    glDeleteBuffers(1, &sphere_VBO);
    glDeleteBuffers(1, &sphere_EBO);
    glDeleteVertexArrays(1, &normal_VAO);
    glDeleteBuffers(1, &normal_VBO);
    glDeleteVertexArrays(1, &cone_VAO);
    glDeleteBuffers(1, &cone_VBO);
    glDeleteBuffers(1, &cone_EBO);

    // Terminate GLFW
    glfwDestroyWindow(window);
    glfwTerminate();
    return EXIT_SUCCESS;
}
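// A sketch of the prepareTexture helper called above, matching its signature
// at the call site. It assumes stb_image for decoding and that GL headers
// (e.g. GLEW) are already included by the project; the actual loader used by
// this code is not shown here, so treat this as an illustration.
#include <cstdio>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"

void prepareTexture(GLuint* tex, const char* path, int* w, int* h, int* comp)
{
    stbi_set_flip_vertically_on_load(1);  // GL's UV origin is bottom-left
    unsigned char* pixels = stbi_load(path, w, h, comp, 0);
    if (!pixels) {
        fprintf(stderr, "failed to load %s\n", path);
        return;
    }
    GLenum fmt = (*comp == 4) ? GL_RGBA : GL_RGB;

    glGenTextures(1, tex);
    glBindTexture(GL_TEXTURE_2D, *tex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, fmt, *w, *h, 0, fmt, GL_UNSIGNED_BYTE, pixels);

    stbi_image_free(pixels);
    glBindTexture(GL_TEXTURE_2D, 0);
}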
void TglTessellator::tessellate(const TColorFunction *cf, const bool antiAliasing,
                                TRegionOutline &outline, TRaster32P texture)
{
    //QMutexLocker sl(m_mutex);
    checkErrorsByGL;

    glEnable(GL_TEXTURE_2D);
    glColor4d(1, 1, 1, 1);
    checkErrorsByGL;

    TextureInfoForGL texInfo;

    int pow2Lx = tcg::numeric_ops::GE_2Power((unsigned int)texture->getLx());
    int pow2Ly = tcg::numeric_ops::GE_2Power((unsigned int)texture->getLy());

    TAffine aff;
    if (texture->getLx() != pow2Lx || texture->getLy() != pow2Ly) {
        // Resample the texture into the smallest enclosing power-of-two raster
        TRaster32P r(pow2Lx, pow2Ly);
        aff = TScale((double)pow2Lx / texture->getLx(), (double)pow2Ly / texture->getLy());
        TRop::resample(r, texture, aff.place(texture->getCenterD(), r->getCenterD()));
        texture = r;

        glPushMatrix();
        tglMultMatrix(aff.inv());
    }

    // If GL_BGRA isn't present, make a proper texture to use (... obsolete?)
    texture->lock();
    TRasterP texImage = prepareTexture(texture, texInfo);
    checkErrorsByGL;

    if (texImage != texture)
        texImage->lock();

    assert(texImage->getLx() == texImage->getWrap());

    GLuint texId;
    glGenTextures(1, &texId);             // Generate a texture name
    checkErrorsByGL;
    glBindTexture(GL_TEXTURE_2D, texId);  // Bind it 'active'
    checkErrorsByGL;

    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);     // These must be invoked
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);     // on a bound texture
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); //
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); //
    checkErrorsByGL;

    glTexEnvf(GL_TEXTURE_ENV,      // This too ?
              GL_TEXTURE_ENV_MODE, // Better here anyway
              GL_MODULATE);        //
    checkErrorsByGL;

    glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
    checkErrorsByGL;

    glTexImage2D(GL_TEXTURE_2D,
                 0,                      // one level only
                 texInfo.internalformat, // pixel channels count
                 texInfo.width,          // width
                 texInfo.height,         // height
                 0,                      // border size
                 texInfo.type,           // pixel format    // crappy names
                 texInfo.format,         // pixel data type // oh, SO much
                 texImage->getRawData());
    checkErrorsByGL;

    texture->unlock();
    if (texImage != texture)
        texImage->unlock();

    TglTessellator::GLTess glTess;
    gluTessCallback(glTess.m_tess, GLU_TESS_VERTEX, (GluCallback)tessellateTexture);
    checkErrorsByGL;

    //------------------------//

    if (aff != TAffine())
        doTessellate(glTess, cf, antiAliasing, outline, aff); // Tessellate & render
    else
        doTessellate(glTess, cf, antiAliasing, outline);      // Tessellate & render
    checkErrorsByGL;

    //------------------------//

    if (aff != TAffine())
        glPopMatrix();

    glDeleteTextures(1, &texId); // Delete & unbind texture
    checkErrorsByGL;

    glDisable(GL_TEXTURE_2D);
    checkErrorsByGL;
}
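// The prepareTexture(texture, texInfo) call above hands back a raster that GL
// can consume directly; per the comment, the interesting case is hardware
// without GL_BGRA support, where the pixel bytes must be reordered first.
// A standalone, hypothetical sketch of that fallback (not OpenToonz's code):
#include <cstddef>
#include <cstdint>
#include <utility>

// Convert a run of BGRA8 pixels to RGBA8 in place by swapping the B and R bytes.
static void bgraToRgbaInPlace(uint8_t* px, size_t pixelCount)
{
    for (size_t i = 0; i < pixelCount; ++i, px += 4)
        std::swap(px[0], px[2]);  // B <-> R; G and A stay put
}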
/////////////////////////////////////////////////////////
// really open the file! (OS dependent)
//
/////////////////////////////////////////////////////////
void pix_movieDS::realOpen(char *filename)
{
    WCHAR WideFileName[MAXPDSTRING];
    HRESULT RetVal;
    AM_MEDIA_TYPE MediaType;
    BOOL bFrameTime = TRUE;
    GUID Guid;

    // Convert the C string to a wide string.
    memset(&WideFileName, 0, MAXPDSTRING * 2);

    if (0 == MultiByteToWideChar(CP_ACP, 0, filename, strlen(filename),
                                 WideFileName, MAXPDSTRING)) {
        error("Unable to load %s", filename);
        return;
    }

    // Add a file source filter to the filter graph.
    RetVal = FilterGraph->AddSourceFilter(WideFileName, L"SOURCE", &VideoFilter);
    if (RetVal != S_OK || NULL == VideoFilter) {
        error("Unable to render %s", filename);
        return;
    }

    // Create an instance of the sample grabber filter. The filter allows frames to be
    // buffered from a video source.
    RetVal = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
                              IID_IBaseFilter, (void**)&SampleFilter);
    if (RetVal != S_OK || NULL == SampleFilter) {
        error("Unable to create SampleFilter interface %d", RetVal);
        return;
    }

    // Add the sample grabber filter to the filter graph.
    RetVal = FilterGraph->AddFilter(SampleFilter, L"Sample Grabber");
    if (RetVal != S_OK) {
        error("Unable to add SampleFilter %d", RetVal);
        return;
    }

    // Find an interface to the SampleGrabber from the SampleGrabber filter. The
    // SampleGrabber allows frames to be grabbed from the filter. SetBufferSamples(TRUE)
    // tells the SampleGrabber to buffer the frames. SetOneShot(FALSE) tells the
    // SampleGrabber to continuously grab frames. It also has the GetCurrentBuffer() method.
    RetVal = SampleFilter->QueryInterface(IID_ISampleGrabber, (void **)&SampleGrabber);
    if (RetVal != S_OK || NULL == SampleGrabber) {
        error("Unable to create SampleGrabber interface %d", RetVal);
        return;
    }

    // Set the media type that the SampleGrabber wants.
    // MEDIATYPE_Video selects only video and not interleaved audio and video.
    // MEDIASUBTYPE_RGB24 is the colorspace and format to deliver frames.
    // MediaType.formattype is GUID_NULL since it is handled later to get file info.
    memset(&MediaType, 0, sizeof(AM_MEDIA_TYPE));
    MediaType.majortype = MEDIATYPE_Video;
    MediaType.subtype = MEDIASUBTYPE_RGB24;
    MediaType.formattype = GUID_NULL;
    RetVal = SampleGrabber->SetMediaType(&MediaType);

    // Set the SampleGrabber to return continuous frames.
    RetVal = SampleGrabber->SetOneShot(FALSE);
    if (RetVal != S_OK) {
        error("Unable to setup sample grabber %d", RetVal);
        return;
    }

    // Set the SampleGrabber to copy the data to a buffer. This is only set to FALSE
    // when a callback is used.
    RetVal = SampleGrabber->SetBufferSamples(TRUE);
    if (RetVal != S_OK) {
        error("Unable to setup sample grabber %d", RetVal);
        return;
    }

    // Create the Null Renderer interface. The Null Renderer is used to disable
    // rendering of a video stream to a window.
    RetVal = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
                              IID_IBaseFilter, (void**)&NullFilter);
    if (RetVal != S_OK || NULL == NullFilter) {
        error("Unable to create NullFilter interface %d", RetVal);
        return;
    }

    // Add the Null Renderer filter to the FilterGraph.
    RetVal = FilterGraph->AddFilter(NullFilter, L"NullRenderer");
    if (RetVal != S_OK) {
        error("Unable to add NullFilter %d", RetVal);
        return;
    }

    // The DS filter chain is FileSource -> SampleGrabber -> NullRenderer.
    // DS can put any needed filters in the chain for format or colorspace
    // conversion, decompression or other transforms.

    // Connect the SampleFilter to the VideoFilter.
    RetVal = movieConnectFilters(FilterGraph, VideoFilter, SampleFilter);
    if (RetVal != S_OK) {
        error("Unable to connect filters %d", RetVal);
        return;
    }

    // Connect the NullFilter to the SampleFilter.
    RetVal = movieConnectFilters(FilterGraph, SampleFilter, NullFilter);
    if (RetVal != S_OK) {
        error("Unable to connect filters %d", RetVal);
        return;
    }

    // Set the time format to frames.
    Guid = TIME_FORMAT_FRAME;
    RetVal = MediaSeeking->SetTimeFormat(&Guid);
    if (RetVal != S_OK) {
        // If the frame time format is not available, default to 100-nanosecond increments.
        bFrameTime = FALSE;
        Guid = TIME_FORMAT_MEDIA_TIME;
        RetVal = MediaSeeking->SetTimeFormat(&Guid);
        if (RetVal != S_OK) {
            error("Unable to set video time format %d", RetVal);
            return;
        }
    }

    // Get the duration of the video. The format will be in the previously set time
    // format. This is compatible with the value returned from GetCurrentPosition.
    RetVal = MediaSeeking->GetDuration(&m_Duration);
    if (RetVal != S_OK) {
        error("Unable to get video duration %d", RetVal);
        return;
    }

    // Set the number of frames based on the time format used.
    if (TRUE == bFrameTime) {
        m_numFrames = m_Duration;
    } else {
        LONGLONG OutFormat;
        GUID OutGuid;

        OutGuid = TIME_FORMAT_FRAME;
        Guid = TIME_FORMAT_MEDIA_TIME;

        // Convert from the 100-nanosecond format to a number of frames.
        MediaSeeking->ConvertTimeFormat(&OutFormat, &OutGuid, m_Duration, &Guid);
        m_numFrames = OutFormat;
    }

    // Get the format of the connected media.
    RetVal = SampleGrabber->GetConnectedMediaType(&MediaType);
    if (RetVal != S_OK) {
        error("Unable to get media type %d", RetVal);
        return;
    }

    // The SampleGrabber will only return video of the 'FORMAT_VideoInfo' type.
    if (FORMAT_VideoInfo == MediaType.formattype && MediaType.pbFormat != NULL) {
        // The format returned is specific to the formattype.
        VIDEOINFOHEADER *VideoInfo = (VIDEOINFOHEADER *)MediaType.pbFormat;

        // Get the size of the image from the BitmapInfoHeader returned in the VIDEOINFOHEADER.
        m_xsize = VideoInfo->bmiHeader.biWidth;
        m_ysize = VideoInfo->bmiHeader.biHeight;
        m_csize = 3;
    } else {
        error("Invalid media type returned %s", filename);
        return;
    }

    // Allocate the video buffer if valid sizes were returned.
    if (m_xsize > 0 && m_ysize > 0 && m_csize > 0) {
        if (m_frame != NULL)
            delete [] m_frame;

        m_frame = new BYTE[m_xsize * m_ysize * m_csize];
        if (NULL == m_frame) {
            error("Unable to allocate memory for the video buffer %s", filename);
            return;
        }
    }

    // Release the MediaType.pbFormat data.
    FreeMediaType(MediaType);

    IBaseFilter *DVFilter;

    // If DV video is used, set the quality to 720 x 480.
    RetVal = FilterGraph->FindFilterByName(L"DV Video Decoder", &DVFilter);
    if (S_OK == RetVal && DVFilter != NULL) {
        IIPDVDec *IPDVDec;

        // Find the IIPDVDec interface.
        RetVal = DVFilter->QueryInterface(IID_IIPDVDec, (void **)&IPDVDec);
        if (S_OK == RetVal && IPDVDec != NULL) {
            // Set the property to DVRESOLUTION_FULL.
            IPDVDec->put_IPDisplay(DVRESOLUTION_FULL);

            // Release the interface.
            IPDVDec->Release();
        }

        // Release the interface.
        DVFilter->Release();
    }

    post("xsize %d ysize %d csize %d", m_xsize, m_ysize, m_csize);

    // Set up the pixBlock data based on the media type.
    // This is a guess at the fast path for pixels on Windows.
    m_pixBlock.image.xsize = m_xsize;
    m_pixBlock.image.ysize = m_ysize;
    m_pixBlock.image.csize = m_csize;
    m_pixBlock.image.format = GL_BGR_EXT;
    m_pixBlock.image.type = GL_UNSIGNED_BYTE;

    // Start the video stream.
    RetVal = MediaControl->Run();
    if (RetVal != S_OK && RetVal != S_FALSE) {
        error("Unable to start video %d", RetVal);
        return;
    }

    // Wait for the video to begin playing.
    while (TRUE) {
        OAFilterState FilterState;

        // Get the state and ensure it's not in an intermediate state.
        RetVal = MediaControl->GetState(0, &FilterState);
        if (RetVal != S_OK && RetVal != VFW_S_STATE_INTERMEDIATE) {
            error("Unable to run video %d", RetVal);
            return;
        }
        // Ensure the video is running.
        else if (RetVal == S_OK && State_Running == FilterState) {
            break;
        }
    }

    // Set the tex coords.
    prepareTexture();

    // Set the last frame to -1 so it will show the first frame.
    m_LastFrame = -1;

    m_haveMovie = TRUE;
}
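// A sketch of how a frame is later pulled out of the SampleGrabber configured
// above, via ISampleGrabber::GetCurrentBuffer: one call with a NULL buffer to
// query the size, then a second call to copy the buffered RGB24 frame. The
// helper name and error handling are illustrative, not pix_movieDS's code.
static bool grabCurrentFrame(ISampleGrabber* grabber, BYTE* frame, long frameBytes)
{
    long size = 0;

    // First call: ask how many bytes the currently buffered frame occupies.
    if (FAILED(grabber->GetCurrentBuffer(&size, NULL)) || size <= 0)
        return false;

    if (size > frameBytes)
        return false;  // destination too small; the caller must reallocate

    // Second call: copy the buffered frame into the destination.
    return SUCCEEDED(grabber->GetCurrentBuffer(&size, (long*)frame));
}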