void SoundManager::poll() { static ticks_t lastUpdate = 0; ticks_t now = g_clock.millis(); if(now - lastUpdate < POLL_DELAY) return; lastUpdate = now; ensureContext(); for(auto it = m_sources.begin(); it != m_sources.end();) { SoundSourcePtr source = *it; source->update(); if(!source->isPlaying()) it = m_sources.erase(it); else ++it; } for(auto it : m_channels) { it.second->update(); } if(m_context) { alcProcessContext(m_context); } }
void Mesh::renderShadowMap( const Matrix4f& model, const Matrix4f& view, const Matrix4f& projection, bool useRawZ) const { ensureContext(); ensureUpdatedVertexArray(); m_vertexArray.bind(); const Shader& shader = BurnShaders::getShader(BurnShaders::VSM); shader.setUniform("gModelMatrix", model); shader.setUniform("gViewMatrix", view); shader.setUniform("gProjectionMatrix", projection); shader.setUniform("gUseRawZ", useRawZ); shader.activate(); if(m_renderTechnique == PLAIN){ glDrawArrays( GL_TRIANGLES, 0, m_vertexCount); }else{ m_indexBuffer.bind(GL_ELEMENT_ARRAY_BUFFER); glDrawElements( GL_TRIANGLES, m_indexCount, GL_UNSIGNED_SHORT, (void*)0); } m_vertexArray.unbind(); }
// Starts playback of a sound file, optionally fading it in.
// A gain of 0 is treated as "unspecified" and replaced by full volume.
// Returns the created source, or nullptr when audio is disabled or the
// file cannot be played.
SoundSourcePtr SoundManager::play(std::string filename, float fadetime, float gain)
{
    if(!m_audioEnabled)
        return nullptr;

    ensureContext();

    // Substitute the default volume for an unset gain.
    if(gain == 0)
        gain = 1.0f;

    filename = resolveSoundFile(filename);
    SoundSourcePtr source = createSoundSource(filename);
    if(!source) {
        g_logger.error(stdext::format("unable to play '%s'", filename));
        return nullptr;
    }

    source->setName(filename);
    source->setRelative(true);
    source->setGain(gain);

    // Optional fade-in over the requested duration.
    if(fadetime > 0)
        source->setFading(StreamSoundSource::FadingOn, fadetime);

    source->play();
    m_sources.push_back(source);
    return source;
}
void SoundManager::terminate() { ensureContext(); for(auto it = m_streamFiles.begin(); it != m_streamFiles.end();++it) { auto& future = it->second; future.wait(); } m_streamFiles.clear(); m_sources.clear(); m_buffers.clear(); m_channels.clear(); m_audioEnabled = false; alcMakeContextCurrent(nullptr); if(m_context) { alcDestroyContext(m_context); m_context = nullptr; } if(m_device) { alcCloseDevice(m_device); m_device = nullptr; } }
// Returns the channel object for the given index, creating it lazily
// on first access.
SoundChannelPtr SoundManager::getChannel(int channel)
{
    ensureContext();

    // operator[] default-constructs a null pointer for a new index;
    // fill the slot with a real channel in that case.
    SoundChannelPtr& slot = m_channels[channel];
    if(!slot)
        slot = SoundChannelPtr(new SoundChannel(channel));
    return slot;
}
// Finalizes decoding of the current picture: (re)establishes the VA
// context for its PPS, applies reference marking, decodes, and stores
// the result. On any failure the picture is discarded and DECODE_FAIL
// is returned (matching the original goto-based error path, which
// ignored the intermediate status value).
Decode_Status VaapiDecoderH264::decodeCurrentPicture()
{
    if (!m_currentPicture)
        return DECODE_SUCCESS;

    // Short-circuit chain preserves the original step ordering.
    bool ok = (ensureContext(m_currentPicture->m_pps) == DECODE_SUCCESS)
        && markingPicture(m_currentPicture)
        && m_currentPicture->decodePicture()
        && storeDecodedPicture(m_currentPicture);

    if (!ok) {
        delete m_currentPicture;
        m_currentPicture = NULL;
        return DECODE_FAIL;
    }
    return DECODE_SUCCESS;
}
// Makes the window surface current for painting, if an EGL context
// can be obtained for the widget.
void QVGEGLWindowSurfaceDirect::beginPaint(QWidget *widget)
{
    if (QEglContext *context = ensureContext(widget)) {
        context->makeCurrent(windowSurface);
        isPaintingActive = true;
    }
}
// Creates and initializes a GL context attached to a window.
// Ownership of the returned context passes to the caller.
GLContext *GLContext::create(const ContextSettings &settings, const WindowBase *owner, uint32 bpp)
{
    // Context creation may itself require GL extensions, so guarantee
    // that some context is current before building the new one.
    ensureContext();

    GLContext *newContext = new ContextBase(sharedContext, settings, owner, bpp);
    newContext->init();
    return newContext;
}
// Creates and initializes an off-window GL context with the given size.
// Ownership of the returned context passes to the caller.
GLContext *GLContext::create(const ContextSettings &settings, uint32 width, uint32 height)
{
    // Ensure some context is current first — creation may need extensions.
    ensureContext();

    GLContext *newContext = new ContextBase(sharedContext, settings, width, height);
    newContext->init();
    return newContext;
}
// Destructor: decrements the shared use count and, when this was the
// last owner, deletes every per-thread GL vertex array object and the
// count itself.
VertexArray::~VertexArray()
{
    if(--(*m_count) == 0){
        // Deleting GL objects requires a current context.
        ensureContext();
        // One VAO exists per thread that used this array; free them all.
        for(std::map<void*, ThreadId>::iterator it = m_threadIds.begin(); it != m_threadIds.end(); ++it){
            glDeleteVertexArrays( 1, &(it->second.id));
        }
        delete m_count;
    }
}
// Creates and initializes a GL context bound to an existing window.
// The caller becomes the owner of the returned pointer.
GlContext* GlContext::create(const ContextSettings& settings, const WindowImpl* owner, unsigned int bitsPerPixel)
{
    // Context creation may need extensions, which require an active
    // context to query — so make one current first.
    ensureContext();

    GlContext* newContext = new ContextType(sharedContext, settings, owner, bitsPerPixel);
    newContext->initialize();
    return newContext;
}
// Creates and initializes an off-screen GL context of the given size.
// The caller becomes the owner of the returned pointer.
GlContext* GlContext::create(const ContextSettings& settings, unsigned int width, unsigned int height)
{
    // An active context is needed before creating another (extension
    // queries during creation require one).
    ensureContext();

    GlContext* newContext = new ContextType(sharedContext, settings, width, height);
    newContext->initialize();
    return newContext;
}
void SoundManager::stopAll() { ensureContext(); for(const SoundSourcePtr& source : m_sources) { source->stop(); } for(auto it : m_channels) { it.second->stop(); } }
// Enables or disables audio output. Disabling stops all currently
// playing sources; re-enabling does not restart them.
void SoundManager::setAudioEnabled(bool enable)
{
    if(m_audioEnabled == enable)
        return;

    m_audioEnabled = enable;
    if(enable)
        return;

    // Audio was just turned off — silence everything.
    ensureContext();
    for(const SoundSourcePtr& source : m_sources)
        source->stop();
}
// Decodes a single slice NAL unit: parses the slice header, ensures the
// VA context matches the slice's PPS, starts a new picture when the
// slice begins one, and attaches the slice to the current picture.
// On error the freshly allocated slice (if any) is deleted and the
// failure status returned; ownership of a successfully added slice
// passes to m_currentPicture.
Decode_Status VaapiDecoderH264::decodeSlice(H264NalUnit * nalu)
{
    Decode_Status status;
    VaapiPictureH264 *picture;          /* NOTE(review): unused */
    VaapiSliceH264 *slice = NULL;
    H264SliceHdr *sliceHdr;
    H264SliceHdr tmpSliceHdr;
    H264ParserResult result;

    /* parser the slice header info */
    memset((void *) &tmpSliceHdr, 0, sizeof(tmpSliceHdr));
    result = h264_parser_parse_slice_hdr(&m_parser, nalu, &tmpSliceHdr, true, true);
    if (result != H264_PARSER_OK) {
        status = getStatus(result);
        goto error;
    }

    /* check info and reset VA resource if necessary */
    status = ensureContext(tmpSliceHdr.pps);
    if (status != DECODE_SUCCESS)
        return status;                  /* no slice allocated yet — plain return is safe */

    /* construct slice and parsing slice header */
    slice = new VaapiSliceH264(m_VADisplay, m_VAContext, nalu->data + nalu->offset, nalu->size);
    sliceHdr = &(slice->m_sliceHdr);
    /* copy the already-parsed header into the slice object */
    memcpy((void *) sliceHdr, (void *) &tmpSliceHdr, sizeof(*sliceHdr));

    /* a new picture starts here: finish/emit the previous one first */
    if (isNewPicture(nalu, sliceHdr)) {
        status = decodePicture(nalu, sliceHdr);
        if (status != DECODE_SUCCESS)
            goto error;
    }

    if (!fillSlice(slice, nalu)) {
        status = DECODE_FAIL;
        goto error;
    }

    /* ownership of `slice` transfers to the current picture */
    m_currentPicture->addSlice((VaapiSlice *) slice);
    return DECODE_SUCCESS;

  error:
    /* release the slice if it was allocated but never handed over */
    if (slice)
        delete slice;
    return status;
}
// Visits an area element: builds it as a building footprint if its
// style marks it as one, otherwise ignores it.
void visitArea(const utymap::entities::Area& area)
{
    Style elementStyle = context_.styleProvider.forElement(area, context_.quadKey.levelOfDetail);

    // NOTE this might happen if relation contains not a building
    if (!isBuilding(elementStyle))
        return;

    bool freshContext = ensureContext(area);
    polygon_->addContour(toPoints(area.coordinates));
    build(area, elementStyle);
    completeIfNecessary(freshContext);
}
// Returns the calling thread's vertex-array object record, creating a
// new VAO for the thread on first use (GL objects are per-thread here).
// FIX: the original performed three map lookups (find + two operator[]);
// this version does a single find and reuses the insert iterator.
VertexArray::ThreadId* VertexArray::getThreadId() const
{
    void* currentThread = Thread::current();
    auto it = m_threadIds.find(currentThread);
    if(it == m_threadIds.end()){
        ThreadId tid;
        // Creating a GL object requires a current context.
        ensureContext();
        glGenVertexArrays( 1, &tid.id);
        it = m_threadIds.insert(std::make_pair(currentThread, tid)).first;
    }
    return &it->second;
}
// Finishes a paint cycle on the VGImage-backed surface: flushes pending
// OpenVG rendering (if painting was active) and releases the context.
void QVGEGLWindowSurfaceVGImage::endPaint(QWidget *widget, const QRegion& region, QImage *image)
{
    Q_UNUSED(region);
    Q_UNUSED(image);

    QEglContext *context = ensureContext(widget);
    if (!context)
        return;

    if (backBufferSurface != EGL_NO_SURFACE) {
        if (isPaintingActive)
            vgFlush();
        context->lazyDoneCurrent();
    }
    isPaintingActive = false;
}
void Sprite::render(const Shader& shader) const { // We need an OpenGL context ensureContext(); // Our data has to be uploaded ensureUpdatedVertexArray(); // Use shader shader.activate(); // Render m_vertexArray.bind(); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); m_vertexArray.unbind(); }
void SoundManager::poll() { static ticks_t lastUpdate = 0; ticks_t now = g_clock.millis(); if(now - lastUpdate < POLL_DELAY) return; lastUpdate = now; ensureContext(); for(auto it = m_streamFiles.begin(); it != m_streamFiles.end();) { StreamSoundSourcePtr source = it->first; auto& future = it->second; if(future.is_ready()) { SoundFilePtr sound = future.get(); if(sound) source->setSoundFile(sound); else source->stop(); it = m_streamFiles.erase(it); } else { ++it; } } for(auto it = m_sources.begin(); it != m_sources.end();) { SoundSourcePtr source = *it; source->update(); if(!source->isPlaying()) it = m_sources.erase(it); else ++it; } for(auto it : m_channels) { it.second->update(); } if(m_context) { alcProcessContext(m_context); } }
// Scrolls a rectangular region of the surface by (dx, dy) using
// vgCopyPixels. Only available when QVG_BUFFER_SCROLLING is defined;
// returns false otherwise so the caller falls back to a full repaint.
bool QVGEGLWindowSurfaceDirect::scroll(QWidget *widget, const QRegion& area, int dx, int dy)
{
#ifdef QVG_BUFFER_SCROLLING
    QEglContext *context = ensureContext(widget);
    if (context) {
        context->makeCurrent(windowSurface);

        // OpenVG's origin is bottom-left while Qt's is top-left, so the
        // y coordinate must be flipped (and dy negated on the copy).
        QRect rect = area.boundingRect();
        int srcX = rect.x();
        int srcY = size.height() - rect.y() - rect.height();

        vgSeti(VG_SCISSORING, VG_FALSE);
        vgCopyPixels(srcX + dx, srcY - dy, srcX, srcY, rect.width(), rect.height());

        context->lazyDoneCurrent();
        return true;
    }
#endif
    return false;
}
// Loads a sound file into the buffer cache so later playback starts
// instantly. Files larger than MAX_CACHE_SIZE are not cached.
void SoundManager::preload(std::string filename)
{
    filename = resolveSoundFile(filename);

    // Already cached — nothing to do.
    if(m_buffers.find(filename) != m_buffers.end())
        return;

    ensureContext();
    SoundFilePtr soundFile = SoundFile::loadSoundFile(filename);

    // only keep small files
    if(!soundFile || soundFile->getSize() > MAX_CACHE_SIZE)
        return;

    SoundBufferPtr buffer = SoundBufferPtr(new SoundBuffer);
    if(buffer->fillBuffer(soundFile))
        m_buffers[filename] = buffer;
}
// Finishes a paint cycle on the direct window surface: swaps buffers
// when double-buffered, otherwise flushes OpenVG rendering.
void QVGEGLWindowSurfaceDirect::endPaint(QWidget *widget, const QRegion& region, QImage *image)
{
    Q_UNUSED(region);
    Q_UNUSED(image);

    QEglContext *context = ensureContext(widget);
    if (!context)
        return;

    if (needToSwap) {
        // Double-buffered: present the frame. The surface may not be
        // current if beginPaint was skipped, so re-bind it first.
        if (!isPaintingActive)
            context->makeCurrent(windowSurface);
        context->swapBuffers(windowSurface);
        context->lazyDoneCurrent();
    } else if (isPaintingActive) {
        // Single-buffered: make sure rendering reaches the screen.
        vgFlush();
        context->lazyDoneCurrent();
    }
    isPaintingActive = false;
}
// Shuts the sound system down: drops all sources/buffers/channels,
// then tears down the OpenAL context and device.
void SoundManager::terminate()
{
    ensureContext();

    m_sources.clear();
    m_buffers.clear();
    m_channels.clear();
    m_audioEnabled = false;

    // OpenAL teardown order matters: detach the context before
    // destroying it, then close the device last.
    alcMakeContextCurrent(nullptr);
    if(m_context) {
        alcDestroyContext(m_context);
        m_context = nullptr;
    }
    if(m_device) {
        alcCloseDevice(m_device);
        m_device = nullptr;
    }
}
// Copy assignment with manual reference counting: releases this
// object's share of the per-thread VAOs (deleting them if it was the
// last owner), then joins `other`'s sharing group.
VertexArray& VertexArray::operator=(const VertexArray& other)
{
    // Self-assignment would corrupt the count — bail out early.
    if(this == &other)
        return *this;

    if(--(*m_count) == 0){
        // Last owner: free every per-thread GL vertex array object.
        ensureContext();
        for(std::map<void*, ThreadId>::iterator it = m_threadIds.begin(); it != m_threadIds.end(); ++it){
            glDeleteVertexArrays( 1, &(it->second.id));
        }
        delete m_count;
    }

    // Adopt the other array's state and bump its shared count.
    m_threadIds = other.m_threadIds;
    m_count = other.m_count;
    ++(*m_count);
    return *this;
}
// Decodes one H.265 slice NAL unit: parses its header, ensures the VA
// context matches the active SPS, starts a new picture when the slice
// opens one (flushing the previous picture first), and fills slice
// parameters into the current picture.
Decode_Status VaapiDecoderH265::decodeSlice(H265NalUnit *nalu)
{
    // Header is heap-allocated with a custom deleter so it can be kept
    // as m_prevSlice past this call.
    SharedPtr<H265SliceHdr> currSlice(new H265SliceHdr(), h265SliceHdrFree);
    H265SliceHdr* slice = currSlice.get();
    H265ParserResult result;
    Decode_Status status;

    memset(slice, 0, sizeof(H265SliceHdr));
    result = h265_parser_parse_slice_hdr(m_parser, nalu, slice);
    if (result == H265_PARSER_ERROR) {
        return DECODE_INVALID_DATA;
    }
    // A broken link is tolerated: skip the slice without failing.
    if (result == H265_PARSER_BROKEN_LINK) {
        return DECODE_SUCCESS;
    }

    // Reconfigure the VA context if the SPS changed.
    status = ensureContext(slice->pps->sps);
    if (status != DECODE_SUCCESS) {
        return status;
    }

    if (slice->first_slice_segment_in_pic_flag) {
        // New picture begins: finish and output the previous one.
        status = decodeCurrent();
        if (status != DECODE_SUCCESS)
            return status;
        m_current = createPicture(slice, nalu);
        // RASL pictures are dropped after a stream break.
        if (m_noRaslOutputFlag && isRasl(nalu))
            return DECODE_SUCCESS;
        if (!m_current || !m_dpb.init(m_current, slice, nalu, m_newStream))
            return DECODE_INVALID_DATA;
        if (!fillPicture(m_current, slice) || !fillIqMatrix(m_current, slice))
            return DECODE_FAIL;
    }
    if (!m_current)
        return DECODE_FAIL;
    if (!fillSlice(m_current, slice, nalu))
        return DECODE_FAIL;
    // Remember the header of the last independent slice segment;
    // dependent segments inherit from it.
    if (!slice->dependent_slice_segment_flag)
        std::swap(currSlice, m_prevSlice);
    return status;
}
// Begins a paint cycle on a VGImage-backed window surface, (re)creating
// the VGImage back buffer and its pbuffer surface when needed, then
// making the appropriate surface current.
void QVGEGLWindowSurfaceVGImage::beginPaint(QWidget *widget)
{
    QEglContext *context = ensureContext(widget);
    if (context) {
        if (recreateBackBuffer || backBufferSurface == EGL_NO_SURFACE) {
            // Create a VGImage object to act as the back buffer
            // for this window.  We have to create the VGImage with a
            // current context, so activate the main surface for the window.
            context->makeCurrent(mainSurface());
            recreateBackBuffer = false;

            // Drop any stale EGL surface / VGImage before recreating.
            if (backBufferSurface != EGL_NO_SURFACE) {
                eglDestroySurface(QEgl::display(), backBufferSurface);
                backBufferSurface = EGL_NO_SURFACE;
            }
            if (backBuffer != VG_INVALID_HANDLE) {
                vgDestroyImage(backBuffer);
            }

            VGImageFormat format = qt_vg_config_to_vg_format(context);
            backBuffer = vgCreateImage(format, size.width(), size.height(), VG_IMAGE_QUALITY_FASTER);
            if (backBuffer != VG_INVALID_HANDLE) {
                // Create an EGL surface for rendering into the VGImage.
                backBufferSurface = eglCreatePbufferFromClientBuffer
                    (QEgl::display(), EGL_OPENVG_IMAGE,
                     (EGLClientBuffer)(backBuffer),
                     context->config(), NULL);
                // If the pbuffer could not be wrapped around the image,
                // release the image too so both stay consistent.
                if (backBufferSurface == EGL_NO_SURFACE) {
                    vgDestroyImage(backBuffer);
                    backBuffer = VG_INVALID_HANDLE;
                }
            }
        }
        // Paint into the back buffer when available, otherwise fall
        // back to rendering directly onto the window's main surface.
        if (backBufferSurface != EGL_NO_SURFACE)
            context->makeCurrent(backBufferSurface);
        else
            context->makeCurrent(mainSurface());
        isPaintingActive = true;
    }
}
// Decodes a single VP9 frame: ensures the VA context fits the frame
// header, prepares a picture/surface, handles the show-existing-frame
// shortcut, runs the decode, and updates the reference frame slots.
Decode_Status VaapiDecoderVP9::decode(const Vp9FrameHdr* hdr, const uint8_t* data, uint32_t size, uint64_t timeStamp)
{
    Decode_Status ret;
    ret = ensureContext(hdr);
    if (ret != DECODE_SUCCESS)
        return ret;

    PicturePtr picture = createPicture(timeStamp);
    if (!picture)
        return DECODE_MEMORY_FAIL;
    // VP9 allows per-frame resolution changes within a stream.
    if (!picture->getSurface()->resize(hdr->width, hdr->height)) {
        ERROR("resize to %dx%d failed", hdr->width, hdr->height);
        return DECODE_MEMORY_FAIL;
    }

    if (hdr->show_existing_frame) {
        // No decoding needed: re-present a previously decoded surface.
        SurfacePtr& surface = m_reference[hdr->frame_to_show];
        if (!surface) {
            // A missing slot is tolerated rather than treated as fatal.
            ERROR("frame to show is invalid, idx = %d", hdr->frame_to_show);
            return DECODE_SUCCESS;
        }
        picture->setSurface(surface);
        return outputPicture(picture);
    }

    if (!ensurePicture(picture, hdr))
        return DECODE_FAIL;
    if (!ensureSlice(picture, data, size))
        return DECODE_FAIL;
    ret = picture->decode();
    if (ret != DECODE_SUCCESS)
        return ret;

    // Refresh the reference slots per the header's refresh flags.
    updateReference(picture, hdr);

    // Frames with show_frame unset are decode-only (reference updates).
    if (hdr->show_frame)
        return outputPicture(picture);
    return DECODE_SUCCESS;
}
// Visits a relation element: a building multipolygon is assembled into
// a single polygon and built once; any other relation simply forwards
// each member through this visitor.
void visitRelation(const utymap::entities::Relation& relation)
{
    if (relation.elements.empty())
        return;

    bool freshContext = ensureContext(relation);
    Style elementStyle = context_.styleProvider.forElement(relation, context_.quadKey.levelOfDetail);

    if (isMultipolygon(elementStyle) && isBuilding(elementStyle)) {
        // Gather all member contours into the shared polygon, then build.
        MultiPolygonVisitor visitor(polygon_);
        for (const auto& element : relation.elements)
            element->accept(visitor);
        build(relation, elementStyle);
    } else {
        // Recurse into each member individually.
        for (const auto& element : relation.elements)
            element->accept(*this);
    }

    completeIfNecessary(freshContext);
}
void Sprite::render(const Matrix4f& view, const Matrix4f& projection) const { // We must have a loaded texture to render if(!m_texture.isLoaded()) return; // We need an OpenGL context ensureContext(); // Our data has to be uploaded ensureUpdatedVertexArray(); // Create Transformable that takes Rectangle's dimensions into account Transformable2D t; t.setPosition(getPosition()); t.setRotation(getRotation()); t.setScale(getScale() * getDimensions()); // Setup shader const Shader& shader = BurnShaders::getShader(BurnShaders::SPRITE2D); shader.resetTextureUnitCounter(); shader.setUniform("gModelMatrix", t.getModelMatrix()); shader.setUniform("gViewMatrix", view); shader.setUniform("gProjectionMatrix", projection); shader.setUniform("gColor", getColor()); shader.setUniform("gUvStart", m_uvStart); shader.setUniform("gUvEnd", m_uvEnd); shader.bindTexture("gTextureSampler", m_texture); shader.activate(); // Render m_vertexArray.bind(); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); m_vertexArray.unbind(); }