Example #1
bool GLResource::black( Frame *dst )
{
	int w = dst->profile.getVideoWidth();
	int h = dst->profile.getVideoHeight();

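	// Fetch an RGBA FBO at the frame's dimensions; this can fail, so check the result.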
	FBO *fbo = getFBO( w, h, GL_RGBA );
	if ( !fbo )
		return false;

	glBindFramebuffer( GL_FRAMEBUFFER, fbo->fbo() );
	// Clear to opaque black explicitly instead of relying on the current GL clear color.
	glClearColor( 0.0f, 0.0f, 0.0f, 1.0f );
	glClear( GL_COLOR_BUFFER_BIT );
	glBindFramebuffer( GL_FRAMEBUFFER, 0 );

	dst->setFBO( fbo );
	return true;
}
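
A hedged usage sketch (GLResource and Frame are the project's own types, declared opaquely below; the clearToBlack helper and its logging are assumptions, and an active GL context must be current on the calling thread):

#include <QtGlobal>

class Frame;                                               // project type, opaque here
class GLResource { public: bool black( Frame *dst ); };    // as in Example #1

// Hypothetical call site: fill 'frame' with black, e.g. for an empty track.
static bool clearToBlack( GLResource *res, Frame *frame )
{
	if ( !res->black( frame ) ) {
		qWarning( "GLResource::black: no suitably sized FBO available" );
		return false;   // frame keeps its previous contents
	}
	return true;        // frame now carries a freshly cleared FBO
}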
Example #2
void Composer::movitRender( Frame *dst, bool update )
{
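	// Three phases: (1) describe the required effect chain as text,
	// (2) rebuild the Movit chain only if that description changed,
	// (3) push fresh frame data and filter parameters, then render.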
	int i, j, start=0;
	Frame *f;
	//QTime time;
	//time.start();
	
	Profile projectProfile = sampler->getProfile();

	// find the bottom-most frame to process
	for ( j = 0 ; j < dst->sample->frames.count(); ++j ) {
		if ( (f = dst->sample->frames[j]->frame) ) {
			start = j;
			break;
		}
	}

	// build a "description" of the required chain
	// processing frames from bottom to top
	i = start;
	double pts = sampler->currentPTS();
	QStringList currentDescriptor;
	int ow = projectProfile.getVideoWidth();
	int oh = projectProfile.getVideoHeight();
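	// ow/oh default to the project size and are overwritten below
	// with the GL size of the top-most frame.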
	while ( (f = getNextFrame( dst, i )) ) {
		FrameSample *sample = dst->sample->frames[i - 1];
		// input and filters
		movitFrameDescriptor( "-", f, &sample->videoFilters, currentDescriptor, &projectProfile );
		// transition
		if ( sample->transitionFrame.frame && !sample->transitionFrame.videoTransitionFilter.isNull() ) {
			// filters applied on first transition frame, if any
			currentDescriptor.append( "--" + sample->transitionFrame.videoTransitionFilter->getDescriptorFirst( pts, f, &projectProfile ) );
			movitFrameDescriptor( "->", sample->transitionFrame.frame, &sample->transitionFrame.videoFilters, currentDescriptor, &projectProfile );
			// filters applied on second transition frame, if any
			currentDescriptor.append( "-->" + sample->transitionFrame.videoTransitionFilter->getDescriptorSecond( pts, sample->transitionFrame.frame, &projectProfile ) );
			currentDescriptor.append( "-<" + sample->transitionFrame.videoTransitionFilter->getDescriptor( pts, sample->transitionFrame.frame, &projectProfile ) );
		}
		// overlay
		if ( (i - 1) > start )
			currentDescriptor.append( GLOverlay().getDescriptor( pts, f, &projectProfile ) );
		ow = f->glWidth;
		oh = f->glHeight;
	}
	// background
	currentDescriptor.append( movitBackground.getDescriptor( pts, NULL, &projectProfile ) );
	// output
	if (outputResize.width() > 0) {
		currentDescriptor.append( QString("Resized output %1 %2").arg( outputResize.width() ).arg( outputResize.height() ) );
	}
	else {
		currentDescriptor.append( QString("OUTPUT %1 %2").arg( ow ).arg( oh ) );
	}

	// rebuild the chain if necessary
	if ( currentDescriptor != movitChain.descriptor ) {
		for ( int k = 0; k < currentDescriptor.count(); k++ )
			printf("%s\n", currentDescriptor[k].toLocal8Bit().data());
		movitChain.descriptor = currentDescriptor;
		movitChain.reset();
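		// Movit's EffectChain constructor takes the output aspect ratio
		// (numerator, denominator), hence the SAR-corrected width over the height.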
		movitChain.chain = new EffectChain( projectProfile.getVideoSAR() * projectProfile.getVideoWidth(), projectProfile.getVideoHeight(), movitPool );

		i = start;
		Effect *last = NULL, *current = NULL;
		
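		// Second pass over the same frames: build the chain the descriptor above describes.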
		while ( (f = getNextFrame( dst, i )) ) {
			last = current;
			// input and filters
			MovitBranch *branch;
			FrameSample *sample = dst->sample->frames[i - 1];
			current = movitFrameBuild( f, &sample->videoFilters, &branch );
			// transition
			if ( sample->transitionFrame.frame && !sample->transitionFrame.videoTransitionFilter.isNull() ) {
				QList<Effect*> el = sample->transitionFrame.videoTransitionFilter->getMovitEffects();
				
				// filters applied on first transition frame, if any
				QList<Effect*> first = sample->transitionFrame.videoTransitionFilter->getMovitEffectsFirst();
				for ( int l = 0; l < first.count(); ++l )
					current = movitChain.chain->add_effect( first.at( l ) );
				
				MovitBranch *branchTrans;
				Effect *currentTrans = movitFrameBuild( sample->transitionFrame.frame, &sample->transitionFrame.videoFilters, &branchTrans );
				// filters applied on second transition frame, if any
				QList<Effect*> second = sample->transitionFrame.videoTransitionFilter->getMovitEffectsSecond();
				for ( int l = 0; l < second.count(); ++l )
					currentTrans = movitChain.chain->add_effect( second.at( l ) );
				
				branchTrans->filters.append( new MovitFilter( el ) );
				for ( int l = 0; l < el.count(); ++l )
					current = movitChain.chain->add_effect( el.at( l ), current, currentTrans );
			}
			// overlay
			if ( last ) {
				GLOverlay *overlay = new GLOverlay();
				QList<Effect*> el = overlay->getMovitEffects();
				branch->overlay = new MovitFilter( el, overlay );
				for ( int l = 0; l < el.count(); ++l )
					current = movitChain.chain->add_effect( el.at( l ), last, current );
			}
		}
		// background
		QList<Effect*> el = movitBackground.getMovitEffects();
		movitChain.chain->add_effect( el[0] );
		// output resizer
		if (outputResize.width() > 0) {
			Effect *e = new ResampleEffect();
			e->set_int( "width", outputResize.width() );
			e->set_int( "height", outputResize.height() );
			movitChain.chain->add_effect( e );
		}
		// output
		movitChain.chain->set_dither_bits( 8 );
		ImageFormat output_format;
		output_format.color_space = COLORSPACE_sRGB;
		output_format.gamma_curve = GAMMA_REC_709;
		movitChain.chain->add_output( output_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED );
		movitChain.chain->finalize();
	}

	// update inputs data and filters parameters
	i = start; j = 0;
	int w = projectProfile.getVideoWidth();
	int h = projectProfile.getVideoHeight();
	while ( (f = getNextFrame( dst, i )) ) {
		f->glWidth = f->profile.getVideoWidth();
		f->glHeight = f->profile.getVideoHeight();
		f->glSAR = f->profile.getVideoSAR();
		f->glOVD = 0;
		f->glOVDTransformList.clear();
		
		// input and filters
		MovitBranch *branch = movitChain.branches[ j++ ];
		branch->input->process( f, &gl );
		int vf = 0;
		FrameSample *sample = dst->sample->frames[i - 1];
		for ( int k = 0; k < branch->filters.count(); ++k ) { 
			if ( !branch->filters[k]->filter )
				sample->videoFilters[vf++]->process( branch->filters[k]->effects, pts, f, &projectProfile );
			else
				branch->filters[k]->filter->process( branch->filters[k]->effects, pts, f, &projectProfile );
		}
		// transition
		if ( sample->transitionFrame.frame && !sample->transitionFrame.videoTransitionFilter.isNull() ) {
			sample->transitionFrame.frame->glWidth = sample->transitionFrame.frame->profile.getVideoWidth();
			sample->transitionFrame.frame->glHeight = sample->transitionFrame.frame->profile.getVideoHeight();
			sample->transitionFrame.frame->glSAR = sample->transitionFrame.frame->profile.getVideoSAR();
			MovitBranch *branchTrans = movitChain.branches[ j++ ];
			branchTrans->input->process( sample->transitionFrame.frame, &gl );
			int tvf = 0;
			int k;
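			// All slots but the last hold ordinary frame filters; the last
			// slot is the transition itself, appended last during the build.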
			for ( k = 0; k < branchTrans->filters.count() - 1; ++k ) { 
				if ( !branchTrans->filters[k]->filter )
					sample->transitionFrame.videoFilters[tvf++]->process( branchTrans->filters[k]->effects, pts, sample->transitionFrame.frame, &projectProfile );
				else
					branchTrans->filters[k]->filter->process( branchTrans->filters[k]->effects, pts, sample->transitionFrame.frame, &projectProfile );
			}
			sample->transitionFrame.videoTransitionFilter->process( branchTrans->filters[k]->effects, pts, f, sample->transitionFrame.frame, &projectProfile );
		}
		// overlay
		if ( branch->overlay && branch->overlay->filter )
			branch->overlay->filter->process( branch->overlay->effects, pts, f, f, &projectProfile );
		
		w = f->glWidth;
		h = f->glHeight;
	}

	// render
	waitFence();
	// output resizer
	if (outputResize.width() > 0) {
		w = outputResize.width();
		h = outputResize.height();
	}
	FBO *fbo = gl.getFBO( w, h, GL_RGBA );
	if ( !fbo )
		return;
	movitChain.chain->render_to_fbo( fbo->fbo(), w, h );
	
	dst->glWidth = w;
	dst->glHeight = h;
	dst->glSAR = projectProfile.getVideoSAR();
	if ( !update ) {
		dst->setVideoFrame( Frame::GLTEXTURE, w, h, dst->glSAR,
						projectProfile.getVideoInterlaced(), projectProfile.getVideoTopFieldFirst(),
						pts, projectProfile.getVideoFrameDuration() );
	}
	dst->setFBO( fbo );
	dst->setFence( gl.getFence() );
	composerFence = gl.getFence();
	glFlush();
	
	//qDebug() << "elapsed" << time.elapsed();
}
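
The QStringList comparison above is what keeps this per-frame function cheap: EffectChain::finalize() compiles shaders, so the chain is rebuilt only when the textual description of the wanted graph changes. A minimal sketch of that caching pattern, with hypothetical names (CachedChain and the rebuild callback are illustrative, not from the project):

#include <QStringList>
#include <functional>

// Hypothetical descriptor-keyed cache: rebuild an expensive object only
// when the textual description of what we need differs from last time.
struct CachedChain {
	QStringList descriptor;      // what the current chain was built for
	void *chain = nullptr;       // stands in for EffectChain*

	void ensure( const QStringList &wanted, const std::function<void*()> &rebuild )
	{
		if ( chain && wanted == descriptor )
			return;              // steady state: reuse the existing chain
		descriptor = wanted;
		chain = rebuild();       // rare, expensive path (shader compilation)
	}
};

In steady state only the third phase runs each frame: build the descriptor, compare it, update input data and filter parameters, and render.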
Example #3
void GraphicsContext3DInternal::swapBuffers()
{
    if (s_loggingEnabled)
        m_webGLFPSTimer->tick();

    LOGWEBGL("+swapBuffers()");

    MutexLocker lock(m_fboMutex);
    FBO* fbo = m_currentFBO;
    if (fbo == 0)
        return;

    makeContextCurrent();

    bool mustRestoreFBO = (m_boundFBO != fbo->fbo());
    if (mustRestoreFBO) {
        glBindFramebuffer(GL_FRAMEBUFFER, fbo->fbo());
    }

    // Create the fence sync and notify the sync thread
    fbo->setSync();
    //[CAPPFIX_WEB_WEBGL] - Improve UI Response Begin
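    // In improve-UI-response mode, skip the glFinish()/glFlush() below entirely.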
    if (!m_needImproveUIResponseMode)
    //[CAPPFIX_WEB_WEBGL_END]
#if USE(SHARED_TEXTURE_WEBGL)
        glFinish();
#else
        glFlush();
#endif

    m_queuedBuffers.append(fbo);
    m_threadCondition.broadcast();

#if USE(SHARED_TEXTURE_WEBGL)
    // Dequeue a new buffer
    fbo = dequeueBuffer();
#else
    //[CAPPFIX_WEB_WEBGL] - Improve UI Response Begin
    int tick = 0;
    if (!m_needImproveUIResponseMode)
        tick = getTickCount();
    //[CAPPFIX_WEB_WEBGL_END]
    // Dequeue a new buffer
    fbo = dequeueBuffer();

    //[CAPPFIX_WEB_WEBGL] - Improve UI Response Begin
    if (!m_needImproveUIResponseMode && getTickCount() - tick > SWP_WebGL_IUR_Time) {
        m_needImproveUIResponseMode = true;
        LOGWEBGL("On needImproveUIResponseMode");
    }
    //[CAPPFIX_WEB_WEBGL_END]
#endif
    m_currentFBO = fbo;

    if (!mustRestoreFBO) {
        m_boundFBO = m_currentFBO->fbo();
    }
    glBindFramebuffer(GL_FRAMEBUFFER, m_boundFBO);
    m_canvasDirty = false;
    m_layerComposited = true;
    LOGWEBGL("-swapBuffers()");
}
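
The swap path above is a small producer/consumer swap-chain: the GL thread fences the finished FBO, queues it for the compositor, wakes that thread, and blocks in dequeueBuffer() until a recycled FBO comes back. A minimal sketch of such a queue, under stated assumptions (Buffer, BufferQueue, and the std::mutex/std::condition_variable plumbing are illustrative; the real code uses WTF's MutexLocker and ThreadCondition):

#include <condition_variable>
#include <deque>
#include <mutex>

struct Buffer;  // stands in for the FBO class

// Hypothetical two-list swap queue between a render thread and a compositor.
class BufferQueue {
public:
    // Render thread: publish a finished buffer and wake the compositor.
    void queueFinished(Buffer* buffer)
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        m_finished.push_back(buffer);
        m_condition.notify_all();  // analogous to m_threadCondition.broadcast()
    }

    // Render thread: block until the compositor has recycled a buffer.
    Buffer* dequeueFree()
    {
        std::unique_lock<std::mutex> lock(m_mutex);
        m_condition.wait(lock, [this] { return !m_free.empty(); });
        Buffer* buffer = m_free.front();
        m_free.pop_front();
        return buffer;
    }

    // Compositor thread: hand a consumed buffer back for reuse.
    void recycle(Buffer* buffer)
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        m_free.push_back(buffer);
        m_condition.notify_all();
    }

private:
    std::mutex m_mutex;
    std::condition_variable m_condition;
    std::deque<Buffer*> m_finished;  // rendered, waiting to be composited
    std::deque<Buffer*> m_free;      // composited, ready for reuse
};

The per-buffer fence set via fbo->setSync() is presumably what lets the consumer wait on the GPU work for that specific buffer, so the producer only needs a flush (or a finish in the shared-texture path) rather than a full sync on every swap.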