Example #1
void QuickTimeSampleApp::draw()
{
	glClearColor( 0.0f, 0.0f, 0.0f, 0.0f );
	glClear( GL_COLOR_BUFFER_BIT );

	// Draw the most recent movie frame, stretched to fill the window.
	if( mMovie && mTexture ) {
		mTexture.draw( 0, 0, getWindowWidth(), getWindowHeight() );
	}
}
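
Note that draw() only displays whatever frame was last captured; the per-frame texture refresh happens elsewhere, typically in update(). A minimal sketch of that step, assuming a QuickTime movie class whose getTexture() accessor returns the most recently decoded frame (the accessor name is an assumption; it does not appear in this example):

void QuickTimeSampleApp::update()
{
	// Assumed API: fetch the latest decoded movie frame as a GL texture.
	if( mMovie )
		mTexture = mMovie.getTexture();
}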
Example #2
void ARTestApp::update()
{
	ARMarkerInfo	*marker_info;	// Pointer to an array holding the details of detected markers.
	int				marker_num;		// Number of markers detected.
	int				j, k;
	
	// Grab a video frame.
#if defined( USE_AR_VIDEO )
	ARUint8 *image;
	if ((image = arVideoGetImage()) != NULL) {
#else
	if( mCapture->checkNewFrame() ) {
#endif

#if defined( USE_AR_VIDEO )
		gARTImage = image;	// Save the fetched image.
		mTexture->enableAndBind();
#else
		const fli::Surface8u &surface( mCapture->getSurface() );
		mTexture->update( surface );
		// ARToolKit's C API takes a non-const pointer but only reads the pixels.
		gARTImage = const_cast<uint8_t*>( surface.getData() );
#endif

		gCallCountMarkerDetect++; // Increment ARToolKit FPS counter.
		
		// Detect the markers in the video frame.
		if (arDetectMarker(gARTImage, gARTThreshhold, &marker_info, &marker_num) < 0) {
			exit(-1);
		}
		
		// Check each known pattern against the set of detected markers.
		for( int i = 0; i < objectnum; i++ ) {
			k = -1;
			for( j = 0; j < marker_num; j++ ) {
				if( object[i].id == marker_info[j].id) {

					// Found a matching pattern; keep the detection with the
					// highest confidence factor (cf).
					if( k == -1 || marker_info[k].cf < marker_info[j].cf )
						k = j;
				}
			}
			if( k == -1 ) {
				object[i].visible = 0;
				continue;
			}
			// Calculate the transform for this marker. Once the marker was
			// visible in the previous frame, use the history-based
			// arGetTransMatCont() for frame-to-frame coherence.
			if( object[i].visible == 0 ) {
				arGetTransMat(&marker_info[k],
							  object[i].marker_center, object[i].marker_width,
							  object[i].trans);
			}
			else {
				arGetTransMatCont(&marker_info[k], object[i].trans,
							  object[i].marker_center, object[i].marker_width,
							  object[i].trans);
			}
			object[i].visible = 1;
		}
	}

	// In locked mode, force exactly one marker's module to be active.
	if( mLockedMode >= 0 ) {
		for( int i = 0; i < objectnum; i++ ) {
			object[i].visible = 0;
		}
		object[mLockedMode].visible = 1;
	}

	for( int mod = 0; mod < objectnum; ++mod )
		mModules[mod]->update( this, object[mod].visible );
}

void ARTestApp::draw()
{
	GLdouble p[16];
	GLdouble m[16];
	
	// Clear the color and depth buffers for the new frame; depth writes must
	// be enabled for the depth clear to take effect.
	glClearColor( 0, 0, 0, 1 );
	gl::enableDepthWrite();
	glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
	
	gl::disableDepthRead();
	gl::disableDepthWrite();
	gl::enableAlphaBlending();

	// Ease mCurrentAlpha toward 0 while any of the first three markers is
	// visible, and toward 1 otherwise.
	if( object[0].visible || object[1].visible || object[2].visible )
		mCurrentAlpha += ( 0.0f - mCurrentAlpha ) * 0.05f;
	else
		mCurrentAlpha += ( 1.0f - mCurrentAlpha ) * 0.05f;
	
	gl::setMatricesScreenOrtho( getWindowWidth(), getWindowHeight() );
	// Draw the camera image centered, preserving its aspect ratio.
	glColor4f( 1, 1, 1, 1 ); // faded variant: 0.2f + mCurrentAlpha * 0.8f
	float width = ( getWindowHeight() * ( mTexture->getWidth() / (float)mTexture->getHeight() ) );
	mTexture->draw( ( getWindowWidth() - width ) / 2.0f, 0, width, getWindowHeight() );
	glDisable( mTexture->getTarget() );
	
#if defined( USE_AR_VIDEO )	
	arVideoCapNext();
	gARTImage = NULL; // Image data is no longer valid after calling arVideoCapNext().
#endif				

	// Projection transformation.
	arglCameraFrustumRH( &gARTCparam, VIEW_DISTANCE_MIN, VIEW_DISTANCE_MAX, p );
	glMatrixMode( GL_PROJECTION );
	glLoadMatrixd( p );
		
	// Calculate the camera position relative to the marker.
	// Replace VIEW_SCALEFACTOR with 1.0 to make one drawing unit equal to 1.0 ARToolKit units (usually millimeters).
	for( int mod = 0; mod < objectnum; ++mod ) {
		if( object[mod].visible ) {
			arglCameraViewRH( object[mod].trans, m, VIEW_SCALEFACTOR );
			glMatrixMode(GL_MODELVIEW);
			glLoadMatrixd( m );
			fli::Matrix44d mvd( m );
			// Pass the marker origin transformed into camera (eye) space.
			mModules[mod]->draw( this, mvd * Vec4d( 0, 0, 0, 1 ) );
		}
	}
}
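
The per-pattern selection loop in ARTestApp::update() is worth isolating: for each known pattern it scans all detected markers and keeps the candidate with the highest confidence factor. A hypothetical standalone helper (not part of the original app) expressing the same rule, using the ARMarkerInfo type from ARToolKit's <AR/ar.h>:

// Return the index of the detected marker matching patternId with the
// highest confidence factor (cf), or -1 if the pattern was not detected.
int findBestMatch( int patternId, const ARMarkerInfo *marker_info, int marker_num )
{
	int best = -1;
	for( int j = 0; j < marker_num; j++ ) {
		if( marker_info[j].id != patternId )
			continue;
		if( best == -1 || marker_info[best].cf < marker_info[j].cf )
			best = j;
	}
	return best;
}

With such a helper, the body of the pattern loop would reduce to k = findBestMatch( object[i].id, marker_info, marker_num ) followed by the existing visibility and transform logic.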