void Raytracer::ProgressivePhotonMapping(int SPPMIter) {
	if (camera->GetAlgorithm() != "SPPM")
		GenerateImage("picture_RT.bmp");
	
	int storedHitpoints = hitpointMap->GetStoredHitpoints();
	printf("Stored Hitpoints: %d\n", storedHitpoints);
	printf("HitpointMap Balancing...\n");
	hitpointMap->Balance();
	
	Color** rtColor = new Color*[H];
	for (int i = 0; i < H; i++) {
		rtColor[i] = new Color[W];
		for (int j = 0; j < W; j++)
			rtColor[i][j] = camera->GetColor(i, j);
	}
	
	Photontracer* photontracer = new Photontracer;
	photontracer->SetScene( scene );
	photontracer->SetHitpointMap(hitpointMap);
	for (int iter = 1; iter <= camera->GetIterations(); iter++) {
		photontracer->Run(SPPMIter * camera->GetIterations());
		Hitpoint* hitpoints = hitpointMap->GetHitpoints();
		
		hitpointMap->MaintainHitpoints();
		for (int r = 0; r < H; r++)
			for (int c = 0; c < W; c++)
				camera->SetColor(r, c, rtColor[r][c]);
		
		double minR2 = camera->GetSampleDist(), maxR2 = 0;
		double minNum = 1000000000, maxNum = 0;
		for (int i = 1; i <= storedHitpoints; i++) {
			int r = hitpoints[i].rc / W;
			int c = hitpoints[i].rc % W;
			// Progressive density estimate: accumulated flux spread over the
			// hitpoint's current search disc, normalized by total photons emitted so far
			Color color = camera->GetColor(r, c) + hitpoints[i].color * hitpoints[i].weight * (4.0 / (hitpoints[i].R2 * camera->GetEmitPhotons() * iter));
			camera->SetColor(r, c, color.Confine());
			
			if (hitpoints[i].R2 < minR2) minR2 = hitpoints[i].R2;
			if (hitpoints[i].R2 > maxR2) maxR2 = hitpoints[i].R2;
			if (hitpoints[i].num < minNum) minNum = hitpoints[i].num;
			if (hitpoints[i].num > maxNum) maxNum = hitpoints[i].num;
		}
		printf("Iter=%d, Num=%.2lf~%.2lf, Radius=%.6lf~%.6lf\n", iter, minNum, maxNum, sqrt(minR2), sqrt(maxR2));
		
		// Write a checkpoint image every 10 iterations
		if (iter % 10 == 0)
			GenerateImage(output);
	}
	
	if (camera->GetAlgorithm() == "SPPM")
		GenerateImage(output);
	
	delete photontracer;
	for (int i = 0; i < H; i++)
		delete[] rtColor[i];
	delete[] rtColor;
}
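For reference (a reading of the code above, not the original author's notes): the per-hitpoint update implements the progressive photon mapping density estimate

	L(x) \approx \frac{\tau(x)}{\pi \, r^2(x) \, N_e}, \qquad N_e = \text{EmitPhotons} \cdot \text{iter}

where \tau is the flux accumulated at the hitpoint (hitpoints[i].color scaled by hitpoints[i].weight) and r^2 is the current search radius squared (hitpoints[i].R2). The constant 4.0 in the code appears to fold the 1/\pi disc-area normalization together with a scene-specific scale factor.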
Example #2
// Initialize OpenGL state
void init() {
	// Texture setup
    glEnable(GL_TEXTURE_2D);
    glGenTextures( 1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);

    // Projection and drawing state
    glClearColor(0,0,0,0);
    glMatrixMode(GL_PROJECTION);      // Select the projection matrix
    glLoadIdentity();                 // Reset it before applying the ortho view
    gluOrtho2D(-1,1,-1,1);
    glMatrixMode(GL_MODELVIEW);       // Switch to modelview so the ortho setup is kept
    glLoadIdentity();
    glColor3f(1,1,1);

    // Load image data from files, if available
    loadFromFiles();

    if (!image) {
        GenerateImage();
    }
}
Example #3
int main(int argc, char* argv[]) {
  ImageType::Pointer image = ImageType::New();
  GenerateImage(image);

  QuickView viewer;
  viewer.AddImage(image.GetPointer());
  viewer.Visualize();

  return EXIT_SUCCESS;
}
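The GenerateImage(image) helper is defined elsewhere in that example. A plausible stand-in (an assumption, not the original helper, and assuming ImageType is itk::Image<unsigned char, 2>) that allocates a 2-D image and draws a bright square:

#include "itkImage.h"
#include "itkImageRegionIterator.h"

using ImageType = itk::Image<unsigned char, 2>;

void GenerateImage(ImageType::Pointer image)
{
	// Allocate a 200x200 image, zero-initialized.
	ImageType::RegionType region;
	ImageType::SizeType size;
	size.Fill(200);
	region.SetSize(size);
	image->SetRegions(region);
	image->Allocate(true);

	// Draw a bright square in the middle of the image.
	itk::ImageRegionIterator<ImageType> it(image, region);
	for (; !it.IsAtEnd(); ++it)
	{
		ImageType::IndexType idx = it.GetIndex();
		if (idx[0] > 50 && idx[0] < 150 && idx[1] > 50 && idx[1] < 150)
			it.Set(255);
	}
}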
Example #4
// Generate and display the image.
void display() {
    // Call user image generation
    GenerateImage();
    // Copy image to texture memory
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, TEX_SIZE, TEX_SIZE, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
    // Clear screen buffer
    glClear(GL_COLOR_BUFFER_BIT);
    // Render a quad
    glBegin(GL_QUADS);
        glTexCoord2f(1,0); glVertex2f(1,-1);
        glTexCoord2f(1,1); glVertex2f(1,1);
        glTexCoord2f(0,1); glVertex2f(-1,1);
        glTexCoord2f(0,0); glVertex2f(-1,-1);
    glEnd();
    // Display result
    // Present the frame (the swap implies a flush in double-buffered mode)
    glutSwapBuffers();
    // Request another frame so the image regenerates continuously
    glutPostRedisplay();
}
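A minimal, hypothetical driver showing how the init() and display() callbacks above would be wired together (window size and title are assumptions, not part of the original examples):

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
    glutInitWindowSize(512, 512);
    glutCreateWindow("GenerateImage demo");
    init();                      // texture and projection setup (Example #2)
    glutDisplayFunc(display);    // regenerate and draw every frame (Example #4)
    glutMainLoop();
    return 0;
}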
Example #5
/*
==================
idImage::MakeDefault

the default image will be grey with a white box outline
to allow you to see the mapping coordinates on a surface
==================
*/
void idImage::MakeDefault()
{
	int		x, y;
	byte	data[DEFAULT_SIZE][DEFAULT_SIZE][4];
	
	if( com_developer.GetBool() )
	{
		// grey center
		for( y = 0 ; y < DEFAULT_SIZE ; y++ )
		{
			for( x = 0 ; x < DEFAULT_SIZE ; x++ )
			{
				data[y][x][0] = 32;
				data[y][x][1] = 32;
				data[y][x][2] = 32;
				data[y][x][3] = 255;
			}
		}
		
		// white border
		for( x = 0 ; x < DEFAULT_SIZE ; x++ )
		{
			data[0][x][0] =
				data[0][x][1] =
					data[0][x][2] =
						data[0][x][3] = 255;
						
			data[x][0][0] =
				data[x][0][1] =
					data[x][0][2] =
						data[x][0][3] = 255;
						
			data[DEFAULT_SIZE - 1][x][0] =
				data[DEFAULT_SIZE - 1][x][1] =
					data[DEFAULT_SIZE - 1][x][2] =
						data[DEFAULT_SIZE - 1][x][3] = 255;
						
			data[x][DEFAULT_SIZE - 1][0] =
				data[x][DEFAULT_SIZE - 1][1] =
					data[x][DEFAULT_SIZE - 1][2] =
						data[x][DEFAULT_SIZE - 1][3] = 255;
		}
	}
	else
	{
		for( y = 0 ; y < DEFAULT_SIZE ; y++ )
		{
			for( x = 0 ; x < DEFAULT_SIZE ; x++ )
			{
				data[y][x][0] = 0;
				data[y][x][1] = 0;
				data[y][x][2] = 0;
				data[y][x][3] = 0;
			}
		}
	}
	
	GenerateImage( ( byte* )data,
				   DEFAULT_SIZE, DEFAULT_SIZE,
				   TF_DEFAULT, TR_REPEAT, TD_DEFAULT );
				   
	defaulted = true;
}
Example #6
bool igApp::GenerateVideo( const wxString& videoPath )
{
	bool success = false;
	wxProgressDialog* progressDialog = nullptr;
	AVCodec* codec = nullptr;
	AVCodecContext* codecContext = nullptr;
	wxFile videoFile;
	AVFrame* frame = nullptr;
	int ret = 0;

	do
	{
		if( options.frameCount == 0 )
			break;

		avcodec_register_all();

		AVCodecID codecId = AV_CODEC_ID_MPEG4; //AV_CODEC_ID_H264; //AV_CODEC_ID_MPEG1VIDEO;
		codec = avcodec_find_encoder( codecId );
		if( !codec )
		{
			wxMessageBox( "Failed to find codec for MPEG format." );
			break;
		}

		codecContext = avcodec_alloc_context3( codec );
		if( !codecContext )
		{
			wxMessageBox( "Failed to allocate codec context." );
			break;
		}

		codecContext->bit_rate = 400000;		// Target average bitrate in bits/s
		codecContext->width = options.imageSize.GetWidth();
		codecContext->height = options.imageSize.GetHeight();
		codecContext->time_base.num = 1;
		codecContext->time_base.den = options.frameRate;
		codecContext->gop_size = 10;		// Emit one intra (key) frame every 10 frames
		codecContext->max_b_frames = 1;		// Allow at most one B-frame between references
		codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
	
		ret = avcodec_open2( codecContext, codec, NULL );
		if( ret < 0 )
		{
			wxMessageBox( "Failed to open codec: " + GetFFMpegError( ret ) );
			break;
		}

		frame = av_frame_alloc();
		frame->format = codecContext->pix_fmt;		// Format must be set before encoding
		frame->width = codecContext->width;
		frame->height = codecContext->height;

		ret = av_image_alloc( frame->data, frame->linesize, frame->width, frame->height, codecContext->pix_fmt, 32 );
		if( ret < 0 )
		{
			wxMessageBox( "Failed to allocate frame: " + GetFFMpegError( ret ) );
			break;
		}

		if( !videoFile.Open( videoPath, wxFile::write ) )
		{
			wxMessageBox( "Failed to open file \"" + videoPath + "\" for writing." );
			break;
		}
		
		progressDialog = new wxProgressDialog(
							"Video Generation In Progress", "Generating Video...",
							options.frameCount, 0,
							wxPD_APP_MODAL | wxPD_CAN_ABORT | wxPD_ESTIMATED_TIME | wxPD_AUTO_HIDE );

		AVPacket packet;
		
		int frameIndex;
		for( frameIndex = 0; frameIndex < options.frameCount; frameIndex++ )
		{
			if( !GenerateImage( frameIndex, false, true ) )
				break;

			if( !StuffImageInFrame( frame, codecContext->pix_fmt ) )
				break;

			frame->pts = frameIndex;

			av_init_packet( &packet );
			packet.data = nullptr;
			packet.size = 0;

			// Encode the frame!
			int gotPacket = 0;
			ret = avcodec_encode_video2( codecContext, &packet, frame, &gotPacket );
			if( ret < 0 )
			{
				wxMessageBox( wxString::Format( "Failed to encode frame %d: ", frameIndex + 1 ) + GetFFMpegError( ret ) );
				break;
			}

			wxString progressMsg = wxString::Format( "Writing frame %d/%d...", frameIndex + 1, options.frameCount );
			if( gotPacket )
				progressMsg += wxString::Format( " (packet size = %5d)", packet.size );
			progressDialog->Update( frameIndex, progressMsg );

			if( gotPacket )
			{
				int bytesWritten = videoFile.Write( packet.data, packet.size );
				int packetSize = packet.size;

				av_packet_unref( &packet );

				if( packetSize != bytesWritten )
				{
					wxMessageBox( "Failed to write packet!" );
					break;
				}

				videoFile.Flush();
			}

			if( progressDialog->WasCancelled() )
				break;
		}

		// Bail out if a frame failed to encode or the user cancelled.
		if( frameIndex < options.frameCount )
			break;

		// Now encode the delayed frames.
		while( true )
		{
			int gotPacket = 0;
			av_init_packet( &packet );
			packet.data = nullptr;		// Let the encoder allocate the packet buffer
			packet.size = 0;
			ret = avcodec_encode_video2( codecContext, &packet, NULL, &gotPacket );
			if( ret < 0 )
			{
				wxMessageBox( "Failed to encode delay frame: " + GetFFMpegError( ret ) );
				break;
			}

			if( !gotPacket )
				break;
			else
			{
				videoFile.Write( packet.data, packet.size );
				videoFile.Flush();
				av_packet_unref( &packet );
			}
		}

		success = true;
	}
	while( false );

	delete progressDialog;

	if( success )
	{
		uint8_t endcode[] = { 0, 0, 1, 0xb7 };		// MPEG sequence end code
		videoFile.Write( endcode, sizeof( endcode ) );
	}

	videoFile.Close();

	if( frame )
	{
		av_freep( &frame->data[0] );
		av_frame_free( &frame );
	}

	if( codecContext )
	{
		avcodec_close( codecContext );
		av_free( codecContext );
	}

	return success;
}
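Note: avcodec_encode_video2() used above is deprecated in current FFmpeg releases. A sketch (not the author's code) of the equivalent send/receive loop, reusing the codecContext and videoFile from the function above:

static bool EncodeAndDrain(AVCodecContext* ctx, const AVFrame* frm, wxFile& out)
{
	// Pass nullptr as frm to flush any delayed frames at end of stream.
	int ret = avcodec_send_frame(ctx, frm);
	if (ret < 0)
		return false;

	AVPacket* pkt = av_packet_alloc();
	while ((ret = avcodec_receive_packet(ctx, pkt)) >= 0)
	{
		out.Write(pkt->data, pkt->size);
		av_packet_unref(pkt);
	}
	av_packet_free(&pkt);

	// EAGAIN (needs more input) and EOF (fully flushed) are the normal exits.
	return ret == AVERROR(EAGAIN) || ret == AVERROR_EOF;
}
Example #7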
void Raytracer::Run() {
	scene->CreateScene( input );
	camera = scene->GetCamera();
	
	// SPPM runs one outer pass per depth-of-field sample; other algorithms run once.
	int SPPMIteration = (camera->GetAlgorithm() == "SPPM") ? camera->GetDofSample() : 1;
	for (int iter = 0; iter < SPPMIteration; iter++) {
		if (camera->GetAlgorithm() == "SPPM")
			printf("SPPM Iteration= %d\n", iter);
		
		if (camera->GetAlgorithm() == "PPM" || camera->GetAlgorithm() == "SPPM") {
			hitpointMap = new HitpointMap(camera->GetMaxHitpoints());
			hitpointMap->SetReduction(camera->GetReduction());
		}
		
		if (camera->GetAlgorithm() == "PM" || camera->GetAlgorithm() == "PPM" || camera->GetAlgorithm() == "SPPM") {
			Photontracer* photontracer = new Photontracer;
			photontracer->SetScene( scene );
			photonmap = photontracer->CalnPhotonmap();
			delete photontracer;
		}
		
		if (camera->GetAlgorithm() == "PM")
			printf("Stored Photons= %d\n", photonmap->GetStoredPhotons());
		
		H = camera->GetH();
		W = camera->GetW();

		sample = new int*[H];
		for ( int i = 0 ; i < H ; i++ )
			sample[i] = new int[W];
		
		MultiThreadSampling(2 * iter * RT_MAX_THREADS);
		if (camera->GetAperture() < EPS) {
			if (camera->GetAlgorithm() == "PPM" || camera->GetAlgorithm() == "SPPM") {
				int storedHitpoints = hitpointMap->GetStoredHitpoints();
				Hitpoint* hitpoints = hitpointMap->GetHitpoints();
				for (int i = 1; i <= storedHitpoints; i++) {
					int r = hitpoints[i].rc / W;
					int c = hitpoints[i].rc % W;
					bool uniformNeighborhood =
						(r == 0     || sample[r][c] == sample[r - 1][c]) &&
						(r == H - 1 || sample[r][c] == sample[r + 1][c]) &&
						(c == 0     || sample[r][c] == sample[r][c - 1]) &&
						(c == W - 1 || sample[r][c] == sample[r][c + 1]);
					// Hitpoints on edge pixels (where neighboring sample IDs differ)
					// get their weight reduced before resampling.
					if (!uniformNeighborhood)
						hitpoints[i].weight /= 5;
				}
			}
			MultiThreadResampling((2 * iter + 1) * RT_MAX_THREADS);
		}
		
		for ( int i = 0 ; i < H ; i++ )
			delete[] sample[i];
		delete[] sample;
		
		GenerateImage(output);
		
		if (camera->GetAlgorithm() == "PPM" || camera->GetAlgorithm() == "SPPM")
			ProgressivePhotonMapping(iter);
		
		if (hitpointMap != NULL) {
			delete hitpointMap;
			hitpointMap = NULL;
		}
		if (photonmap != NULL) {
			delete photonmap;
			photonmap = NULL;
		}
	}
}
Example #8
/**
 * Performs exposure and grabs a single image.
 * This function should block during the actual exposure and return immediately afterwards
 * (i.e., before readout).  This behavior is needed for proper synchronization with the shutter.
 * Required by the MM::Camera API.
 */
int MUCamSource::SnapImage()
{
    GenerateImage();
    return DEVICE_OK;
}
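The GenerateImage() helper is not shown in this adapter snippet. A plausible version, modeled on Micro-Manager's DemoCamera adapter (the img_ ImgBuffer member and 8-bit pixel depth are assumptions, not part of the original):

void MUCamSource::GenerateImage()
{
	// Fill the adapter's image buffer with a simple horizontal gradient.
	unsigned char* pBuf = const_cast<unsigned char*>(img_.GetPixels());
	for (unsigned y = 0; y < img_.Height(); y++)
		for (unsigned x = 0; x < img_.Width(); x++)
			pBuf[y * img_.Width() + x] = (unsigned char)(x & 0xFF);
}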