Example No. 1
void GraphicEngine::dumpScreenJPEG( std::string fileName )
{
    int w = _display->getLayoutMgr()->screenWidth();
    int h = _display->getLayoutMgr()->screenHeight();

    // width * height * RGB
    unsigned char * data = new unsigned char[ w * h * 3];

    _display->draw( _scene, 0xFFFFFFFF, 0xFFFFFFFF );

    glReadBuffer( GL_BACK );

    glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );
    glPixelStorei( GL_PACK_ALIGNMENT, 1 );
    glReadPixels( 0, 0, w, h, GL_RGB, GL_UNSIGNED_BYTE, data );

    // Flip data top to bottom.
    unsigned char * flipdat = new unsigned char[ w * h * 3];
    int scanLen = 3 * w;
    for ( int i = 0 ; i < h; i++ )
    {
        unsigned char* srcLine = &data[ i * scanLen ];
        unsigned char* dstLine = &flipdat[ (h - i - 1) * scanLen ];
        memcpy(  dstLine, srcLine, scanLen );
    }
    delete[] data;

    writeJPEG( fileName.c_str(), w, h, flipdat );

    delete[] flipdat;
}
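The capture routine above hands the flipped RGB buffer to a writeJPEG helper that is not part of the excerpt. Below is a minimal sketch of what such a helper might look like on top of libjpeg; the signature writeJPEG(const char *fileName, int w, int h, unsigned char *rgb) is an assumption made to match the call sites in the first two examples, and the quality setting is arbitrary, so treat it as an illustration rather than either project's actual implementation.

// Hypothetical writeJPEG helper built on libjpeg (a sketch, not from the projects above).
// Assumes tightly packed, top-to-bottom RGB8 data of size w * h * 3.
#include <cstdio>
#include <jpeglib.h>

bool writeJPEG( const char *fileName, int w, int h, unsigned char *rgb )
{
    FILE *fp = fopen( fileName, "wb" );
    if ( !fp )
        return false;

    jpeg_compress_struct cinfo;
    jpeg_error_mgr jerr;
    cinfo.err = jpeg_std_error( &jerr );
    jpeg_create_compress( &cinfo );
    jpeg_stdio_dest( &cinfo, fp );

    cinfo.image_width      = w;
    cinfo.image_height     = h;
    cinfo.input_components = 3;
    cinfo.in_color_space   = JCS_RGB;
    jpeg_set_defaults( &cinfo );
    jpeg_set_quality( &cinfo, 90, TRUE );   // quality 90 is an arbitrary choice

    jpeg_start_compress( &cinfo, TRUE );
    while ( cinfo.next_scanline < cinfo.image_height )
    {
        // One tightly packed scanline per iteration.
        JSAMPROW row = &rgb[ cinfo.next_scanline * w * 3 ];
        jpeg_write_scanlines( &cinfo, &row, 1 );
    }
    jpeg_finish_compress( &cinfo );
    jpeg_destroy_compress( &cinfo );
    fclose( fp );
    return true;
}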
Example No. 2
void VspGlWindow::screenGrab()
{
	int nStartX = 0;
	int nStartY = 0;
	int nPixW = w() - nStartX;
	int nPixH = h() - nStartY;

	// Tightly packed RGB: 3 bytes per pixel, no row padding.
	unsigned char *pRGB = new unsigned char [3 * nPixW * nPixH];

	glReadBuffer( GL_BACK );
	glPixelStorei( GL_PACK_ALIGNMENT, 1 );
	glReadPixels( nStartX, nStartY, nPixW, nPixH, GL_RGB, GL_UNSIGNED_BYTE, pRGB );

	// OpenGL returns rows bottom-up; flip them top to bottom for the JPEG writer.
	unsigned char *pRGBFlip = new unsigned char [3 * nPixW * nPixH];

	int scanLen = 3 * nPixW;

	for ( int i = 0 ; i < nPixH ; i++ )
	{
		unsigned char* srcLine = &pRGB[ i * scanLen ];
		unsigned char* dstLine = &pRGBFlip[ (nPixH - 1 - i) * scanLen ];

		memcpy( dstLine, srcLine, scanLen );
	}
	delete[] pRGB;

	writeJPEG( screenGrabFileName.get_char_star(), nPixW, nPixH, pRGBFlip );

	delete[] pRGBFlip;

}
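Both screen-grab examples flip the image by copying into a second buffer. If memory matters, the same top-to-bottom flip can be done in place by swapping rows; the helper below is a sketch under the same assumption of tightly packed RGB8 rows and is not taken from either project.

// In-place vertical flip of a tightly packed RGB8 image (illustrative sketch):
// swap row i with row h-1-i until the two indices meet in the middle.
#include <algorithm>

void flipVerticalInPlace( unsigned char *rgb, int w, int h )
{
    const int scanLen = 3 * w;
    for ( int top = 0, bottom = h - 1; top < bottom; ++top, --bottom )
    {
        std::swap_ranges( rgb + top * scanLen,
                          rgb + top * scanLen + scanLen,
                          rgb + bottom * scanLen );
    }
}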
Example No. 3
static Bool
JPEGImageToFile (CompDisplay *d,
		 const char  *path,
		 const char  *name,
		 const char  *format,
		 int         width,
		 int         height,
		 int         stride,
		 void        *data)
{
    Bool status = FALSE;
    char *fileName;
    FILE *file;

    /* Not a JPEG */
    if (strcasecmp (format, "jpeg") != 0 && strcasecmp (format, "jpg") != 0)
    {
	JPEG_DISPLAY (d);
	UNWRAP (jd, d, imageToFile);
	status = (*d->imageToFile) (d, path, name, format,
				    width, height, stride, data);
	WRAP (jd, d, imageToFile, JPEGImageToFile);
	return status;
    }

    /* Is a JPEG */
    fileName = createFilename (path, name);
    if (!fileName)
	return FALSE;

    file = fopen (fileName, "wb");
    if (file)
    {
	status = writeJPEG (d, data, file, width, height, stride);
	fclose (file);
    }

    free (fileName);
    return status;
}
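createFilename is referenced above but not included in this plugin excerpt. The sketch below is a plausible reconstruction offered purely for illustration; the real compiz helper may treat empty paths or trailing slashes differently.

/* Hypothetical reconstruction of createFilename, not taken from compiz:
   join the optional path and the file name with a '/'. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *
createFilename (const char *path,
		const char *name)
{
    char *fileName;

    if (path && strlen (path) > 0)
    {
	fileName = (char *) malloc (strlen (path) + strlen (name) + 2);
	if (fileName)
	    sprintf (fileName, "%s/%s", path, name);
    }
    else
    {
	fileName = strdup (name);
    }

    return fileName;
}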
Example No. 4
int main(int argc, char **argv) {
	po::options_description options("Command line options");
	po::options_description hidden_options("Hidden options");
	po::variables_map vm, vm_temp;

	options.add_options()
		("help", "Print information on how to use this program\n")
		("config", po::value<std::string>(),
		    "Load the configuration file 'arg' as an additional source of command line parameters. "
			"Should contain one parameter per line in key=value format. The command line takes precedence "
			"when an argument is specified multiple times.\n")
		("saturation", po::value<float>(),
		    "Saturation threshold of the sensor: the ratio of the sensor's theoretical dynamic "
			"range, at which saturation occurs in practice (in [0,1]). Estimated automatically if not specified.\n")
		("fitexptimes", "On some cameras, the exposure times in the EXIF tags can't be trusted. Use "
		    "this parameter to estimate them automatically for the current image sequence\n")
		("exptimes", po::value<std::string>(),
		    "Override the EXIF exposure times with a manually specified sequence of the "
			"format 'time1,time2,time3,..'\n")
		("nodemosaic", "If specified, the raw Bayer grid is exported as a grayscale EXR file\n")
		("colormode", po::value<EColorMode>()->default_value(ESRGB, "sRGB"),
			"Output color space (one of 'native'/'sRGB'/'XYZ')\n")
		("sensor2xyz", po::value<std::string>(),
			"Matrix that transforms from the sensor color space to XYZ tristimulus values\n")
		("scale", po::value<float>(),
			"Optional scale factor that is applied to the image\n")
		("crop", po::value<std::string>(),
			"Crop to a rectangular area. 'arg' should be specified in the form x,y,width,height\n")
		("resample", po::value<std::string>(),
			"Resample the image to a different resolution. 'arg' can be "
			"a pair of integers like 1188x790 or the max. resolution ("
			"maintaining the aspect ratio)\n")
		("rfilter", po::value<std::string>()->default_value("lanczos"),
			"Resampling filter used by the --resample option (available choices: "
			"'tent' or 'lanczos')\n")
		("wbalpatch", po::value<std::string>(),
		    "White balance the image using a grey patch occupying the region "
			"'arg' (specified as x,y,width,height). Prints output suitable for --wbal\n")
		("wbal", po::value<std::string>(),
		    "White balance the image using floating point multipliers 'arg' "
			"specified as r,g,b\n")
		("vcal", "Calibrate vignetting correction given a uniformly illuminated image\n")
		("vcorr", po::value<std::string>(),
		    "Apply the vignetting correction computed using --vcal\n")
		("flip", po::value<std::string>()->default_value(""), "Flip the output image along the "
		  "specified axes (one of 'x', 'y', or 'xy')\n")
		("rotate", po::value<int>()->default_value(0), "Rotate the output image by 90, 180 or 270 degrees\n")
		("format", po::value<std::string>()->default_value("half"),
		  "Choose the desired output file format -- one of 'half' (OpenEXR, 16 bit HDR / half precision), "
		  "'single' (OpenEXR, 32 bit / single precision), 'jpeg' (libjpeg, 8 bit LDR for convenience)\n")
		("output", po::value<std::string>()->default_value("output.exr"),
			"Name of the output file in OpenEXR format. When only a single RAW file is processed, its "
			"name is used by default (with the ending replaced by .exr/.jpeg");

	hidden_options.add_options()
		("input-files", po::value<std::vector<std::string>>(), "Input files");

	po::options_description all_options;
	all_options.add(options).add(hidden_options);
	po::positional_options_description positional;
	positional.add("input-files", -1);

	try {
		/* Temporary command line parsing pass */
		po::store(po::command_line_parser(argc, argv)
			.options(all_options).positional(positional).run(), vm_temp);

		/* Is there a configuration file? */
		std::string config = "hdrmerge.cfg";

		if (vm_temp.count("config"))
			config = vm_temp["config"].as<std::string>();

		if (fexists(config)) {
			std::ifstream settings(config, std::ifstream::in);
			po::store(po::parse_config_file(settings, all_options), vm);
			settings.close();
		}

		po::store(po::command_line_parser(argc, argv)
			.options(all_options).positional(positional).run(), vm);
		if (vm.count("help") || !vm.count("input-files")) {
			help(argv, options);
			return 0;
		}
		po::notify(vm);
	} catch (po::error &e) {
		cerr << "Error while parsing command line arguments: " << e.what() << endl << endl;
		help(argv, options);
		return -1;
	}

	try {
		EColorMode colormode = vm["colormode"].as<EColorMode>();
		std::vector<int> wbalpatch      = parse_list<int>(vm, "wbalpatch", { 4 });
		std::vector<float> wbal         = parse_list<float>(vm, "wbal", { 3 });
		std::vector<int> resample       = parse_list<int>(vm, "resample", { 1, 2 }, ", x");
		std::vector<int> crop           = parse_list<int>(vm, "crop", { 4 });
		std::vector<float> sensor2xyz_v = parse_list<float>(vm, "sensor2xyz", { 9 });
		std::vector<float> vcorr        = parse_list<float>(vm, "vcorr", { 3 });

		if (!wbal.empty() && !wbalpatch.empty()) {
			cerr << "Cannot specify --wbal and --wbalpatch at the same time!" << endl;
			return -1;
		}

		float sensor2xyz[9] = {
			0.412453f, 0.357580f, 0.180423f,
			0.212671f, 0.715160f, 0.072169f,
			0.019334f, 0.119193f, 0.950227f
		};

		if (!sensor2xyz_v.empty()) {
			for (int i=0; i<9; ++i)
				sensor2xyz[i] = sensor2xyz_v[i];
		} else if (colormode != ENative) {
			cerr << "*******************************************************************************" << endl
				 << "Warning: no sensor2xyz matrix was specified -- this is necessary to get proper" << endl
				 << "sRGB / XYZ output. To acquire this matrix, convert any one of your RAW images" << endl
				 << "into a DNG file using Adobe's DNG converter on Windows / Mac (or on Linux," << endl
				 << "using the 'wine' emulator). The run" << endl
				 << endl
				 << "  $ exiv2 -pt the_image.dng 2> /dev/null | grep ColorMatrix2" << endl
				 << "  Exif.Image.ColorMatrix2 SRational 9  <sequence of ratios>" << endl
				 << endl
				 << "The sequence of a rational numbers is a matrix in row-major order. Compute its" << endl
				 << "inverse using a tool like MATLAB or Octave and add a matching entry to the" << endl
				 << "file hdrmerge.cfg (creating it if necessary), like so:" << endl
				 << endl
				 << "# Sensor to XYZ color space transform (Canon EOS 50D)" << endl
				 << "sensor2xyz=1.933062 -0.1347 0.217175 0.880916 0.725958 -0.213945 0.089893 " << endl
				 << "-0.363462 1.579612" << endl
				 << endl
				 << "-> Providing output in the native sensor color space, as no matrix was given." << endl
				 << "*******************************************************************************" << endl
				 << endl;

			colormode = ENative;
		}

		std::vector<std::string> exposures = vm["input-files"].as<std::vector<std::string>>();
		float scale = 1.0f;
		if (vm.count("scale"))
			scale = vm["scale"].as<float>();

		/// Step 1: Load RAW
		ExposureSeries es;
		for (size_t i=0; i<exposures.size(); ++i)
			es.add(exposures[i]);
		es.check();
		if (es.size() == 0)
			throw std::runtime_error("No input found / list of exposures to merge is empty!");

		std::vector<float> exptimes;
		std::map<float, float> exptimes_map;
		if (vm.count("exptimes")) {
			std::string value = vm["exptimes"].as<std::string>();

			if (value.find("->") == std::string::npos) {
				/* Normal list of exposure times, load directly */
				exptimes = parse_list<float>(vm, "exptimes", { es.size() });
			} else {
				/* Map of exposure time replacement values */
				std::vector<std::string> map_str = parse_list<std::string>(vm, "exptimes", { }, ",");
				for (size_t i=0; i<map_str.size(); ++i) {
					std::vector<std::string> v;
					boost::algorithm::iter_split(v, map_str[i], boost::algorithm::first_finder("->"));
					if (v.size() != 2)
						throw std::runtime_error("Unable to parse the 'exptimes' parameter");
					try {
						exptimes_map[boost::lexical_cast<float>(boost::trim_copy(v[0]))] = boost::lexical_cast<float>(boost::trim_copy(v[1]));
					} catch (const boost::bad_lexical_cast &) {
						throw std::runtime_error("Unable to parse the 'exptimes' argument!");
					}
				}
			}
		}
		es.load();

		/// Precompute relative exposure + weight tables
		float saturation = 0;
		if (vm.count("saturation"))
			saturation = vm["saturation"].as<float>();
		es.initTables(saturation);

		if (!exptimes.empty()) {
			cout << "Overriding exposure times: [";

			for (size_t i=0; i<exptimes.size(); ++i) {
				cout << es.exposures[i].toString() << "->" << exptimes[i];
				es.exposures[i].exposure = exptimes[i];
				if (i+1 < exptimes.size())
					cout << ", ";
			}
			cout << "]" << endl;
		}

		if (!exptimes_map.empty()) {
			cout << "Overriding exposure times: [";
			for (size_t i=0; i<es.exposures.size(); ++i) {
				float from = es.exposures[i].exposure, to = 0;
				for (std::map<float, float>::const_iterator it = exptimes_map.begin(); it != exptimes_map.end(); ++it) {
					if (std::abs((it->first - from) / from) < 1e-5f) {
						if (to != 0)
							throw std::runtime_error("Internal error!");
						to = it->second;
					}
				}
				if (to == 0)
					throw std::runtime_error((boost::format("Specified an exposure time replacement map, but couldn't find an entry for %1%") % from).str());

				cout << es.exposures[i].toString() << "->" << to;
				if (i+1 < es.exposures.size())
					cout << ", ";
				es.exposures[i].exposure = to;
			}
			cout << "]" << endl;
		}


		if (vm.count("fitexptimes")) {
			es.fitExposureTimes();
			if (vm.count("exptimes"))
				cerr << "Note: you specified --exptimes and --fitexptimes at the same time. The" << endl
				     << "The test file exptime_showfit.m now compares these two sets of exposure" << endl
					 << "times, rather than the fit vs EXIF." << endl << endl;
		}

		/// Step 2: HDR merge
		es.merge();

		/// Step 3: Demosaicing
		bool demosaic = vm.count("nodemosaic") == 0;
		if (demosaic)
			es.demosaic(sensor2xyz);

		/// Step 4: Transform colors
		if (colormode != ENative) {
			if (!demosaic) {
				cerr << "Warning: you requested XYZ/sRGB output, but demosaicing was explicitly disabled! " << endl
					 << "Color processing is not supported in this case -- writing raw sensor colors instead." << endl;
			} else {
				es.transform_color(sensor2xyz, colormode == EXYZ);
			}
		}

		/// Step 5: White balancing
		if (!wbal.empty()) {
			float scale[3] = { wbal[0], wbal[1], wbal[2] };
			es.whitebalance(scale);
		} else if (wbalpatch.size()) {
			es.whitebalance(wbalpatch[0], wbalpatch[1], wbalpatch[2], wbalpatch[3]);
		}

		/// Step 6: Scale
		if (scale != 1.0f)
			es.scale(scale);

		/// Step 7: Remove vignetting
		if (vm.count("vcal")) {
			if (vm.count("vcorr")) {
				cerr << "Warning: only one of --vcal and --vcorr can be specified at a time. Ignoring --vcorr" << endl;
			}

			if (demosaic)
				es.vcal();
			else
				cerr << "Warning: Vignetting correction requires demosaicing. Ignoring.." << endl;
		} else if (!vcorr.empty()) {
			if (demosaic)
				es.vcorr(vcorr[0], vcorr[1], vcorr[2]);
			else
				cerr << "Warning: Vignetting correction requires demosaicing. Ignoring.." << endl;
		}

		/// Step 8: Crop
		if (!crop.empty())
			es.crop(crop[0], crop[1], crop[2], crop[3]);

		/// Step 9: Resample
		if (!resample.empty()) {
			int w, h;

			if (resample.size() == 1) {
				float factor = resample[0] / (float) std::max(es.width, es.height);
				w = (int) std::round(factor * es.width);
				h = (int) std::round(factor * es.height);
			} else {
				w = resample[0];
				h = resample[1];
			}

			if (demosaic) {
				std::string rfilter = boost::to_lower_copy(vm["rfilter"].as<std::string>());
				if (rfilter == "lanczos") {
					es.resample(LanczosSincFilter(), w, h);
				} else if (rfilter == "tent") {
					es.resample(TentFilter(), w, h);
				} else {
					cout << "Invalid resampling filter chosen (must be 'lanczos' / 'tent')" << endl;
					return -1;
				}
			} else {
				cout << "Warning: resampling a non-demosaiced image does not make much sense -- ignoring." << endl;
			}
		}

		/// Step 10: Flip / rotate
		ERotateFlipType flipType = flipTypeFromString(
			vm["rotate"].as<int>(), vm["flip"].as<std::string>());

		if (flipType != ERotateNoneFlipNone) {
			uint8_t *t_buf;
			size_t t_width, t_height;

			if (demosaic) {
				rotateFlip((uint8_t *) es.image_demosaiced, es.width, es.height,
					t_buf, t_width, t_height, 3*sizeof(float), flipType);
				delete[] es.image_demosaiced;
				es.image_demosaiced = (float3 *) t_buf;
				es.width = t_width;
				es.height = t_height;
			}
		}

		/// Step 11: Write output
		std::string output = vm["output"].as<std::string>();
		std::string format = boost::to_lower_copy(vm["format"].as<std::string>());

		if (vm["output"].defaulted() && exposures.size() == 1 && exposures[0].find("%") == std::string::npos) {
			std::string fname = exposures[0];
			size_t spos = fname.find_last_of(".");
			if (spos != std::string::npos)
				output = fname.substr(0, spos) + ".exr";
		}

		if (format == "jpg")
			format = "jpeg";

		if (format == "jpeg" && boost::ends_with(output,  ".exr"))
			output = output.substr(0, output.length()-4) + ".jpg";

		if (demosaic) {
			if (format == "half" || format == "single")
				writeOpenEXR(output, es.width, es.height, 3,
					(float *) es.image_demosaiced, es.metadata, format == "half");
			else if (format == "jpeg")
				writeJPEG(output, es.width, es.height, (float *) es.image_demosaiced);
			else
				throw std::runtime_error("Unsupported --format argument");
		} else {
			if (format == "half" || format == "single")
				writeOpenEXR(output, es.width, es.height, 1,
					(float *) es.image_merged, es.metadata, format == "half");
			else if (format == "jpeg")
				throw std::runtime_error("Tried to export the raw Bayer grid "
					"as a JPEG image -- this is not allowed.");
			else
				throw std::runtime_error("Unsupported --format argument");
		}
	} catch (const std::exception &ex) {
		cerr << "Encountered a fatal error: " << ex.what() << endl;
		return -1;
	}

	return 0;
}
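The warning printed when no sensor2xyz matrix is given asks the user to invert the 3x3 ColorMatrix2 from the DNG metadata in MATLAB or Octave. The same inverse can be computed with a few lines of code; the standalone sketch below uses a generic adjugate-based 3x3 inverse, is not part of hdrmerge, and assumes the matrix is supplied in row-major order as printed by exiv2 (the sample values are illustrative only).

// Standalone sketch (not part of hdrmerge): invert a row-major 3x3 matrix via the
// adjugate, e.g. to turn Exif.Image.ColorMatrix2 into a sensor2xyz config entry.
#include <cstdio>
#include <cmath>
#include <stdexcept>

void invert3x3(const float m[9], float inv[9]) {
	float det = m[0]*(m[4]*m[8] - m[5]*m[7])
	          - m[1]*(m[3]*m[8] - m[5]*m[6])
	          + m[2]*(m[3]*m[7] - m[4]*m[6]);
	if (std::abs(det) < 1e-12f)
		throw std::runtime_error("ColorMatrix2 is singular!");
	float idet = 1.0f / det;
	inv[0] =  (m[4]*m[8] - m[5]*m[7]) * idet;
	inv[1] = -(m[1]*m[8] - m[2]*m[7]) * idet;
	inv[2] =  (m[1]*m[5] - m[2]*m[4]) * idet;
	inv[3] = -(m[3]*m[8] - m[5]*m[6]) * idet;
	inv[4] =  (m[0]*m[8] - m[2]*m[6]) * idet;
	inv[5] = -(m[0]*m[5] - m[2]*m[3]) * idet;
	inv[6] =  (m[3]*m[7] - m[4]*m[6]) * idet;
	inv[7] = -(m[0]*m[7] - m[1]*m[6]) * idet;
	inv[8] =  (m[0]*m[4] - m[1]*m[3]) * idet;
}

int main() {
	// Illustrative input only; substitute the nine ColorMatrix2 ratios printed by exiv2.
	float colorMatrix2[9] = { 0.4124f, 0.3576f, 0.1805f,
	                          0.2127f, 0.7152f, 0.0722f,
	                          0.0193f, 0.1192f, 0.9505f };
	float sensor2xyz[9];
	invert3x3(colorMatrix2, sensor2xyz);

	// Print in the key=value form expected by hdrmerge.cfg.
	printf("sensor2xyz=");
	for (int i = 0; i < 9; ++i)
		printf("%f ", sensor2xyz[i]);
	printf("\n");
	return 0;
}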
Example No. 5
void StreamMediaSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime, unsigned /*durationInMicroseconds*/)
{
	// We've just received a frame of data.  (Optionally) print out information about it:
#ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME
	if (m_fStreamId != NULL) {
		envir() << "Stream \"" << m_fStreamId << "\"; ";
	}

	envir() << m_fSubsession.mediumName() << "/" << m_fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";

	if (numTruncatedBytes > 0) {
		envir() << " (with " << numTruncatedBytes << " bytes truncated)";
	}

	char uSecsStr[6 + 1]; // used to output the 'microseconds' part of the presentation time
	sprintf(uSecsStr, "%06u", (unsigned) presentationTime.tv_usec);
	envir() << ".\tPresentation time: " << (int) presentationTime.tv_sec << "." << uSecsStr;

	if (m_fSubsession.rtpSource() != NULL && !m_fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) {
		envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
	}

#ifdef DEBUG_PRINT_NPT
	envir() << "\tNPT: " << m_fSubsession.getNormalPlayTime(presentationTime);
#endif
	envir() << "\n";
#endif

	m_avPacket.size = frameSize + 4;
	m_avPacket.data = m_buffer;
	int gotFrame = 0;
	int len = 0;

	while (m_avPacket.size > 0) {
		len = avcodec_decode_video2(m_avCodecContext, m_avFrame, &gotFrame, &m_avPacket);
		if (len < 0) {
			break;
		}
		if (gotFrame) {
			envir() << "Decoded Frame: " << ++m_idx << " Picture Type: " << av_get_picture_type_char(m_avFrame->pict_type) << " Key Frame: " << m_avFrame->key_frame << "\n";
			envir() << "showFrame: " << showFrame() << "\n";

			SDL_PollEvent(&m_event);
			switch (m_event.type) {
				case SDL_QUIT:
					SDL_Quit();
					exit(0);
					break;
				default:
					break;
			}
#if defined(WRITE_RAW)
			if (m_avFrame->key_frame) {
				writeRaw(m_idx);
			}
#endif
#if defined(WRITE_JPEG)
			//if (m_avFrame->pict_type == AV_PICTURE_TYPE_I) {
			writeJPEG(m_idx);
			//}
#endif
		}
		if (m_avPacket.data) {
			m_avPacket.size -= len;
			m_avPacket.data += len;
		}
	}

	// Then continue, to request the next frame of data:
	continuePlaying();
}
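avcodec_decode_video2, used in the loop above, has long been deprecated in FFmpeg. Ported to the newer send/receive API (FFmpeg 3.1+), the decode step might look roughly like the sketch below; the member function name decodePacket is hypothetical, the member names are reused from the example for illustration, and error handling is abbreviated.

// Sketch of the same decode step on FFmpeg's send/receive API (FFmpeg >= 3.1).
// Not part of the original StreamMediaSink; treat it as an assumption-laden port.
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <cerrno>

void StreamMediaSink::decodePacket()
{
	// Feed the whole packet to the decoder at once; no manual size/data bookkeeping.
	int ret = avcodec_send_packet(m_avCodecContext, &m_avPacket);
	if (ret < 0) {
		return; // feeding the decoder failed
	}

	// One packet may yield zero or more frames.
	while (true) {
		ret = avcodec_receive_frame(m_avCodecContext, m_avFrame);
		if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
			break; // decoder needs more input, or has been fully drained
		}
		if (ret < 0) {
			break; // genuine decode error
		}

		envir() << "Decoded Frame: " << ++m_idx << "\n";
#if defined(WRITE_JPEG)
		writeJPEG(m_idx);
#endif
	}
}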