Example #1
static int
vips_scale_build( VipsObject *object )
{
	VipsConversion *conversion = VIPS_CONVERSION( object );
	VipsScale *scale = (VipsScale *) object;
	VipsImage **t = (VipsImage **) vips_object_local_array( object, 7 );

	double mx;
	double mn;

	if( VIPS_OBJECT_CLASS( vips_scale_parent_class )->build( object ) )
		return( -1 );

	if( vips_stats( scale->in, &t[0], NULL ) )
		return( -1 );
	mn = *VIPS_MATRIX( t[0], 0, 0 );
	mx = *VIPS_MATRIX( t[0], 1, 0 );

	if( mn == mx ) {
		/* Range of zero: just return black.
		 */
		if( vips_black( &t[1], scale->in->Xsize, scale->in->Ysize, 
			"bands", scale->in->Bands,
			NULL ) ||
			vips_image_write( t[1], conversion->out ) )
			return( -1 );
	}
	else if( scale->log ) { 
		double f = 255.0 / log10( 1.0 + pow( mx, scale->exp ) );

		if( vips_pow_const1( scale->in, &t[2], scale->exp, NULL ) ||
			vips_linear1( t[2], &t[3], 1.0, 1.0, NULL ) ||
			vips_log10( t[3], &t[4], NULL ) ||
			vips_linear1( t[4], &t[5], f, 0.0, 
				"uchar", TRUE, 
				NULL ) ||
			vips_image_write( t[5], conversion->out ) )
			return( -1 );
	}
	else {
		double f = 255.0 / (mx - mn);

		/* Add .5 to get round-to-nearest.
		 */
		double a = -(mn * f) + 0.5;

		if( vips_linear1( scale->in, &t[2], f, a, 
			"uchar", TRUE, 
			NULL ) ||
			vips_image_write( t[2], conversion->out ) )
			return( -1 );
	}

	return( 0 );
}
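The build method above backs the public vips_scale() operation. As a rough usage sketch (the file names are placeholders; the "log" and "exp" optional arguments correspond to the scale->log and scale->exp members read in the build):

/* Minimal, hypothetical caller for vips_scale(); "in.png" and "out.png"
 * are placeholder file names.
 */
#include <vips/vips.h>

int
main( int argc, char **argv )
{
	VipsImage *in;
	VipsImage *out;

	if( VIPS_INIT( argv[0] ) )
		vips_error_exit( NULL );

	if( !(in = vips_image_new_from_file( "in.png", NULL )) )
		vips_error_exit( NULL );

	/* "log" selects the logarithmic transfer function, "exp" its
	 * exponent; both map to the members used in vips_scale_build().
	 */
	if( vips_scale( in, &out, "log", TRUE, "exp", 0.25, NULL ) ||
		vips_image_write_to_file( out, "out.png", NULL ) )
		vips_error_exit( NULL );

	g_object_unref( out );
	g_object_unref( in );

	return( 0 );
}

Without "log", the else branch in the build runs and the image is simply stretched linearly to the 0-255 uchar range.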
Example #2
File: point.c  Project: jcupitt/libvips
static int
vips_point_build( VipsObject *object )
{
	VipsCreate *create = VIPS_CREATE( object );
	VipsPoint *point = VIPS_POINT( object );
	VipsPointClass *class = VIPS_POINT_GET_CLASS( point );
	VipsImage **t = (VipsImage **) vips_object_local_array( object, 4 );

	VipsImage *in;

	if( VIPS_OBJECT_CLASS( vips_point_parent_class )->build( object ) )
		return( -1 );

	t[0] = vips_image_new();
	vips_image_init_fields( t[0],
		point->width, point->height, 1,
		VIPS_FORMAT_FLOAT, VIPS_CODING_NONE, class->interpretation,
		1.0, 1.0 );
	vips_image_pipelinev( t[0], 
		VIPS_DEMAND_STYLE_ANY, NULL );
	if( vips_image_generate( t[0], 
		NULL, vips_point_gen, NULL, point, NULL ) )
		return( -1 );
	in = t[0];

	if( point->uchar ) {
		float min = class->min;
		float max = class->max;
		float range = max - min;

		if( vips_linear1( in, &t[2], 
			255.0 / range, -min * 255.0 / range, 
			"uchar", TRUE,
			NULL ) )
			return( -1 );
		in = t[2];

		/* We don't want FOURIER or whatever in this case.
		 */
		in->Type = VIPS_INTERPRETATION_MULTIBAND;
	}

	if( vips_image_write( in, create->out ) )
		return( -1 );

	return( 0 );
}
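vips_point_build() is shared by the point-style generator classes built on VipsPoint; each subclass supplies its own vips_point_gen callback plus min/max/interpretation in its class struct. A small, hypothetical caller using vips_grey() (one such generator), with "uchar" set so the vips_linear1() branch above runs:

/* Hypothetical caller: make a 256x256 left-to-right ramp as uchar.
 * "ramp.png" is a placeholder file name.
 */
#include <vips/vips.h>

int
main( int argc, char **argv )
{
	VipsImage *out;

	if( VIPS_INIT( argv[0] ) )
		vips_error_exit( NULL );

	if( vips_grey( &out, 256, 256, "uchar", TRUE, NULL ) ||
		vips_image_write_to_file( out, "ramp.png", NULL ) )
		vips_error_exit( NULL );

	g_object_unref( out );

	return( 0 );
}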
Example #3
static int
vips_smartcrop_attention( VipsSmartcrop *smartcrop, 
	VipsImage *in, int *left, int *top )
{
	/* From smartcrop.js.
	 */
	static double skin_vector[] = {-0.78, -0.57, -0.44};
	static double ones[] = {1.0, 1.0, 1.0};

	VipsImage **t = (VipsImage **) 
		vips_object_local_array( VIPS_OBJECT( smartcrop ), 24 );

	double hscale;
	double vscale;
	double sigma;
	double max;
	int x_pos;
	int y_pos;

	/* The size we shrink to gives the precision with which we can place
	 * the crop
	 */
	hscale = 32.0 / in->Xsize;
	vscale = 32.0 / in->Ysize;
	sigma = VIPS_MAX( sqrt( pow( smartcrop->width * hscale, 2 ) +
		pow( smartcrop->height * vscale, 2 ) ) / 10, 1.0 );
	if( vips_resize( in, &t[17], hscale,
		"vscale", vscale,
		NULL ) )
		return( -1 );

	/* Simple edge detect.
	 */
	if( !(t[21] = vips_image_new_matrixv( 3, 3,
		 0.0, -1.0,  0.0, 
		-1.0,  4.0, -1.0, 
		 0.0, -1.0,  0.0 )) )
		return( -1 );

	/* Convert to XYZ and just use the first three bands.
	 */
	if( vips_colourspace( t[17], &t[0], VIPS_INTERPRETATION_XYZ, NULL ) ||
		vips_extract_band( t[0], &t[1], 0, "n", 3, NULL ) )
		return( -1 );

	/* Edge detect on Y. 
	 */
	if( vips_extract_band( t[1], &t[2], 1, NULL ) ||
		vips_conv( t[2], &t[3], t[21], 
			"precision", VIPS_PRECISION_INTEGER,
			NULL ) ||
		vips_linear1( t[3], &t[4], 5.0, 0.0, NULL ) ||
		vips_abs( t[4], &t[14], NULL ) )
		return( -1 );

	/* Look for skin colours. Taken from smartcrop.js.
	 */
	if( 
		/* Normalise to magnitude of colour in XYZ.
		 */
		pythagoras( smartcrop, t[1], &t[5] ) ||
		vips_divide( t[1], t[5], &t[6], NULL ) ||

		/* Distance from skin point.
		 */
		vips_linear( t[6], &t[7], ones, skin_vector, 3, NULL ) ||
		pythagoras( smartcrop, t[7], &t[8] ) ||

		/* Rescale to 100 - 0 score.
		 */
		vips_linear1( t[8], &t[9], -100.0, 100.0, NULL ) ||

		/* Ignore dark areas.
		 */
		vips_more_const1( t[2], &t[10], 5.0, NULL ) ||
		!(t[11] = vips_image_new_from_image1( t[10], 0.0 )) ||
		vips_ifthenelse( t[10], t[9], t[11], &t[15], NULL ) )
		return( -1 );

	/* Look for saturated areas.
	 */
	if( vips_colourspace( t[1], &t[12], 
		VIPS_INTERPRETATION_LAB, NULL ) ||
		vips_extract_band( t[12], &t[13], 1, NULL ) ||
		vips_ifthenelse( t[10], t[13], t[11], &t[16], NULL ) )
		return( -1 );

	/* Sum, blur and find maxpos.
	 *
	 * The amount of blur is related to the size of the crop
	 * area: how large an area we want to consider for the scoring
	 * function.
	 */

	if( vips_sum( &t[14], &t[18], 3, NULL ) ||
		vips_gaussblur( t[18], &t[19], sigma, NULL ) ||
		vips_max( t[19], &max, "x", &x_pos, "y", &y_pos, NULL ) )
		return( -1 ); 

	/* Centre the crop over the max.
	 */
	*left = VIPS_CLIP( 0, 
		x_pos / hscale - smartcrop->width / 2, 
		in->Xsize - smartcrop->width );
	*top = VIPS_CLIP( 0, 
		y_pos / vscale - smartcrop->height / 2, 
		in->Ysize - smartcrop->height ); 

	return( 0 ); 
}
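This static helper is the scorer behind the VIPS_INTERESTING_ATTENTION mode of the public vips_smartcrop() operation; the left/top it computes determine where the width x height crop is taken from the full-resolution input. A minimal, hypothetical caller (file names are placeholders):

/* Hypothetical caller: crop the most "interesting" 300x300 region
 * using the attention strategy implemented above.
 */
#include <vips/vips.h>

int
main( int argc, char **argv )
{
	VipsImage *in;
	VipsImage *out;

	if( VIPS_INIT( argv[0] ) )
		vips_error_exit( NULL );

	if( !(in = vips_image_new_from_file( "in.jpg", NULL )) )
		vips_error_exit( NULL );

	if( vips_smartcrop( in, &out, 300, 300,
		"interesting", VIPS_INTERESTING_ATTENTION,
		NULL ) ||
		vips_image_write_to_file( out, "crop.jpg", NULL ) )
		vips_error_exit( NULL );

	g_object_unref( out );
	g_object_unref( in );

	return( 0 );
}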