static int producer_get_frame( mlt_producer producer, mlt_frame_ptr frame, int index )
{
    // Generate a frame
    *frame = mlt_frame_init( MLT_PRODUCER_SERVICE( producer ) );
    mlt_profile profile = mlt_service_profile( MLT_PRODUCER_SERVICE( producer ) );

    if ( *frame != NULL )
    {
        // Obtain properties of frame
        mlt_properties frame_properties = MLT_FRAME_PROPERTIES( *frame );

        // Update time code on the frame
        mlt_frame_set_position( *frame, mlt_producer_frame( producer ) );

        mlt_properties_set_int( frame_properties, "progressive", 1 );
        mlt_properties_set_double( frame_properties, "aspect_ratio", mlt_profile_sar( profile ) );
        mlt_properties_set_int( frame_properties, "meta.media.width", profile->width );
        mlt_properties_set_int( frame_properties, "meta.media.height", profile->height );

        // Configure callbacks
        mlt_frame_push_service( *frame, producer );
        mlt_frame_push_get_image( *frame, producer_get_image );
        mlt_frame_push_audio( *frame, producer );
        mlt_frame_push_audio( *frame, producer_get_audio );
    }

    // Calculate the next time code
    mlt_producer_prepare_next( producer );

    return 0;
}

static int get_image_b( mlt_frame b_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    mlt_transition self = mlt_frame_pop_service( b_frame );
    mlt_frame a_frame = mlt_frame_pop_frame( b_frame );
    mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );
    mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

    // Set scaling from A frame if not already provided.
    if ( !mlt_properties_get( b_props, "rescale.interp" ) )
    {
        const char *rescale = mlt_properties_get( a_props, "rescale.interp" );
        if ( !rescale || !strcmp( rescale, "none" ) )
            rescale = "nearest";
        mlt_properties_set( b_props, "rescale.interp", rescale );
    }

    // Ensure sane aspect ratio
    if ( mlt_frame_get_aspect_ratio( b_frame ) == 0.0 )
        mlt_frame_set_aspect_ratio( b_frame, mlt_profile_sar( mlt_service_profile( MLT_TRANSITION_SERVICE( self ) ) ) );

    mlt_properties_pass_list( b_props, a_props, "consumer_deinterlace, deinterlace_method, consumer_tff" );

    return mlt_frame_get_image( b_frame, image, format, width, height, writable );
}

mlt_frame mlt_frame_init( mlt_service service )
{
    // Allocate a frame
    mlt_frame this = calloc( sizeof( struct mlt_frame_s ), 1 );

    if ( this != NULL )
    {
        mlt_profile profile = mlt_service_profile( service );

        // Initialise the properties
        mlt_properties properties = &this->parent;
        mlt_properties_init( properties, this );

        // Set default properties on the frame
        mlt_properties_set_position( properties, "_position", 0.0 );
        mlt_properties_set_data( properties, "image", NULL, 0, NULL, NULL );
        mlt_properties_set_int( properties, "width", profile ? profile->width : 720 );
        mlt_properties_set_int( properties, "height", profile ? profile->height : 576 );
        mlt_properties_set_int( properties, "normalised_width", profile ? profile->width : 720 );
        mlt_properties_set_int( properties, "normalised_height", profile ? profile->height : 576 );
        mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( NULL ) );
        mlt_properties_set_data( properties, "audio", NULL, 0, NULL, NULL );
        mlt_properties_set_data( properties, "alpha", NULL, 0, NULL, NULL );

        // Construct stacks for frames and methods
        this->stack_image = mlt_deque_init( );
        this->stack_audio = mlt_deque_init( );
        this->stack_service = mlt_deque_init( );
    }

    return this;
}

static void draw_cross( uint8_t* image, mlt_profile profile, int line_width )
{
    int x = 0;
    int y = 0;
    int i = 0;

    // Draw a horizontal line
    i = line_width;
    while ( i-- )
    {
        y = ( profile->height - line_width ) / 2 + i;
        x = profile->width - 1;
        while ( x-- )
        {
            mix_pixel( image, profile->width, x, y, LINE_PIXEL_VALUE, 1.0 );
        }
    }

    // Draw a vertical line
    line_width = lrint( (float)line_width * mlt_profile_sar( profile ) );
    i = line_width;
    while ( i-- )
    {
        x = ( profile->width - line_width ) / 2 + i;
        y = profile->height - 1;
        while ( y-- )
        {
            mix_pixel( image, profile->width, x, y, LINE_PIXEL_VALUE, 1.0 );
        }
    }
}

int producer_get_frame( mlt_producer producer, mlt_frame_ptr frame, int index )
{
    // Generate a frame
    *frame = mlt_frame_init( MLT_PRODUCER_SERVICE( producer ) );

    if ( *frame != NULL )
    {
        // Obtain properties of frame and producer
        mlt_properties properties = MLT_FRAME_PROPERTIES( *frame );

        // Obtain properties of producer
        mlt_properties producer_props = MLT_PRODUCER_PROPERTIES( producer );

        // Set the producer on the frame properties
        mlt_properties_set_data( properties, "producer_frei0r", producer, 0, NULL, NULL );

        // Update timecode on the frame we're creating
        mlt_frame_set_position( *frame, mlt_producer_position( producer ) );

        // Set producer-specific frame properties
        mlt_properties_set_int( properties, "progressive", 1 );
        mlt_profile profile = mlt_service_profile( MLT_PRODUCER_SERVICE( producer ) );
        mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( profile ) );

        // Push the get_image method
        mlt_frame_push_get_image( *frame, producer_get_image );
    }

    // Calculate the next timecode
    mlt_producer_prepare_next( producer );

    return 0;
}

mlt_producer mlt_producer_new( mlt_profile profile )
{
    mlt_producer self = malloc( sizeof( struct mlt_producer_s ) );
    if ( self )
    {
        if ( mlt_producer_init( self, NULL ) == 0 )
        {
            mlt_properties_set_data( MLT_PRODUCER_PROPERTIES( self ), "_profile", profile, 0, NULL, NULL );
            mlt_properties_set_double( MLT_PRODUCER_PROPERTIES( self ), "aspect_ratio", mlt_profile_sar( profile ) );
        }
    }
    return self;
}

static void draw_ring( uint8_t* image, mlt_profile profile, int radius, int line_width )
{
    float sar = mlt_profile_sar( profile );
    int x_center = profile->width / 2;
    int y_center = profile->height / 2;
    int max_radius = radius + line_width;
    int a = max_radius + 1;
    int b = 0;

    line_width += 1; // Compensate for aliasing.

    // Scan through each pixel in one quadrant of the circle.
    while ( a-- )
    {
        b = ( max_radius / sar ) + 1.0;
        while ( b-- )
        {
            // Use Pythagorean theorem to determine the distance from this pixel to the center.
            float a2 = a * a;
            float b2 = b * sar * b * sar;
            float c = sqrtf( a2 + b2 );
            float distance = c - radius;

            if ( distance > 0 && distance < line_width )
            {
                // This pixel is within the ring.
                float mix = 1.0;

                if ( distance < 1.0 )
                {
                    // Antialias the outside of the ring
                    mix = distance;
                }
                else if ( (float)line_width - distance < 1.0 )
                {
                    // Antialias the inside of the ring
                    mix = (float)line_width - distance;
                }

                // Apply this value to all 4 quadrants of the circle.
                mix_pixel( image, profile->width, x_center + b, y_center - a, RING_PIXEL_VALUE, mix );
                mix_pixel( image, profile->width, x_center - b, y_center - a, RING_PIXEL_VALUE, mix );
                mix_pixel( image, profile->width, x_center + b, y_center + a, RING_PIXEL_VALUE, mix );
                mix_pixel( image, profile->width, x_center - b, y_center + a, RING_PIXEL_VALUE, mix );
            }
        }
    }
}

static int get_image_a( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    mlt_transition self = mlt_frame_pop_service( a_frame );
    mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );

    // All transitions get scaling
    const char *rescale = mlt_properties_get( a_props, "rescale.interp" );
    if ( !rescale || !strcmp( rescale, "none" ) )
        mlt_properties_set( a_props, "rescale.interp", "nearest" );

    // Ensure sane aspect ratio
    if ( mlt_frame_get_aspect_ratio( a_frame ) == 0.0 )
        mlt_frame_set_aspect_ratio( a_frame, mlt_profile_sar( mlt_service_profile( MLT_TRANSITION_SERVICE( self ) ) ) );

    return mlt_frame_get_image( a_frame, image, format, width, height, writable );
}

static void apply_profile_properties( mlt_consumer self, mlt_profile profile, mlt_properties properties )
{
    mlt_event_block( self->event_listener );
    mlt_properties_set_double( properties, "fps", mlt_profile_fps( profile ) );
    mlt_properties_set_int( properties, "frame_rate_num", profile->frame_rate_num );
    mlt_properties_set_int( properties, "frame_rate_den", profile->frame_rate_den );
    mlt_properties_set_int( properties, "width", profile->width );
    mlt_properties_set_int( properties, "height", profile->height );
    mlt_properties_set_int( properties, "progressive", profile->progressive );
    mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( profile ) );
    mlt_properties_set_int( properties, "sample_aspect_num", profile->sample_aspect_num );
    mlt_properties_set_int( properties, "sample_aspect_den", profile->sample_aspect_den );
    mlt_properties_set_double( properties, "display_ratio", mlt_profile_dar( profile ) );
    mlt_properties_set_int( properties, "display_aspect_num", profile->display_aspect_num );
    mlt_properties_set_int( properties, "display_aspect_den", profile->display_aspect_den );
    mlt_properties_set_int( properties, "colorspace", profile->colorspace );
    mlt_event_unblock( self->event_listener );
}

mlt_producer producer_colour_init( mlt_profile profile, mlt_service_type type, const char *id, char *colour )
{
    mlt_producer producer = calloc( 1, sizeof( struct mlt_producer_s ) );
    if ( producer != NULL && mlt_producer_init( producer, NULL ) == 0 )
    {
        // Get the properties interface
        mlt_properties properties = MLT_PRODUCER_PROPERTIES( producer );

        // Callback registration
        producer->get_frame = producer_get_frame;
        producer->close = ( mlt_destructor )producer_close;

        // Set the default properties
        mlt_properties_set( properties, "resource", ( !colour || !strcmp( colour, "" ) ) ? "0x000000ff" : colour );
        mlt_properties_set( properties, "_resource", "" );
        mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( profile ) );

        return producer;
    }
    free( producer );
    return NULL;
}

static mlt_frame filter_process( mlt_filter filter, mlt_frame frame )
{
    mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );

    if ( mlt_frame_is_test_card( frame ) )
    {
        // The producer does not generate video. This filter will create an
        // image on the producer's behalf.
        mlt_profile profile = mlt_service_profile( MLT_PRODUCER_SERVICE( mlt_frame_get_original_producer( frame ) ) );
        mlt_properties_set_int( frame_properties, "progressive", 1 );
        mlt_properties_set_double( frame_properties, "aspect_ratio", mlt_profile_sar( profile ) );
        mlt_properties_set_int( frame_properties, "meta.media.width", profile->width );
        mlt_properties_set_int( frame_properties, "meta.media.height", profile->height );

        // Tell the framework that there really is an image.
        mlt_properties_set_int( frame_properties, "test_image", 0 );

        // Push a callback to create the image.
        mlt_frame_push_get_image( frame, create_image );
    }

    mlt_frame_push_service( frame, filter );
    mlt_frame_push_get_image( frame, filter_get_image );

    return frame;
}

static int transition_get_image( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    // Get the b frame from the stack
    mlt_frame b_frame = mlt_frame_pop_frame( a_frame );

    // Get the transition object
    mlt_transition transition = mlt_frame_pop_service( a_frame );

    // Get the properties of the transition
    mlt_properties properties = MLT_TRANSITION_PROPERTIES( transition );

    // Get the properties of the a frame
    mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );

    // Get the properties of the b frame
    mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

    // Image, format, width, height and image for the b frame
    uint8_t *b_image = NULL;
    mlt_image_format b_format = mlt_image_rgb24a;
    int b_width;
    int b_height;

    // Assign the current position
    mlt_position position = mlt_transition_get_position( transition, a_frame );

    int mirror = mlt_properties_get_position( properties, "mirror" );
    int length = mlt_transition_get_length( transition );
    if ( mlt_properties_get_int( properties, "always_active" ) )
    {
        mlt_properties props = mlt_properties_get_data( b_props, "_producer", NULL );
        mlt_position in = mlt_properties_get_int( props, "in" );
        mlt_position out = mlt_properties_get_int( props, "out" );
        length = out - in + 1;
    }

    // Obtain the normalised width and height from the a_frame
    mlt_profile profile = mlt_service_profile( MLT_TRANSITION_SERVICE( transition ) );
    int normalised_width = profile->width;
    int normalised_height = profile->height;
    double consumer_ar = mlt_profile_sar( mlt_service_profile( MLT_TRANSITION_SERVICE( transition ) ) );

    // Structures for geometry
    struct mlt_geometry_item_s result;

    if ( mirror && position > length / 2 )
        position = abs( position - length );

    // Fetch the a frame image
    *format = mlt_image_rgb24a;
    mlt_frame_get_image( a_frame, image, format, width, height, 1 );

    // Calculate the region now
    mlt_service_lock( MLT_TRANSITION_SERVICE( transition ) );
    composite_calculate( transition, &result, normalised_width, normalised_height, ( float )position );
    mlt_service_unlock( MLT_TRANSITION_SERVICE( transition ) );

    // Fetch the b frame image
    result.w = ( result.w * *width / normalised_width );
    result.h = ( result.h * *height / normalised_height );
    result.x = ( result.x * *width / normalised_width );
    result.y = ( result.y * *height / normalised_height );

    // Request full resolution of b frame image.
    b_width = mlt_properties_get_int( b_props, "meta.media.width" );
    b_height = mlt_properties_get_int( b_props, "meta.media.height" );
    mlt_properties_set_int( b_props, "rescale_width", b_width );
    mlt_properties_set_int( b_props, "rescale_height", b_height );

    // Suppress padding and aspect normalization.
    char *interps = mlt_properties_get( a_props, "rescale.interp" );
    if ( interps )
        interps = strdup( interps );
    mlt_properties_set( b_props, "rescale.interp", "none" );

    // This is not a field-aware transform.
    mlt_properties_set_int( b_props, "consumer_deinterlace", 1 );

    mlt_frame_get_image( b_frame, &b_image, &b_format, &b_width, &b_height, 0 );

    // Check that both images are of the correct format and process
    if ( *format == mlt_image_rgb24a && b_format == mlt_image_rgb24a )
    {
        float x, y;
        float dx, dy;
        float dz;
        float sw, sh;
        uint8_t *p = *image;

        // Get values from the transition
        float scale_x = mlt_properties_get_double( properties, "scale_x" );
        float scale_y = mlt_properties_get_double( properties, "scale_y" );
        int scale = mlt_properties_get_int( properties, "scale" );
        int b_alpha = mlt_properties_get_int( properties, "b_alpha" );
        float geom_scale_x = (float) b_width / result.w;
        float geom_scale_y = (float) b_height / result.h;
        float cx = result.x + result.w / 2.0;
        float cy = result.y + result.h / 2.0;
        float lower_x = - cx;
        float lower_y = - cy;
        float x_offset = (float) b_width / 2.0;
        float y_offset = (float) b_height / 2.0;
        affine_t affine;
        interpp interp = interpBL_b32;
        int i, j; // loop counters

        affine_init( affine.matrix );

        // Compute the affine transform
        get_affine( &affine, transition, ( float )position );
        dz = MapZ( affine.matrix, 0, 0 );
        if ( ( int )abs( dz * 1000 ) < 25 )
        {
            if ( interps )
                free( interps );
            return 0;
        }

        // Factor scaling into the transformation based on output resolution.
        if ( mlt_properties_get_int( properties, "distort" ) )
        {
            scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
            scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
        }
        else
        {
            // Determine scale with respect to aspect ratio.
            double consumer_dar = consumer_ar * normalised_width / normalised_height;
            double b_ar = mlt_properties_get_double( b_props, "aspect_ratio" );
            double b_dar = b_ar * b_width / b_height;

            if ( b_dar > consumer_dar )
            {
                scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
                scale_y = geom_scale_x * ( scale_y == 0 ? 1 : scale_y );
            }
            else
            {
                scale_x = geom_scale_y * ( scale_x == 0 ? 1 : scale_x );
                scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
            }
            scale_x *= consumer_ar / b_ar;
        }
        if ( scale )
        {
            affine_max_output( affine.matrix, &sw, &sh, dz, *width, *height );
            affine_scale( affine.matrix, sw * MIN( geom_scale_x, geom_scale_y ), sh * MIN( geom_scale_x, geom_scale_y ) );
        }
        else if ( scale_x != 0 && scale_y != 0 )
        {
            affine_scale( affine.matrix, scale_x, scale_y );
        }

        // Set the interpolation function
        if ( interps == NULL || strcmp( interps, "nearest" ) == 0 || strcmp( interps, "neighbor" ) == 0 )
            interp = interpNN_b32;
        else if ( strcmp( interps, "tiles" ) == 0 || strcmp( interps, "fast_bilinear" ) == 0 )
            interp = interpNN_b32;
        else if ( strcmp( interps, "bilinear" ) == 0 )
            interp = interpBL_b32;
        else if ( strcmp( interps, "bicubic" ) == 0 )
            interp = interpBC_b32;
        // TODO: lanczos 8x8
        else if ( strcmp( interps, "hyper" ) == 0 || strcmp( interps, "sinc" ) == 0 || strcmp( interps, "lanczos" ) == 0 )
            interp = interpBC_b32;
        else if ( strcmp( interps, "spline" ) == 0 ) // TODO: spline 4x4 or 6x6
            interp = interpBC_b32;

        // Do the transform with interpolation
        for ( i = 0, y = lower_y; i < *height; i++, y++ )
        {
            for ( j = 0, x = lower_x; j < *width; j++, x++ )
            {
                dx = MapX( affine.matrix, x, y ) / dz + x_offset;
                dy = MapY( affine.matrix, x, y ) / dz + y_offset;
                if ( dx >= 0 && dx < (b_width - 1) && dy >= 0 && dy < (b_height - 1) )
                    interp( b_image, b_width, b_height, dx, dy, result.mix/100.0, p, b_alpha );
                p += 4;
            }
        }
    }

    if ( interps )
        free( interps );

    return 0;
}

static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    int error = 0;

    // Get the properties from the frame
    mlt_properties properties = MLT_FRAME_PROPERTIES( frame );

    // Pop the top of stack now
    mlt_filter filter = mlt_frame_pop_service( frame );
    mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );

    // Retrieve the aspect ratio
    double aspect_ratio = mlt_deque_pop_back_double( MLT_FRAME_IMAGE_STACK( frame ) );
    double consumer_aspect = mlt_profile_sar( mlt_service_profile( MLT_FILTER_SERVICE( filter ) ) );

    // Correct Width/height if necessary
    if ( *width == 0 || *height == 0 )
    {
        *width = profile->width;
        *height = profile->height;
    }

    // Assign requested width/height from our subordinate
    int owidth = *width;
    int oheight = *height;

    // Check for the special case - no aspect ratio means no problem :-)
    if ( aspect_ratio == 0.0 )
        aspect_ratio = consumer_aspect;

    // Reset the aspect ratio
    mlt_properties_set_double( properties, "aspect_ratio", aspect_ratio );

    // XXX: This is a hack, but it forces the force_full_luma to apply by doing a RGB
    // conversion because range scaling only occurs on YUV->RGB. And we do it here,
    // after the deinterlace filter, which only operates in YUV to avoid a YUV->RGB->YUV->?.
    // Instead, it will go YUV->RGB->?.
    if ( mlt_properties_get_int( properties, "force_full_luma" ) )
        *format = mlt_image_rgb24a;

    // Hmmm...
    char *rescale = mlt_properties_get( properties, "rescale.interp" );
    if ( rescale != NULL && !strcmp( rescale, "none" ) )
        return mlt_frame_get_image( frame, image, format, width, height, writable );

    if ( mlt_properties_get_int( properties, "distort" ) == 0 )
    {
        // Normalise the input and out display aspect
        int normalised_width = profile->width;
        int normalised_height = profile->height;
        int real_width = mlt_properties_get_int( properties, "meta.media.width" );
        int real_height = mlt_properties_get_int( properties, "meta.media.height" );
        if ( real_width == 0 )
            real_width = mlt_properties_get_int( properties, "width" );
        if ( real_height == 0 )
            real_height = mlt_properties_get_int( properties, "height" );
        double input_ar = aspect_ratio * real_width / real_height;
        double output_ar = consumer_aspect * owidth / oheight;

        // fprintf( stderr, "real %dx%d normalised %dx%d output %dx%d sar %f in-dar %f out-dar %f\n",
        //     real_width, real_height, normalised_width, normalised_height, owidth, oheight, aspect_ratio, input_ar, output_ar);

        // Optimised for the input_ar > output_ar case (e.g. widescreen on standard)
        int scaled_width = rint( ( input_ar * normalised_width ) / output_ar );
        int scaled_height = normalised_height;

        // Now ensure that our images fit in the output frame
        if ( scaled_width > normalised_width )
        {
            scaled_width = normalised_width;
            scaled_height = rint( ( output_ar * normalised_height ) / input_ar );
        }

        // Now calculate the actual image size that we want
        owidth = rint( scaled_width * owidth / normalised_width );
        oheight = rint( scaled_height * oheight / normalised_height );

        // Tell frame we have conformed the aspect to the consumer
        mlt_frame_set_aspect_ratio( frame, consumer_aspect );
    }

    mlt_properties_set_int( properties, "distort", 0 );

    // Now pass on the calculations down the line
    mlt_properties_set_int( properties, "resize_width", *width );
    mlt_properties_set_int( properties, "resize_height", *height );

    // If there will be padding, then we need packed image format.
    if ( *format == mlt_image_yuv420p )
    {
        int iwidth = mlt_properties_get_int( properties, "width" );
        int iheight = mlt_properties_get_int( properties, "height" );
        if ( iwidth < owidth || iheight < oheight )
            *format = mlt_image_yuv422;
    }

    // Now get the image
    if ( *format == mlt_image_yuv422 )
        owidth -= owidth % 2;
    error = mlt_frame_get_image( frame, image, format, &owidth, &oheight, writable );

    if ( error == 0 && *image )
    {
        int bpp;
        mlt_image_format_size( *format, owidth, oheight, &bpp );
        *image = frame_resize_image( frame, *width, *height, bpp );
    }

    return error;
}

static void draw_clock( uint8_t* image, mlt_profile profile, int angle, int line_width )
{
    float sar = mlt_profile_sar( profile );
    int q = 0;
    int x_center = profile->width / 2;
    int y_center = profile->height / 2;

    line_width += 1; // Compensate for aliasing.

    // Look at each quadrant of the frame to see what should be done.
    for ( q = 1; q <= 4; q++ )
    {
        int max_angle = q * 90;
        int x_sign = ( q == 1 || q == 2 ) ? 1 : -1;
        int y_sign = ( q == 1 || q == 4 ) ? 1 : -1;
        int x_start = x_center * x_sign;
        int y_start = y_center * y_sign;

        // Compensate for rounding error of even lengths
        // (there is no "middle" pixel so everything is offset).
        if ( x_sign == 1 && profile->width % 2 == 0 )
            x_start--;
        if ( y_sign == -1 && profile->height % 2 == 0 )
            y_start++;

        if ( angle >= max_angle )
        {
            // This quadrant is completely behind the clock hand. Fill it in.
            int dx = x_start + x_sign;
            while ( dx )
            {
                dx -= x_sign;
                int dy = y_start + y_sign;
                while ( dy )
                {
                    dy -= y_sign;
                    mix_pixel( image, profile->width, x_center + dx, y_center - dy, CLOCK_PIXEL_VALUE, 1.0 );
                }
            }
        }
        else if ( max_angle - angle < 90 )
        {
            // This quadrant is partially filled
            // Calculate a point (vx,vy) that lies on the line created by the angle from 0,0.
            int vx = 0;
            int vy = y_start;
            float lv = 0;

            // Assume maximum y and calculate the corresponding x value
            // for a point at the other end of this line.
            if ( x_sign * y_sign == 1 )
            {
                vx = x_sign * sar * y_center / tan( ( max_angle - angle ) * M_PI / 180.0 );
            }
            else
            {
                vx = x_sign * sar * y_center * tan( ( max_angle - angle ) * M_PI / 180.0 );
            }

            // Calculate the length of the line defined by vx,vy
            lv = sqrtf( (float)(vx*vx)*sar*sar + (float)vy*vy );

            // Scan through each pixel in the quadrant counting up/down to 0,0.
            int dx = x_start + x_sign;
            while ( dx )
            {
                dx -= x_sign;
                int dy = y_start + y_sign;
                while ( dy )
                {
                    dy -= y_sign;
                    // Calculate the cross product to determine which side of
                    // the line this pixel lies on.
                    int xp = vx * (vy - dy) - vy * (vx - dx);
                    xp = xp * -1; // Easier to work with positive. Positive number means "behind" the line.
                    if ( xp > 0 )
                    {
                        // This pixel is behind the clock hand and should be filled in.
                        // Calculate the distance from the pixel to the line to determine
                        // if it is part of the clock hand.
                        float distance = (float)xp / lv;
                        int val = CLOCK_PIXEL_VALUE;
                        float mix = 1.0;
                        if ( distance < line_width )
                        {
                            // This pixel makes up the clock hand.
                            val = LINE_PIXEL_VALUE;
                            if ( distance < 1.0 )
                            {
                                // Antialias the outside of the clock hand
                                mix = distance;
                            }
                            else if ( (float)line_width - distance < 1.0 )
                            {
                                // Antialias the inside of the clock hand
                                mix_pixel( image, profile->width, x_center + dx, y_center - dy, CLOCK_PIXEL_VALUE, 1.0 );
                                mix = (float)line_width - distance;
                            }
                        }
                        mix_pixel( image, profile->width, x_center + dx, y_center - dy, val, mix );
                    }
                }
            }
        }
    }
}

static void mlt_consumer_property_changed( mlt_properties owner, mlt_consumer self, char *name )
{
    if ( !strcmp( name, "mlt_profile" ) )
    {
        // Get the properties
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );

        // Get the current profile
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );

        // Load the new profile
        mlt_profile new_profile = mlt_profile_init( mlt_properties_get( properties, name ) );

        if ( new_profile )
        {
            // Copy the profile
            if ( profile != NULL )
            {
                free( profile->description );
                memcpy( profile, new_profile, sizeof( struct mlt_profile_s ) );
                profile->description = strdup( new_profile->description );
            }
            else
            {
                profile = new_profile;
            }

            // Apply to properties
            apply_profile_properties( self, profile, properties );
            mlt_profile_close( new_profile );
        }
    }
    else if ( !strcmp( name, "frame_rate_num" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
        {
            profile->frame_rate_num = mlt_properties_get_int( properties, "frame_rate_num" );
            mlt_properties_set_double( properties, "fps", mlt_profile_fps( profile ) );
        }
    }
    else if ( !strcmp( name, "frame_rate_den" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
        {
            profile->frame_rate_den = mlt_properties_get_int( properties, "frame_rate_den" );
            mlt_properties_set_double( properties, "fps", mlt_profile_fps( profile ) );
        }
    }
    else if ( !strcmp( name, "width" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
            profile->width = mlt_properties_get_int( properties, "width" );
    }
    else if ( !strcmp( name, "height" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
            profile->height = mlt_properties_get_int( properties, "height" );
    }
    else if ( !strcmp( name, "progressive" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
            profile->progressive = mlt_properties_get_int( properties, "progressive" );
    }
    else if ( !strcmp( name, "sample_aspect_num" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
        {
            profile->sample_aspect_num = mlt_properties_get_int( properties, "sample_aspect_num" );
            mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( profile ) );
        }
    }
    else if ( !strcmp( name, "sample_aspect_den" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
        {
            profile->sample_aspect_den = mlt_properties_get_int( properties, "sample_aspect_den" );
            mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( profile ) );
        }
    }
    else if ( !strcmp( name, "display_aspect_num" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
        {
            profile->display_aspect_num = mlt_properties_get_int( properties, "display_aspect_num" );
            mlt_properties_set_double( properties, "display_ratio", mlt_profile_dar( profile ) );
        }
    }
    else if ( !strcmp( name, "display_aspect_den" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
        {
            profile->display_aspect_den = mlt_properties_get_int( properties, "display_aspect_den" );
            mlt_properties_set_double( properties, "display_ratio", mlt_profile_dar( profile ) );
        }
    }
    else if ( !strcmp( name, "colorspace" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );
        mlt_profile profile = mlt_service_profile( MLT_CONSUMER_SERVICE( self ) );
        if ( profile )
            profile->colorspace = mlt_properties_get_int( properties, "colorspace" );
    }
}

static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    // Get the filter
    mlt_filter filter = mlt_frame_pop_service( frame );

    // Get the properties
    mlt_properties properties = MLT_FILTER_PROPERTIES( filter );

    // Get the image
    int error = 0;
    *format = mlt_image_rgb24a;

    // Only process if we have no error and a valid colour space
    if ( error == 0 )
    {
        mlt_service_lock( MLT_FILTER_SERVICE( filter ) );
        mlt_producer producer = mlt_properties_get_data( properties, "producer", NULL );
        mlt_transition transition = mlt_properties_get_data( properties, "transition", NULL );
        mlt_frame a_frame = NULL;
        mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );

        if ( producer == NULL )
        {
            char *background = mlt_properties_get( properties, "background" );
            producer = mlt_factory_producer( profile, NULL, background );
            mlt_properties_set_data( properties, "producer", producer, 0, (mlt_destructor)mlt_producer_close, NULL );
        }
        if ( transition == NULL )
        {
            transition = mlt_factory_transition( profile, "qtblend", NULL );
            mlt_properties_set_data( properties, "transition", transition, 0, (mlt_destructor)mlt_transition_close, NULL );
            if ( transition )
                mlt_properties_set_int( MLT_TRANSITION_PROPERTIES( transition ), "b_alpha", 1 );
        }
        if ( producer != NULL && transition != NULL )
        {
            mlt_position position = mlt_filter_get_position( filter, frame );
            mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
            mlt_position in = mlt_filter_get_in( filter );
            mlt_position out = mlt_filter_get_out( filter );
            double consumer_ar = mlt_profile_sar( profile );
            mlt_transition_set_in_and_out( transition, in, out );
            if ( out > 0 )
            {
                mlt_properties_set_position( MLT_PRODUCER_PROPERTIES( producer ), "length", out - in + 1 );
                mlt_producer_set_in_and_out( producer, in, out );
            }
            mlt_producer_seek( producer, in + position );
            mlt_frame_set_position( frame, position );
            mlt_properties_pass( MLT_PRODUCER_PROPERTIES( producer ), properties, "producer." );
            mlt_properties_pass( MLT_TRANSITION_PROPERTIES( transition ), properties, "transition." );
            mlt_service_get_frame( MLT_PRODUCER_SERVICE( producer ), &a_frame, 0 );
            mlt_frame_set_position( a_frame, in + position );

            // Set the rescale interpolation to match the frame
            mlt_properties_set( MLT_FRAME_PROPERTIES( a_frame ), "rescale.interp",
                mlt_properties_get( frame_properties, "rescale.interp" ) );

            // Special case - aspect_ratio = 0
            if ( mlt_frame_get_aspect_ratio( frame ) == 0 )
                mlt_frame_set_aspect_ratio( frame, consumer_ar );
            if ( mlt_frame_get_aspect_ratio( a_frame ) == 0 )
                mlt_frame_set_aspect_ratio( a_frame, consumer_ar );

            // Add the qtblend transition onto the frame stack
            mlt_service_unlock( MLT_FILTER_SERVICE( filter ) );
            mlt_transition_process( transition, a_frame, frame );

            if ( mlt_properties_get_int( properties, "use_normalised" ) )
            {
                // Use the normalised width & height
                mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );
                *width = profile->width;
                *height = profile->height;
            }

            mlt_frame_get_image( a_frame, image, format, width, height, writable );
            mlt_properties_set_data( frame_properties, "affine_frame", a_frame, 0, (mlt_destructor)mlt_frame_close, NULL );
            mlt_frame_set_image( frame, *image, *width * *height * 4, NULL );
            //mlt_frame_set_alpha( frame, mlt_frame_get_alpha_mask( a_frame ), *width * *height, NULL );
        }
        else
        {
            mlt_service_unlock( MLT_FILTER_SERVICE( filter ) );
        }
    }
    return error;
}

static mlt_frame filter_process( mlt_filter filter, mlt_frame frame )
{
    if ( mlt_properties_get_int( MLT_FILTER_PROPERTIES( filter ), "active" ) )
    {
        // Push the get_image method on to the stack
        mlt_frame_push_service( frame, mlt_service_profile( MLT_FILTER_SERVICE( filter ) ) );
        mlt_frame_push_get_image( frame, filter_get_image );
    }
    else
    {
        mlt_properties filter_props = MLT_FILTER_PROPERTIES( filter );
        mlt_properties frame_props = MLT_FRAME_PROPERTIES( frame );
        int left = mlt_properties_get_int( filter_props, "left" );
        int right = mlt_properties_get_int( filter_props, "right" );
        int top = mlt_properties_get_int( filter_props, "top" );
        int bottom = mlt_properties_get_int( filter_props, "bottom" );
        int width = mlt_properties_get_int( frame_props, "meta.media.width" );
        int height = mlt_properties_get_int( frame_props, "meta.media.height" );
        int use_profile = mlt_properties_get_int( filter_props, "use_profile" );
        mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( filter ) );

        if ( use_profile )
        {
            top = top * height / profile->height;
            bottom = bottom * height / profile->height;
            left = left * width / profile->width;
            right = right * width / profile->width;
        }
        if ( mlt_properties_get_int( filter_props, "center" ) )
        {
            double aspect_ratio = mlt_frame_get_aspect_ratio( frame );
            if ( aspect_ratio == 0.0 )
                aspect_ratio = mlt_profile_sar( profile );
            double input_ar = aspect_ratio * width / height;
            double output_ar = mlt_profile_dar( mlt_service_profile( MLT_FILTER_SERVICE( filter ) ) );
            int bias = mlt_properties_get_int( filter_props, "center_bias" );

            if ( input_ar > output_ar )
            {
                left = right = ( width - rint( output_ar * height / aspect_ratio ) ) / 2;
                if ( abs(bias) > left )
                    bias = bias < 0 ? -left : left;
                else if ( use_profile )
                    bias = bias * width / profile->width;
                left -= bias;
                right += bias;
            }
            else
            {
                top = bottom = ( height - rint( aspect_ratio * width / output_ar ) ) / 2;
                if ( abs(bias) > top )
                    bias = bias < 0 ? -top : top;
                else if ( use_profile )
                    bias = bias * height / profile->height;
                top -= bias;
                bottom += bias;
            }
        }

        // Coerce the output to an even width because subsampled YUV with odd widths is too
        // risky for downstream processing to handle correctly.
        left += ( width - left - right ) & 1;
        if ( width - left - right < 8 )
            left = right = 0;
        if ( height - top - bottom < 8 )
            top = bottom = 0;
        mlt_properties_set_int( frame_props, "crop.left", left );
        mlt_properties_set_int( frame_props, "crop.right", right );
        mlt_properties_set_int( frame_props, "crop.top", top );
        mlt_properties_set_int( frame_props, "crop.bottom", bottom );
        mlt_properties_set_int( frame_props, "crop.original_width", width );
        mlt_properties_set_int( frame_props, "crop.original_height", height );
        mlt_properties_set_int( frame_props, "meta.media.width", width - left - right );
        mlt_properties_set_int( frame_props, "meta.media.height", height - top - bottom );
    }
    return frame;
}

mlt_frame getFrame()
{
    struct timeval now;
    struct timespec tm;
    double fps = mlt_producer_get_fps( getProducer() );
    mlt_position position = mlt_producer_position( getProducer() );
    mlt_frame frame = mlt_cache_get_frame( m_cache, position );

    // Allow the buffer to fill to the requested initial buffer level.
    if ( m_isBuffering )
    {
        int prefill = mlt_properties_get_int( MLT_PRODUCER_PROPERTIES( getProducer() ), "prefill" );
        int buffer = mlt_properties_get_int( MLT_PRODUCER_PROPERTIES( getProducer() ), "buffer" );

        m_isBuffering = false;
        prefill = prefill > buffer ? buffer : prefill;
        pthread_mutex_lock( &m_mutex );
        while ( mlt_deque_count( m_queue ) < prefill )
        {
            // Wait up to buffer/fps seconds
            gettimeofday( &now, NULL );
            long usec = now.tv_sec * 1000000 + now.tv_usec;
            usec += 1000000 * buffer / fps;
            tm.tv_sec = usec / 1000000;
            tm.tv_nsec = (usec % 1000000) * 1000;
            if ( pthread_cond_timedwait( &m_condition, &m_mutex, &tm ) )
                break;
        }
        pthread_mutex_unlock( &m_mutex );
    }

    if ( !frame )
    {
        // Wait if queue is empty
        pthread_mutex_lock( &m_mutex );
        while ( mlt_deque_count( m_queue ) < 1 )
        {
            // Wait up to twice frame duration
            gettimeofday( &now, NULL );
            long usec = now.tv_sec * 1000000 + now.tv_usec;
            usec += 2000000 / fps;
            tm.tv_sec = usec / 1000000;
            tm.tv_nsec = (usec % 1000000) * 1000;
            if ( pthread_cond_timedwait( &m_condition, &m_mutex, &tm ) )
                // Stop waiting if error (timed out)
                break;
        }
        frame = ( mlt_frame ) mlt_deque_pop_front( m_queue );
        pthread_mutex_unlock( &m_mutex );

        // add to cache
        if ( frame )
        {
            mlt_frame_set_position( frame, position );
            mlt_cache_put_frame( m_cache, frame );
        }
    }

    // Set frame timestamp and properties
    if ( frame )
    {
        mlt_profile profile = mlt_service_profile( MLT_PRODUCER_SERVICE( getProducer() ) );
        mlt_properties properties = MLT_FRAME_PROPERTIES( frame );
        mlt_properties_set_int( properties, "progressive", profile->progressive );
        mlt_properties_set_int( properties, "meta.media.progressive", profile->progressive );
        mlt_properties_set_int( properties, "top_field_first", m_topFieldFirst );
        mlt_properties_set_double( properties, "aspect_ratio", mlt_profile_sar( profile ) );
        mlt_properties_set_int( properties, "meta.media.sample_aspect_num", profile->sample_aspect_num );
        mlt_properties_set_int( properties, "meta.media.sample_aspect_den", profile->sample_aspect_den );
        mlt_properties_set_int( properties, "meta.media.frame_rate_num", profile->frame_rate_num );
        mlt_properties_set_int( properties, "meta.media.frame_rate_den", profile->frame_rate_den );
        mlt_properties_set_int( properties, "width", profile->width );
        mlt_properties_set_int( properties, "meta.media.width", profile->width );
        mlt_properties_set_int( properties, "height", profile->height );
        mlt_properties_set_int( properties, "meta.media.height", profile->height );
        mlt_properties_set_int( properties, "format", mlt_image_yuv422 );
        mlt_properties_set_int( properties, "colorspace", m_colorspace );
        mlt_properties_set_int( properties, "meta.media.colorspace", m_colorspace );
        mlt_properties_set_int( properties, "audio_frequency", 48000 );
        mlt_properties_set_int( properties, "audio_channels",
            mlt_properties_get_int( MLT_PRODUCER_PROPERTIES( getProducer() ), "channels" ) );
    }
    else
        mlt_log_warning( getProducer(), "buffer underrun\n" );

    return frame;
}

static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    // Error we will return
    int error = 0;

    // Get the watermark filter object
    mlt_filter this = mlt_frame_pop_service( frame );

    // Get the properties of the filter
    mlt_properties properties = MLT_FILTER_PROPERTIES( this );

    mlt_service_lock( MLT_FILTER_SERVICE( this ) );

    // Get the producer from the filter
    mlt_producer producer = mlt_properties_get_data( properties, "producer", NULL );

    // Get the composite from the filter
    mlt_transition composite = mlt_properties_get_data( properties, "composite", NULL );

    // Get the resource to use
    char *resource = mlt_properties_get( properties, "resource" );

    // Get the old resource
    char *old_resource = mlt_properties_get( properties, "_old_resource" );

    // Create a composite if we don't have one
    if ( composite == NULL )
    {
        // Create composite via the factory
        mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( this ) );
        composite = mlt_factory_transition( profile, "composite", NULL );

        // Register the composite for reuse/destruction
        if ( composite != NULL )
            mlt_properties_set_data( properties, "composite", composite, 0, ( mlt_destructor )mlt_transition_close, NULL );
    }

    // If we have one
    if ( composite != NULL )
    {
        // Get the properties
        mlt_properties composite_properties = MLT_TRANSITION_PROPERTIES( composite );

        // Pass all the composite. properties on the filter down
        mlt_properties_pass( composite_properties, properties, "composite." );

        if ( mlt_properties_get( properties, "composite.out" ) == NULL )
            mlt_properties_set_int( composite_properties, "out", mlt_properties_get_int( properties, "_out" ) );

        // Force a refresh
        mlt_properties_set_int( composite_properties, "refresh", 1 );
    }

    // Create a producer if don't have one
    if ( producer == NULL || ( old_resource != NULL && strcmp( resource, old_resource ) ) )
    {
        // Get the factory producer service
        char *factory = mlt_properties_get( properties, "factory" );

        // Create the producer
        mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE( this ) );
        producer = mlt_factory_producer( profile, factory, resource );

        // If we have one
        if ( producer != NULL )
        {
            // Register the producer for reuse/destruction
            mlt_properties_set_data( properties, "producer", producer, 0, ( mlt_destructor )mlt_producer_close, NULL );

            // Ensure that we loop
            mlt_properties_set( MLT_PRODUCER_PROPERTIES( producer ), "eof", "loop" );

            // Set the old resource
            mlt_properties_set( properties, "_old_resource", resource );
        }
    }

    if ( producer != NULL )
    {
        // Get the producer properties
        mlt_properties producer_properties = MLT_PRODUCER_PROPERTIES( producer );

        // Now pass all producer. properties on the filter down
        mlt_properties_pass( producer_properties, properties, "producer." );
    }

    mlt_service_unlock( MLT_FILTER_SERVICE( this ) );

    // Only continue if we have both producer and composite
    if ( composite != NULL && producer != NULL )
    {
        // Get the service of the producer
        mlt_service service = MLT_PRODUCER_SERVICE( producer );

        // We will get the 'b frame' from the producer
        mlt_frame b_frame = NULL;

        // Get the original producer position
        mlt_position position = mlt_filter_get_position( this, frame );

        // Make sure the producer is in the correct position
        mlt_producer_seek( producer, position );

        // Resetting position to appease the composite transition
        mlt_frame_set_position( frame, position );

        // Get the b frame and process with composite if successful
        if ( mlt_service_get_frame( service, &b_frame, 0 ) == 0 )
        {
            // Get the a and b frame properties
            mlt_properties a_props = MLT_FRAME_PROPERTIES( frame );
            mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );
            mlt_profile profile = mlt_service_profile( service );

            // Set the b frame to be in the same position and have same consumer requirements
            mlt_frame_set_position( b_frame, position );
            mlt_properties_set_int( b_props, "consumer_deinterlace",
                mlt_properties_get_int( a_props, "consumer_deinterlace" ) || mlt_properties_get_int( properties, "deinterlace" ) );

            // Check for the special case - no aspect ratio means no problem :-)
            if ( mlt_frame_get_aspect_ratio( b_frame ) == 0 )
                mlt_frame_set_aspect_ratio( b_frame, mlt_profile_sar( profile ) );
            if ( mlt_frame_get_aspect_ratio( frame ) == 0 )
                mlt_frame_set_aspect_ratio( frame, mlt_profile_sar( profile ) );

            if ( mlt_properties_get_int( properties, "distort" ) )
            {
                mlt_properties_set_int( MLT_TRANSITION_PROPERTIES( composite ), "distort", 1 );
                mlt_properties_set_int( a_props, "distort", 1 );
                mlt_properties_set_int( b_props, "distort", 1 );
            }

            *format = mlt_image_yuv422;
            if ( mlt_properties_get_int( properties, "reverse" ) == 0 )
            {
                // Apply all filters that are attached to this filter to the b frame
                mlt_service_apply_filters( MLT_FILTER_SERVICE( this ), b_frame, 0 );

                // Process the frame
                mlt_transition_process( composite, frame, b_frame );

                // Get the image
                error = mlt_frame_get_image( frame, image, format, width, height, 1 );
            }
            else
            {
                char temp[ 132 ];
                int count = 0;
                uint8_t *alpha = NULL;
                const char *rescale = mlt_properties_get( a_props, "rescale.interp" );
                if ( rescale == NULL || !strcmp( rescale, "none" ) )
                    rescale = "hyper";
                mlt_transition_process( composite, b_frame, frame );
                mlt_properties_set_int( a_props, "consumer_deinterlace", 1 );
                mlt_properties_set_int( b_props, "consumer_deinterlace", 1 );
                mlt_properties_set( a_props, "rescale.interp", rescale );
                mlt_properties_set( b_props, "rescale.interp", rescale );
                mlt_service_apply_filters( MLT_FILTER_SERVICE( this ), b_frame, 0 );
                error = mlt_frame_get_image( b_frame, image, format, width, height, 1 );
                alpha = mlt_frame_get_alpha_mask( b_frame );
                mlt_frame_set_image( frame, *image, *width * *height * 2, NULL );
                mlt_frame_set_alpha( frame, alpha, *width * *height, NULL );
                mlt_properties_set_int( a_props, "width", *width );
                mlt_properties_set_int( a_props, "height", *height );
                mlt_properties_set_int( a_props, "progressive", 1 );
                mlt_properties_inc_ref( b_props );
                strcpy( temp, "_b_frame" );
                while ( mlt_properties_get_data( a_props, temp, NULL ) != NULL )
                    sprintf( temp, "_b_frame%d", count ++ );
                mlt_properties_set_data( a_props, temp, b_frame, 0, ( mlt_destructor )mlt_frame_close, NULL );
            }
        }

        // Close the b frame
        mlt_frame_close( b_frame );
    }
    else
    {
        // Get the image from the frame without running fx
        error = mlt_frame_get_image( frame, image, format, width, height, 1 );
    }

    return error;
}

static int transition_get_image( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{
    // Get the b frame from the stack
    mlt_frame b_frame = mlt_frame_pop_frame( a_frame );

    // Get the transition object
    mlt_transition transition = mlt_frame_pop_service( a_frame );

    // Get the properties of the transition
    mlt_properties properties = MLT_TRANSITION_PROPERTIES( transition );

    // Get the properties of the a frame
    mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );

    // Get the properties of the b frame
    mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

    // Image, format, width, height and image for the b frame
    uint8_t *b_image = NULL;
    mlt_image_format b_format = mlt_image_rgb24a;
    int b_width = mlt_properties_get_int( b_props, "meta.media.width" );
    int b_height = mlt_properties_get_int( b_props, "meta.media.height" );
    double b_ar = mlt_frame_get_aspect_ratio( b_frame );
    double b_dar = b_ar * b_width / b_height;

    // Assign the current position
    mlt_position position = mlt_transition_get_position( transition, a_frame );

    int mirror = mlt_properties_get_position( properties, "mirror" );
    int length = mlt_transition_get_length( transition );
    if ( mlt_properties_get_int( properties, "always_active" ) )
    {
        mlt_properties props = mlt_properties_get_data( b_props, "_producer", NULL );
        mlt_position in = mlt_properties_get_int( props, "in" );
        mlt_position out = mlt_properties_get_int( props, "out" );
        length = out - in + 1;
    }

    // Obtain the normalised width and height from the a_frame
    mlt_profile profile = mlt_service_profile( MLT_TRANSITION_SERVICE( transition ) );
    int normalised_width = profile->width;
    int normalised_height = profile->height;
    double consumer_ar = mlt_profile_sar( profile );

    if ( mirror && position > length / 2 )
        position = abs( position - length );

    // Fetch the a frame image
    *format = mlt_image_rgb24a;
    int error = mlt_frame_get_image( a_frame, image, format, width, height, 1 );
    if ( error || !image )
        return error;

    // Calculate the region now
    mlt_rect result = {0, 0, normalised_width, normalised_height, 1.0};
    mlt_service_lock( MLT_TRANSITION_SERVICE( transition ) );

    if ( mlt_properties_get( properties, "geometry" ) )
    {
        // Structures for geometry
        struct mlt_geometry_item_s geometry;
        composite_calculate( transition, &geometry, normalised_width, normalised_height, ( double )position );
        result.x = geometry.x;
        result.y = geometry.y;
        result.w = geometry.w;
        result.h = geometry.h;
        result.o = geometry.mix / 100.0f;
    }
    else if ( mlt_properties_get( properties, "rect" ) )
    {
        // Determine length and obtain cycle
        double cycle = mlt_properties_get_double( properties, "cycle" );

        // Allow a repeat cycle
        if ( cycle >= 1 )
            length = cycle;
        else if ( cycle > 0 )
            length *= cycle;

        mlt_position anim_pos = repeat_position( properties, "rect", position, length );
        result = mlt_properties_anim_get_rect( properties, "rect", anim_pos, length );
        if ( mlt_properties_get( properties, "rect" ) && strchr( mlt_properties_get( properties, "rect" ), '%' ) )
        {
            result.x *= normalised_width;
            result.y *= normalised_height;
            result.w *= normalised_width;
            result.h *= normalised_height;
        }
        result.o = ( result.o == DBL_MIN ) ? 1.0 : MIN( result.o, 1.0 );
    }
    mlt_service_unlock( MLT_TRANSITION_SERVICE( transition ) );

    double geometry_w = result.w;
    double geometry_h = result.h;

    if ( !mlt_properties_get_int( properties, "fill" ) )
    {
        double geometry_dar = result.w * consumer_ar / result.h;

        if ( b_dar > geometry_dar )
        {
            result.w = MIN( result.w, b_width * b_ar / consumer_ar );
            result.h = result.w * consumer_ar / b_dar;
        }
        else
        {
            result.h = MIN( result.h, b_height );
            result.w = result.h * b_dar / consumer_ar;
        }
    }

    // Fetch the b frame image
    result.w = ( result.w * *width / normalised_width );
    result.h = ( result.h * *height / normalised_height );
    result.x = ( result.x * *width / normalised_width );
    result.y = ( result.y * *height / normalised_height );

    if ( mlt_properties_get_int( properties, "b_scaled" ) )
    {
        // Request b frame image size just what is needed.
        b_width = result.w;
        b_height = result.h;
        // Set the rescale interpolation to match the frame
        mlt_properties_set( b_props, "rescale.interp", mlt_properties_get( a_props, "rescale.interp" ) );
    }
    else
    {
        // Request full resolution of b frame image.
        mlt_properties_set_int( b_props, "rescale_width", b_width );
        mlt_properties_set_int( b_props, "rescale_height", b_height );

        // Suppress padding and aspect normalization.
        mlt_properties_set( b_props, "rescale.interp", "none" );
    }

    // This is not a field-aware transform.
    mlt_properties_set_int( b_props, "consumer_deinterlace", 1 );

    error = mlt_frame_get_image( b_frame, &b_image, &b_format, &b_width, &b_height, 0 );
    if ( error || !b_image )
    {
        // Remove potentially large image on the B frame.
        mlt_frame_set_image( b_frame, NULL, 0, NULL );
        return error;
    }

    // Check that both images are of the correct format and process
    if ( *format == mlt_image_rgb24a && b_format == mlt_image_rgb24a )
    {
        double sw, sh;

        // Get values from the transition
        double scale_x = mlt_properties_anim_get_double( properties, "scale_x", position, length );
        double scale_y = mlt_properties_anim_get_double( properties, "scale_y", position, length );
        int scale = mlt_properties_get_int( properties, "scale" );
        double geom_scale_x = (double) b_width / result.w;
        double geom_scale_y = (double) b_height / result.h;
        struct sliced_desc desc = {
            .a_image = *image,
            .b_image = b_image,
            .interp = interpBL_b32,
            .a_width = *width,
            .a_height = *height,
            .b_width = b_width,
            .b_height = b_height,
            .lower_x = -(result.x + result.w / 2.0), // center
            .lower_y = -(result.y + result.h / 2.0), // middle
            .mix = result.o,
            .x_offset = (double) b_width / 2.0,
            .y_offset = (double) b_height / 2.0,
            .b_alpha = mlt_properties_get_int( properties, "b_alpha" ),
            // Affine boundaries
            .minima = 0,
            .xmax = b_width - 1,
            .ymax = b_height - 1
        };

        // Recalculate vars if alignment supplied.
        if ( mlt_properties_get( properties, "halign" ) || mlt_properties_get( properties, "valign" ) )
        {
            double halign = alignment_parse( mlt_properties_get( properties, "halign" ) );
            double valign = alignment_parse( mlt_properties_get( properties, "valign" ) );
            desc.x_offset = halign * b_width / 2.0;
            desc.y_offset = valign * b_height / 2.0;
            desc.lower_x = -(result.x + geometry_w * halign / 2.0f);
            desc.lower_y = -(result.y + geometry_h * valign / 2.0f);
        }

        affine_init( desc.affine.matrix );

        // Compute the affine transform
        get_affine( &desc.affine, transition, ( double )position, length );
        desc.dz = MapZ( desc.affine.matrix, 0, 0 );
        if ( (int) fabs( desc.dz * 1000 ) < 25 )
            return 0;

        // Factor scaling into the transformation based on output resolution.
        if ( mlt_properties_get_int( properties, "distort" ) )
        {
            scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
            scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
        }
        else
        {
            // Determine scale with respect to aspect ratio.
            double consumer_dar = consumer_ar * normalised_width / normalised_height;

            if ( b_dar > consumer_dar )
            {
                scale_x = geom_scale_x * ( scale_x == 0 ? 1 : scale_x );
                scale_y = geom_scale_x * ( scale_y == 0 ? 1 : scale_y );
                scale_y *= b_ar / consumer_ar;
            }
            else
            {
                scale_x = geom_scale_y * ( scale_x == 0 ? 1 : scale_x );
                scale_y = geom_scale_y * ( scale_y == 0 ? 1 : scale_y );
                scale_x *= consumer_ar / b_ar;
            }
        }
        if ( scale )
        {
            affine_max_output( desc.affine.matrix, &sw, &sh, desc.dz, *width, *height );
            affine_scale( desc.affine.matrix, sw * MIN( geom_scale_x, geom_scale_y ), sh * MIN( geom_scale_x, geom_scale_y ) );
        }
        else if ( scale_x != 0 && scale_y != 0 )
        {
            affine_scale( desc.affine.matrix, scale_x, scale_y );
        }

        char *interps = mlt_properties_get( a_props, "rescale.interp" );
        // Copy in case string is changed.
        if ( interps )
            interps = strdup( interps );

        // Set the interpolation function
        if ( interps == NULL || strcmp( interps, "nearest" ) == 0 || strcmp( interps, "neighbor" ) == 0 || strcmp( interps, "tiles" ) == 0 || strcmp( interps, "fast_bilinear" ) == 0 )
        {
            desc.interp = interpNN_b32;
            // uses lrintf. Values should be >= -0.5 and < max + 0.5
            desc.minima -= 0.5;
            desc.xmax += 0.49;
            desc.ymax += 0.49;
        }
        else if ( strcmp( interps, "bilinear" ) == 0 )
        {
            desc.interp = interpBL_b32;
            // uses floorf.
        }
        else if ( strcmp( interps, "bicubic" ) == 0 || strcmp( interps, "hyper" ) == 0 || strcmp( interps, "sinc" ) == 0 || strcmp( interps, "lanczos" ) == 0 || strcmp( interps, "spline" ) == 0 )
        {
            // TODO: lanczos 8x8
            // TODO: spline 4x4 or 6x6
            desc.interp = interpBC_b32;
            // uses ceilf. Values should be > -1 and <= max.
            desc.minima -= 1;
        }
        free( interps );

        // Do the transform with interpolation
        int threads = mlt_properties_get_int( properties, "threads" );
        threads = CLAMP( threads, 0, mlt_slices_count_normal() );
        if ( threads == 1 )
            sliced_proc( 0, 0, 1, &desc );
        else
            mlt_slices_run_normal( threads, sliced_proc, &desc );

        // Remove potentially large image on the B frame.
        mlt_frame_set_image( b_frame, NULL, 0, NULL );
    }
    return 0;
}

static int get_frame( mlt_producer self, mlt_frame_ptr frame, int index )
{
    mlt_properties properties = MLT_PRODUCER_PROPERTIES( self );
    context cx = mlt_properties_get_data( properties, "context", NULL );

    if ( !cx )
    {
        // Allocate and initialize our context
        cx = mlt_pool_alloc( sizeof( struct context_s ) );
        memset( cx, 0, sizeof( *cx ) );
        mlt_properties_set_data( properties, "context", cx, 0, mlt_pool_release, NULL );
        cx->self = self;
        char *profile_name = mlt_properties_get( properties, "profile" );
        if ( !profile_name )
            profile_name = mlt_properties_get( properties, "mlt_profile" );
        mlt_profile profile = mlt_service_profile( MLT_PRODUCER_SERVICE( self ) );

        if ( profile_name )
        {
            cx->profile = mlt_profile_init( profile_name );
            cx->profile->is_explicit = 1;
        }
        else
        {
            cx->profile = mlt_profile_clone( profile );
            cx->profile->is_explicit = 0;
        }

        // Encapsulate a real producer for the resource
        cx->producer = mlt_factory_producer( cx->profile, NULL, mlt_properties_get( properties, "resource" ) );
        if ( ( profile_name && !strcmp( profile_name, "auto" ) ) || mlt_properties_get_int( properties, "autoprofile" ) )
        {
            mlt_profile_from_producer( cx->profile, cx->producer );
            mlt_producer_close( cx->producer );
            cx->producer = mlt_factory_producer( cx->profile, NULL, mlt_properties_get( properties, "resource" ) );
        }

        // Since we control the seeking, prevent it from seeking on its own
        mlt_producer_set_speed( cx->producer, 0 );
        cx->audio_position = -1;

        // We will encapsulate a consumer
        cx->consumer = mlt_consumer_new( cx->profile );
        // Do not use _pass_list on real_time so that it defaults to 0 in the absence of
        // an explicit real_time property.
        mlt_properties_set_int( MLT_CONSUMER_PROPERTIES( cx->consumer ), "real_time",
            mlt_properties_get_int( properties, "real_time" ) );
        mlt_properties_pass_list( MLT_CONSUMER_PROPERTIES( cx->consumer ), properties,
            "buffer, prefill, deinterlace_method, rescale" );

        // Connect it all together
        mlt_consumer_connect( cx->consumer, MLT_PRODUCER_SERVICE( cx->producer ) );
        mlt_consumer_start( cx->consumer );
    }

    // Generate a frame
    *frame = mlt_frame_init( MLT_PRODUCER_SERVICE( self ) );
    if ( *frame )
    {
        // Seek the producer to the correct place
        // Calculate our positions
        double actual_position = (double) mlt_producer_frame( self );
        if ( mlt_producer_get_speed( self ) != 0 )
            actual_position *= mlt_producer_get_speed( self );
        mlt_position need_first = floor( actual_position );
        mlt_producer_seek( cx->producer,
            lrint( need_first * mlt_profile_fps( cx->profile ) / mlt_producer_get_fps( self ) ) );

        // Get the nested frame
        mlt_frame nested_frame = mlt_consumer_rt_frame( cx->consumer );

        // Stack the producer and our methods on the nested frame
        mlt_frame_push_service( *frame, nested_frame );
        mlt_frame_push_service( *frame, cx );
        mlt_frame_push_get_image( *frame, get_image );
        mlt_frame_push_audio( *frame, nested_frame );
        mlt_frame_push_audio( *frame, cx );
        mlt_frame_push_audio( *frame, get_audio );

        // Give the returned frame temporal identity
        mlt_frame_set_position( *frame, mlt_producer_position( self ) );

        // Store the nested frame on the produced frame for destruction
        mlt_properties frame_props = MLT_FRAME_PROPERTIES( *frame );
        mlt_properties_set_data( frame_props, "_producer_consumer.frame", nested_frame, 0,
            (mlt_destructor) mlt_frame_close, NULL );

        // Inform the normalizers about our video properties
        mlt_properties_set_double( frame_props, "aspect_ratio", mlt_profile_sar( cx->profile ) );
        mlt_properties_set_int( frame_props, "width", cx->profile->width );
        mlt_properties_set_int( frame_props, "height", cx->profile->height );
        mlt_properties_set_int( frame_props, "meta.media.width", cx->profile->width );
        mlt_properties_set_int( frame_props, "meta.media.height", cx->profile->height );
        mlt_properties_set_int( frame_props, "progressive", cx->profile->progressive );
    }

    // Calculate the next timecode
    mlt_producer_prepare_next( self );

    return 0;
}