Example #1
static void check_for_reset( mlt_filter filter, int channels, int frequency )
{
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	private_data* pdata = (private_data*)filter->child;

	if( pdata->reset )
	{
		if( pdata->r128 )
		{
			ebur128_destroy( &pdata->r128 );
		}
		pdata->r128 = 0;
		pdata->target_gain = 0.0;
		pdata->start_gain = 0.0;
		pdata->end_gain = 0.0;
		pdata->reset = 0;
		pdata->time_elapsed_ms = 0;
		pdata->prev_o_pos = -1;
		mlt_properties_set_double( properties, "out_gain", 0.0 );
		mlt_properties_set_double( properties, "in_loudness", -100.0 );
		mlt_properties_set_int( properties, "reset_count", mlt_properties_get_int( properties, "reset_count") + 1 );
	}

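	// Create a new libebur128 analysis state if needed; EBUR128_MODE_I enables integrated loudness measurement.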
	if( !pdata->r128 )
	{
		pdata->r128 = ebur128_init( channels, frequency, EBUR128_MODE_I );
		ebur128_set_max_window( pdata->r128, 400 );
		ebur128_set_max_history( pdata->r128, mlt_properties_get_int( properties, "window" ) * 1000.0 );
	}
}
Example #2
static void get_time_info( mlt_producer producer, mlt_frame frame, time_info* info )
{
	mlt_properties producer_properties = MLT_PRODUCER_PROPERTIES( producer );
	mlt_position position = mlt_frame_original_position( frame );

	info->fps = ceil( mlt_producer_get_fps( producer ) );

	char* direction = mlt_properties_get( producer_properties, "direction" );
	if( direction && !strcmp( direction, "down" ) )
	{
		mlt_position length = mlt_properties_get_int( producer_properties, "length" );
		info->position = length - 1 - position;
	}
	else
	{
		info->position = position;
	}

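	// Format the position as drop-frame or non-drop-frame SMPTE timecode.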
	char* tc_str = NULL;
	if( mlt_properties_get_int( producer_properties, "drop" ) )
	{
		tc_str = mlt_properties_frames_to_time( producer_properties, info->position, mlt_time_smpte_df );
	}
	else
	{
		tc_str = mlt_properties_frames_to_time( producer_properties, info->position, mlt_time_smpte_ndf );
	}
	sscanf( tc_str, "%02d:%02d:%02d%c%d", &info->hours, &info->minutes, &info->seconds, &info->sep, &info->frames );
}
Example #3
static int jackrack_get_audio( mlt_frame frame, void **buffer, mlt_audio_format *format, int *frequency, int *channels, int *samples )
{
	// Get the filter service
	mlt_filter filter = mlt_frame_pop_audio( frame );

	// Get the filter properties
	mlt_properties filter_properties = MLT_FILTER_PROPERTIES( filter );

	int jack_frequency = mlt_properties_get_int( filter_properties, "_sample_rate" );

	// Get the producer's audio
	*format = mlt_audio_float;
	mlt_frame_get_audio( frame, buffer, format, &jack_frequency, channels, samples );
	
	// TODO: Deal with sample rate differences
	if ( *frequency != jack_frequency )
		mlt_log_error( MLT_FILTER_SERVICE( filter ), "mismatching frequencies JACK = %d actual = %d\n",
			jack_frequency, *frequency );
	*frequency = jack_frequency;

	// Initialise Jack ports and connections if needed
	if ( mlt_properties_get_int( filter_properties, "_samples" ) == 0 )
		mlt_properties_set_int( filter_properties, "_samples", *samples );
	
	// Get the filter-specific properties
	jack_ringbuffer_t **output_buffers = mlt_properties_get_data( filter_properties, "output_buffers", NULL );
	jack_ringbuffer_t **input_buffers = mlt_properties_get_data( filter_properties, "input_buffers", NULL );
//	pthread_mutex_t *output_lock = mlt_properties_get_data( filter_properties, "output_lock", NULL );
//	pthread_cond_t *output_ready = mlt_properties_get_data( filter_properties, "output_ready", NULL );
	
	// Process the audio
	float *q = (float*) *buffer;
	size_t size = *samples * sizeof(float);
	int j;
//	struct timespec tm = { 0, 0 };

	// Write into output ringbuffer
	for ( j = 0; j < *channels; j++ )
	{
		if ( jack_ringbuffer_write_space( output_buffers[j] ) >= size )
			jack_ringbuffer_write( output_buffers[j], (char*)( q + j * *samples ), size );
	}

	// Synchronization phase - wait for signal from Jack process
	while ( jack_ringbuffer_read_space( input_buffers[ *channels - 1 ] ) < size ) ;
		//pthread_cond_wait( output_ready, output_lock );
		
	// Read from input ringbuffer
	for ( j = 0; j < *channels; j++ )
	{
		if ( jack_ringbuffer_read_space( input_buffers[j] ) >= size )
			jack_ringbuffer_read( input_buffers[j], (char*)( q + j * *samples ), size );
	}

	// help jack_sync() indicate when we are rolling
	mlt_position pos = mlt_frame_get_position( frame );
	mlt_properties_set_position( filter_properties, "_last_pos", pos );

	return 0;
}
Example #4
void mlt_service_apply_filters( mlt_service self, mlt_frame frame, int index )
{
	int i;
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
	mlt_properties service_properties = MLT_SERVICE_PROPERTIES( self );
	mlt_service_base *base = self->local;
	mlt_position position = mlt_frame_get_position( frame );
	mlt_position self_in = mlt_properties_get_position( service_properties, "in" );
	mlt_position self_out = mlt_properties_get_position( service_properties, "out" );

	if ( index == 0 || mlt_properties_get_int( service_properties, "_filter_private" ) == 0 )
	{
		// Process the frame with the attached filters
		for ( i = 0; i < base->filter_count; i ++ )
		{
			if ( base->filters[ i ] != NULL )
			{
				mlt_position in = mlt_filter_get_in( base->filters[ i ] );
				mlt_position out = mlt_filter_get_out( base->filters[ i ] );
				int disable = mlt_properties_get_int( MLT_FILTER_PROPERTIES( base->filters[ i ] ), "disable" );
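				// An in/out of 0/0 means the filter is not limited to a range.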
				if ( !disable && ( ( in == 0 && out == 0 ) || ( position >= in && ( position <= out || out == 0 ) ) ) )
				{
					mlt_properties_set_position( frame_properties, "in", in == 0 ? self_in : in );
					mlt_properties_set_position( frame_properties, "out", out == 0 ? self_out : out );
					mlt_filter_process( base->filters[ i ], frame );
					mlt_service_apply_filters( MLT_FILTER_SERVICE( base->filters[ i ] ), frame, index + 1 );
				}
			}
		}
	}
}
Example #5
static int transition_get_image( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable ){
	
	mlt_frame b_frame = mlt_frame_pop_frame( a_frame );
	mlt_transition transition = mlt_frame_pop_service( a_frame );
	mlt_properties properties = MLT_TRANSITION_PROPERTIES( transition );
	mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );
	mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

	int invert = mlt_properties_get_int( properties, "invert" );

	uint8_t *images[]={NULL,NULL,NULL};

	*format = mlt_image_rgb24a;
	mlt_frame_get_image( a_frame, &images[0], format, width, height, 0 );
	mlt_frame_get_image( b_frame, &images[1], format, width, height, 0 );
	
	double position = mlt_transition_get_position( transition, a_frame );
	mlt_profile profile = mlt_service_profile( MLT_TRANSITION_SERVICE( transition ) );
	double time = position / mlt_profile_fps( profile );
	process_frei0r_item( MLT_TRANSITION_SERVICE(transition), position, time, properties, !invert ? a_frame : b_frame, images, width, height );
	
	// Return the image of the frame the plugin rendered into (a_frame unless inverted).
	*width = mlt_properties_get_int( !invert ? a_props : b_props, "width" );
	*height = mlt_properties_get_int( !invert ? a_props : b_props, "height" );
	*image = mlt_properties_get_data( !invert ? a_props : b_props, "image", NULL );
	return 0;
}
Example #6
static bool check_qimage( mlt_properties frame_properties )
{
	mlt_producer producer = static_cast<mlt_producer>( mlt_properties_get_data( frame_properties, "_producer_qtext", NULL ) );
	mlt_properties producer_properties = MLT_PRODUCER_PROPERTIES( producer );
	QImage* qImg = static_cast<QImage*>( mlt_properties_get_data( producer_properties, "_qimg", NULL ) );
	QSize target_size( mlt_properties_get_int( frame_properties, "rescale_width" ),
					   mlt_properties_get_int( frame_properties, "rescale_height" ) );
	QSize native_size( mlt_properties_get_int( frame_properties, "meta.media.width" ),
					   mlt_properties_get_int( frame_properties, "meta.media.height" ) );

	// Check if the last image signature is different from the path signature
	// for this frame.
	char* last_img_sig = mlt_properties_get( producer_properties, "_img_sig" );
	char* path_sig = mlt_properties_get( frame_properties, "_path_sig" );

	if( !last_img_sig || strcmp( path_sig, last_img_sig ) )
	{
		mlt_properties_set( producer_properties, "_img_sig", path_sig );
		return true;
	}

	// Check if the last image size matches the requested image size
	QSize output_size = target_size;
	if( output_size.isEmpty() )
	{
		output_size = native_size;
	}
	if( output_size != qImg->size() )
	{
		return true;
	}

	return false;
}
Example #7
static int start( mlt_consumer consumer )
{
	// Get the properties
	mlt_properties properties = MLT_CONSUMER_PROPERTIES( consumer );
	DeckLinkConsumer* decklink = (DeckLinkConsumer*) consumer->child;
	int result = decklink->start( mlt_properties_get_int( properties, "preroll" ) ) ? 0 : 1;

	// Check that we're not already running
	if ( !result && !mlt_properties_get_int( properties, "running" ) )
	{
		// Allocate a thread
		pthread_t *pthread = (pthread_t*) calloc( 1, sizeof( pthread_t ) );

		// Assign the thread to properties
		mlt_properties_set_data( properties, "pthread", pthread, sizeof( pthread_t ), free, NULL );

		// Set the running state
		mlt_properties_set_int( properties, "running", 1 );
		mlt_properties_set_int( properties, "joined", 0 );

		// Create the thread
		pthread_create( pthread, NULL, run, consumer->child );
	}
	return result;
}
Example #8
static uint8_t *filter_get_alpha_mask( mlt_frame frame )
{
	uint8_t *alpha = NULL;

	// Obtain properties of frame
	mlt_properties properties = MLT_FRAME_PROPERTIES( frame );

	// Get the shape frame
	mlt_frame shape_frame = mlt_properties_get_data( properties, "shape_frame", NULL );

	// Get the width and height of the image
	int region_width = mlt_properties_get_int( properties, "width" );
	int region_height = mlt_properties_get_int( properties, "height" );
	uint8_t *image = NULL;
	mlt_image_format format = mlt_image_yuv422;
					
	// Get the shape image to trigger alpha creation
	mlt_properties_set_int( MLT_FRAME_PROPERTIES( shape_frame ), "distort", 1 );
	mlt_frame_get_image( shape_frame, &image, &format, &region_width, &region_height, 0 );

	alpha = mlt_frame_get_alpha_mask( shape_frame );

	int size = region_width * region_height;
	uint8_t *alpha_duplicate = mlt_pool_alloc( size );

	int holewhite = mlt_properties_get_int(properties,"holecolor");

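	// holecolor 1 makes near-black areas transparent; 2 makes near-white areas transparent.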
	if (holewhite) {
		alpha = alpha_duplicate;
		while (size--) {
			uint8_t test = ( int )( ( ( *image ++ - 16 ) * 299 ) / 255 );
			if (test <= 10) {
				*alpha = holewhite==1?0:255;
			}
			else if (test >= 250 ) {
				*alpha = holewhite==2?0:255;
			}
			alpha++;
			image++;
		}
	}
	// Generate from the Y component of the image if no alpha available
	else if ( alpha == NULL )
	{
		alpha = alpha_duplicate;
		while ( size -- )
		{
			*alpha ++ = ( int )( ( ( *image ++ - 16 ) * 299 ) / 255 );
			image ++;
		}
	}
	else
	{
		memcpy( alpha_duplicate, alpha, size );
	}
	mlt_frame_set_alpha( frame, alpha_duplicate, region_width * region_height, mlt_pool_release );

	return alpha_duplicate;
}
Example #9
static void draw_spectrum( mlt_filter filter, mlt_frame frame, QImage* qimg )
{
	mlt_properties filter_properties = MLT_FILTER_PROPERTIES( filter );
	mlt_position position = mlt_filter_get_position( filter, frame );
	mlt_position length = mlt_filter_get_length2( filter, frame );
	mlt_rect rect = mlt_properties_anim_get_rect( filter_properties, "rect", position, length );
	if ( strchr( mlt_properties_get( filter_properties, "rect" ), '%' ) ) {
		rect.x *= qimg->width();
		rect.w *= qimg->width();
		rect.y *= qimg->height();
		rect.h *= qimg->height();
	}
	char* graph_type = mlt_properties_get( filter_properties, "type" );
	int mirror = mlt_properties_get_int( filter_properties, "mirror" );
	int fill = mlt_properties_get_int( filter_properties, "fill" );
	double tension = mlt_properties_get_double( filter_properties, "tension" );

	QRectF r( rect.x, rect.y, rect.w, rect.h );
	QPainter p( qimg );

	if( mirror ) {
		// Draw two half-height rectangles instead of one full rectangle.
		r.setHeight( r.height() / 2.0 );
	}

	setup_graph_painter( p, r, filter_properties );
	setup_graph_pen( p, r, filter_properties );

	int bands = mlt_properties_get_int( filter_properties, "bands" );
	if ( bands == 0 ) {
		// "0" means match rectangle width
		bands = r.width();
	}
	float* spectrum = (float*)mlt_pool_alloc( bands * sizeof(float) );

	convert_fft_to_spectrum( filter, frame, bands, spectrum );

	if( graph_type && graph_type[0] == 'b' ) {
		paint_bar_graph( p, r, bands, spectrum );
	} else {
		paint_line_graph( p, r, bands, spectrum, tension, fill );
	}

	if( mirror ) {
		// Second rectangle is mirrored.
		p.translate( 0, r.y() * 2 + r.height() * 2 );
		p.scale( 1, -1 );

		if( graph_type && graph_type[0] == 'b' ) {
			paint_bar_graph( p, r, bands, spectrum );
		} else {
			paint_line_graph( p, r, bands, spectrum, tension, fill );
		}
	}

	mlt_pool_release( spectrum );

	p.end();
}
Example #10
static int mix_audio( mlt_frame frame, mlt_frame that, float weight_start, float weight_end, void **buffer, mlt_audio_format *format, int *frequency, int *channels, int *samples )
{
	int ret = 0;
	int16_t *src, *dest;
	int frequency_src = *frequency, frequency_dest = *frequency;
	int channels_src = *channels, channels_dest = *channels;
	int samples_src = *samples, samples_dest = *samples;
	int i, j;
	double d = 0, s = 0;

	mlt_frame_get_audio( that, (void**) &src, format, &frequency_src, &channels_src, &samples_src );
	mlt_frame_get_audio( frame, (void**) &dest, format, &frequency_dest, &channels_dest, &samples_dest );

	int silent = mlt_properties_get_int( MLT_FRAME_PROPERTIES( frame ), "silent_audio" );
	mlt_properties_set_int( MLT_FRAME_PROPERTIES( frame ), "silent_audio", 0 );
	if ( silent )
		memset( dest, 0, samples_dest * channels_dest * sizeof( int16_t ) );

	silent = mlt_properties_get_int( MLT_FRAME_PROPERTIES( that ), "silent_audio" );
	mlt_properties_set_int( MLT_FRAME_PROPERTIES( that ), "silent_audio", 0 );
	if ( silent )
		memset( src, 0, samples_src * channels_src * sizeof( int16_t ) );

	// determine number of samples to process
	*samples = samples_src < samples_dest ? samples_src : samples_dest;
	*channels = channels_src < channels_dest ? channels_src : channels_dest;
	*buffer = dest;
	*frequency = frequency_dest;

	// Compute a smooth ramp over start to end
	float weight = weight_start;
	float weight_step = ( weight_end - weight_start ) / *samples;

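	// Nothing to mix if both frames share the same audio buffer.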
	if ( src == dest )
	{
		*samples = samples_src;
		*channels = channels_src;
		*buffer = src;
		*frequency = frequency_src;
		return ret;
	}

	// Mixdown
	for ( i = 0; i < *samples; i++ )
	{
		for ( j = 0; j < *channels; j++ )
		{
			if ( j < channels_dest )
				d = (double) dest[ i * channels_dest + j ];
			if ( j < channels_src )
				s = (double) src[ i * channels_src + j ];
			dest[ i * channels_dest + j ] = s * weight + d * ( 1.0 - weight );
		}
		weight += weight_step;
	}

	return ret;
}
Example #11
static void *consumer_thread( void *arg )
{
	// Map the argument to the object
	mlt_consumer consumer = arg;

	// Get the properties
	mlt_properties properties = MLT_CONSUMER_PROPERTIES( consumer );

	// Convenience functionality
	int terminate_on_pause = mlt_properties_get_int( properties, "terminate_on_pause" );
	int terminated = 0;

	// Frame and size
	mlt_frame frame = NULL;

	// Loop while running
	while( !terminated && mlt_properties_get_int( properties, "_running" ) )
	{
		// Get the frame
		frame = mlt_consumer_rt_frame( consumer );

		// Check for termination
		if ( terminate_on_pause && frame != NULL )
			terminated = mlt_properties_get_double( MLT_FRAME_PROPERTIES( frame ), "_speed" ) == 0.0;

		// Check that we have a frame to work with
		if ( frame )
		{
			avsync_stats* stats = mlt_properties_get_data( properties, "_stats", NULL );
			double fps = mlt_properties_get_double( properties, "fps" );
			mlt_position pos = mlt_frame_get_position( frame );

			char* report = mlt_properties_get( properties, "report" );
			if( report && !strcmp( report, "frame" ) )
			{
				stats->report_frames = 1;
			}
			else
			{
				stats->report_frames = 0;
			}

			detect_flash( frame, pos, fps, stats );
			detect_blip( frame, pos, fps, stats );
			calculate_sync( stats );
			report_results( stats, pos );

			// Close the frame
			mlt_events_fire( properties, "consumer-frame-show", frame, NULL );
			mlt_frame_close( frame );
		}
	}

	// Indicate that the consumer is stopped
	mlt_properties_set_int( properties, "_running", 0 );
	mlt_consumer_stopped( consumer );

	return NULL;
}
Example #12
static int dummy_get_image(mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable)
{
	mlt_properties properties = MLT_FRAME_PROPERTIES(frame);
	*image = mlt_properties_get_data(properties, "image", NULL);
	*format = mlt_properties_get_int(properties, "format");
	*width = mlt_properties_get_int(properties, "width");
	*height = mlt_properties_get_int(properties, "height");
	return 0;
}
Example #13
static int producer_get_image( mlt_frame self, uint8_t **buffer, mlt_image_format *format, int *width, int *height, int writable )
{
	uint8_t *data = NULL;
	int size = 0;
	mlt_properties properties = MLT_FRAME_PROPERTIES( self );
	mlt_frame frame = mlt_frame_pop_service( self );
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
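	// Pass the consumer's scaling and deinterlacing hints down to the wrapped frame.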
	mlt_properties_set( frame_properties, "rescale.interp", mlt_properties_get( properties, "rescale.interp" ) );
	mlt_properties_set_int( frame_properties, "resize_alpha", mlt_properties_get_int( properties, "resize_alpha" ) );
	mlt_properties_set_int( frame_properties, "distort", mlt_properties_get_int( properties, "distort" ) );
	mlt_properties_set_int( frame_properties, "consumer_deinterlace", mlt_properties_get_int( properties, "consumer_deinterlace" ) );
	mlt_properties_set( frame_properties, "deinterlace_method", mlt_properties_get( properties, "deinterlace_method" ) );
	mlt_properties_set_int( frame_properties, "consumer_tff", mlt_properties_get_int( properties, "consumer_tff" ) );
	mlt_frame_get_image( frame, buffer, format, width, height, writable );
	mlt_frame_set_image( self, *buffer, 0, NULL );
	mlt_properties_set_int( properties, "width", *width );
	mlt_properties_set_int( properties, "height", *height );
	mlt_properties_set_int( properties, "format", *format );
	mlt_properties_set_double( properties, "aspect_ratio", mlt_frame_get_aspect_ratio( frame ) );
	mlt_properties_set_int( properties, "progressive", mlt_properties_get_int( frame_properties, "progressive" ) );
	mlt_properties_set_int( properties, "distort", mlt_properties_get_int( frame_properties, "distort" ) );
	mlt_properties_set_int( properties, "colorspace", mlt_properties_get_int( frame_properties, "colorspace" ) );
	mlt_properties_set_int( properties, "force_full_luma", mlt_properties_get_int( frame_properties, "force_full_luma" ) );
	mlt_properties_set_int( properties, "top_field_first", mlt_properties_get_int( frame_properties, "top_field_first" ) );
	mlt_properties_set_data( properties, "movit.convert.fence",
		mlt_properties_get_data( frame_properties, "movit.convert.fence", NULL ),
		0, NULL, NULL );
	data = mlt_frame_get_alpha_mask( frame );
	mlt_properties_get_data( frame_properties, "alpha", &size );
	mlt_frame_set_alpha( self, data, size, NULL );
	self->convert_image = frame->convert_image;
	self->convert_audio = frame->convert_audio;
	return 0;
}
Example #14
static void *consumer_thread( void *arg )
{
    mlt_consumer consumer = arg;
    mlt_properties properties = MLT_CONSUMER_PROPERTIES( consumer );
    mlt_frame frame = NULL;

    // Determine whether to stop at end-of-media
    int terminate_on_pause = mlt_properties_get_int( properties, "terminate_on_pause" );
    int terminated = 0;

    // Loop while running
    while ( !terminated && !is_stopped( consumer ) )
    {
        // Get the next frame
        frame = mlt_consumer_rt_frame( consumer );

        // Check for termination
        if ( terminate_on_pause && frame )
            terminated = mlt_properties_get_double( MLT_FRAME_PROPERTIES( frame ), "_speed" ) == 0.0;

        // Check that we have a frame to work with
        if ( frame && !terminated && !is_stopped( consumer ) )
        {
            if ( mlt_properties_get_int( MLT_FRAME_PROPERTIES(frame), "rendered" ) )
            {
                if ( mlt_properties_get_int( MLT_FRAME_PROPERTIES(frame), "_speed" ) == 0 )
                    foreach_consumer_refresh( consumer );
                foreach_consumer_put( consumer, frame );
            }
            else
            {
                int dropped = mlt_properties_get_int( properties, "_dropped" );
                mlt_log_info( MLT_CONSUMER_SERVICE(consumer), "dropped frame %d\n", ++dropped );
                mlt_properties_set_int( properties, "_dropped", dropped );
            }
            mlt_frame_close( frame );
        }
        else
        {
            if ( frame && terminated )
            {
                // Send this termination frame to nested consumers for their cancellation
                foreach_consumer_put( consumer, frame );
            }
            if ( frame )
                mlt_frame_close( frame );
            terminated = 1;
        }
    }

    // Indicate that the consumer is stopped
    mlt_consumer_stopped( consumer );

    return NULL;
}
Example #15
static int filter_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *image_format, int *width, int *height, int writable )
{
	int error = 0;
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
	mlt_filter filter = (mlt_filter)mlt_frame_pop_service( frame );
	int samples = 0;
	int channels = 0;
	int frequency = 0;
	mlt_audio_format audio_format = mlt_audio_s16;
	int16_t* audio = (int16_t*)mlt_properties_get_data( frame_properties, "audio", NULL );

	if ( !audio && !preprocess_warned ) {
		// This filter depends on the consumer processing the audio before the
		// video. If the audio is not preprocessed, this filter will process it.
		// If this filter processes the audio, it could cause confusion for the
		// consumer if it needs different audio properties.
		mlt_log_warning( MLT_FILTER_SERVICE(filter), "Audio not preprocessed. Potential audio distortion.\n" );
		preprocess_warned = true;
	}

	*image_format = mlt_image_rgb24a;

	// Get the current image
	error = mlt_frame_get_image( frame, image, image_format, width, height, writable );

	// Get the audio
	if( !error ) {
		frequency = mlt_properties_get_int( frame_properties, "audio_frequency" );
		if (!frequency) {
			frequency = 48000;
		}
		channels = mlt_properties_get_int( frame_properties, "audio_channels" );
		if (!channels) {
			channels = 2;
		}
		samples = mlt_properties_get_int( frame_properties, "audio_samples" );
		if (!samples) {
			mlt_producer producer = mlt_frame_get_original_producer( frame );
			double fps = mlt_producer_get_fps( mlt_producer_cut_parent( producer ) );
			samples = mlt_sample_calculator( fps, frequency, mlt_frame_get_position( frame ) );
		}

		error = mlt_frame_get_audio( frame, (void**)&audio, &audio_format, &frequency, &channels, &samples );
	}

	// Draw the waveforms
	if( !error ) {
		QImage qimg( *width, *height, QImage::Format_ARGB32 );
		convert_mlt_to_qimage_rgba( *image, &qimg, *width, *height );
		draw_waveforms( filter, frame, &qimg, audio, channels, samples );
		convert_qimage_to_mlt_rgba( &qimg, *image, *width, *height );
	}

	return error;
}
Example #16
static int filter_get_audio( mlt_frame frame, void** buffer, mlt_audio_format* format, int* frequency, int* channels, int* samples )
{
	mlt_filter filter = (mlt_filter)mlt_frame_pop_audio( frame );
	mlt_properties filter_properties = MLT_FILTER_PROPERTIES( filter );
	private_data* pdata = (private_data*)filter->child;

	// Create the FFT filter the first time.
	if( !pdata->fft )
	{
		mlt_profile profile = mlt_service_profile( MLT_FILTER_SERVICE(filter) );
		pdata->fft = mlt_factory_filter( profile, "fft", NULL );
		if( !pdata->fft )
		{
			mlt_log_warning( MLT_FILTER_SERVICE(filter), "Unable to create FFT.\n" );
			return 1;
		}
		// Configure the FFT only after confirming it was created.
		mlt_properties_set_int( MLT_FILTER_PROPERTIES( pdata->fft ), "window_size",
				mlt_properties_get_int( filter_properties, "window_size" ) );
	}

	mlt_properties fft_properties = MLT_FILTER_PROPERTIES( pdata->fft );

	// The service must stay locked while using the private data
	mlt_service_lock( MLT_FILTER_SERVICE( filter ) );

	// Perform FFT processing on the frame
	mlt_filter_process( pdata->fft, frame );
	mlt_frame_get_audio( frame, buffer, format, frequency, channels, samples );

	float* bins = (float*)mlt_properties_get_data( fft_properties, "bins", NULL );

	if( bins )
	{
		double window_level = mlt_properties_get_double( fft_properties, "window_level" );
		int bin_count = mlt_properties_get_int( fft_properties, "bin_count" );
		size_t bins_size = bin_count * sizeof(float);
		float* save_bins = (float*)mlt_pool_alloc( bins_size );

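		// window_level == 1.0 indicates the FFT window was completely filled; otherwise store zeroed bins.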
		if( window_level == 1.0 )
		{
			memcpy( save_bins, bins, bins_size );
		} else {
			memset( save_bins, 0, bins_size );
		}

		// Save the bin data as a property on the frame to be used in get_image()
		mlt_properties_set_data( MLT_FRAME_PROPERTIES(frame), pdata->fft_prop_name, save_bins, bins_size, mlt_pool_release, NULL );
	}

	mlt_service_unlock( MLT_FILTER_SERVICE( filter ) );

	return 0;
}
Example #17
void mlt_profile_from_producer( mlt_profile profile, mlt_producer producer )
{
	mlt_frame fr = NULL;
	uint8_t *buffer = NULL;
	mlt_image_format fmt = mlt_image_none;
	mlt_properties p;
	int w = profile->width;
	int h = profile->height;

	if ( ! mlt_service_get_frame( MLT_PRODUCER_SERVICE(producer), &fr, 0 ) && fr )
	{
		if ( ! mlt_frame_get_image( fr, &buffer, &fmt, &w, &h, 0 ) )
		{
			// Some source properties are not exposed until after the first get_image call.
			mlt_frame_close( fr );
			mlt_service_get_frame( MLT_PRODUCER_SERVICE(producer), &fr, 0 );
			p = MLT_FRAME_PROPERTIES( fr );
//			mlt_properties_dump(p, stderr);
			if ( mlt_properties_get_int( p, "meta.media.frame_rate_den" ) &&
				 mlt_properties_get_int( p, "meta.media.sample_aspect_den" ) )
			{
				profile->width = mlt_properties_get_int( p, "meta.media.width" );
				profile->height = mlt_properties_get_int( p, "meta.media.height" );
				profile->progressive = mlt_properties_get_int( p, "meta.media.progressive" );
				if ( 1000 > mlt_properties_get_double( p, "meta.media.frame_rate_num" )
				          / mlt_properties_get_double( p, "meta.media.frame_rate_den" ) )
				{
					profile->frame_rate_num = mlt_properties_get_int( p, "meta.media.frame_rate_num" );
					profile->frame_rate_den = mlt_properties_get_int( p, "meta.media.frame_rate_den" );
				} else {
					profile->frame_rate_num = 60;
					profile->frame_rate_den = 1;
				}
				// AVCHD is mis-reported as double frame rate.
				if ( profile->progressive == 0 && (
				     profile->frame_rate_num / profile->frame_rate_den == 50 ||
				     profile->frame_rate_num / profile->frame_rate_den == 59 ) )
					profile->frame_rate_num /= 2;
				profile->sample_aspect_num = mlt_properties_get_int( p, "meta.media.sample_aspect_num" );
				profile->sample_aspect_den = mlt_properties_get_int( p, "meta.media.sample_aspect_den" );
				profile->colorspace = mlt_properties_get_int( p, "meta.media.colorspace" );
				profile->display_aspect_num = lrint( (double) profile->sample_aspect_num * profile->width
					/ profile->sample_aspect_den );
				profile->display_aspect_den = profile->height;
				free( profile->description );
				profile->description = strdup( "automatic" );
				profile->is_explicit = 0;
			}
		}
	}
	mlt_frame_close( fr );
	mlt_producer_seek( producer, 0 );
}
Example #18
static void generate_qimage( mlt_properties frame_properties )
{
	mlt_producer producer = static_cast<mlt_producer>( mlt_properties_get_data( frame_properties, "_producer_qtext", NULL ) );
	mlt_properties producer_properties = MLT_PRODUCER_PROPERTIES( producer );
	QImage* qImg = static_cast<QImage*>( mlt_properties_get_data( producer_properties, "_qimg", NULL ) );
	QSize target_size( mlt_properties_get_int( frame_properties, "rescale_width" ),
					   mlt_properties_get_int( frame_properties, "rescale_height" ) );
	QSize native_size( mlt_properties_get_int( frame_properties, "meta.media.width" ),
					   mlt_properties_get_int( frame_properties, "meta.media.height" ) );
	QPainterPath* qPath = static_cast<QPainterPath*>( mlt_properties_get_data( frame_properties, "_qpath", NULL ) );
	mlt_color bg_color = mlt_properties_get_color( frame_properties, "_bgcolour" );
	mlt_color fg_color = mlt_properties_get_color( frame_properties, "_fgcolour" );
	mlt_color ol_color = mlt_properties_get_color( frame_properties, "_olcolour" );
	int outline = mlt_properties_get_int( frame_properties, "_outline" );
	qreal sx = 1.0;
	qreal sy = 1.0;

	// Create a new image and set up scaling
	if( !target_size.isEmpty() && target_size != native_size )
	{
		*qImg = QImage( target_size, QImage::Format_ARGB32 );
		sx = (qreal)target_size.width() / (qreal)native_size.width();
		sy = (qreal)target_size.height() / (qreal)native_size.height();
	}
	else
	{
		*qImg = QImage( native_size, QImage::Format_ARGB32 );
	}
	qImg->fill( QColor( bg_color.r, bg_color.g, bg_color.b, bg_color.a ).rgba() );

	// Draw the text
	QPainter painter( qImg );
	// Scale the painter rather than the image for better looking results.
	painter.scale( sx, sy );
	painter.setRenderHints( QPainter::Antialiasing | QPainter::TextAntialiasing | QPainter::HighQualityAntialiasing );

	QPen pen;
	pen.setWidth( outline );
	if( outline )
	{
		pen.setColor( QColor( ol_color.r, ol_color.g, ol_color.b, ol_color.a ) );
	}
	else
	{
		pen.setColor( QColor( bg_color.r, bg_color.g, bg_color.b, bg_color.a ) );
	}
	painter.setPen( pen );
	QBrush brush( QColor( fg_color.r, fg_color.g, fg_color.b, fg_color.a ) );
	painter.setBrush( brush );
	painter.drawPath( *qPath );
}
Example #19
static void get_transform_config( VSTransformConfig* conf, mlt_filter filter, mlt_frame frame )
{
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	const char* filterName = mlt_properties_get( properties, "mlt_service" );

	*conf = vsTransformGetDefaultConfig( filterName );
	conf->smoothing = mlt_properties_get_int( properties, "smoothing" );
	conf->maxShift = mlt_properties_get_int( properties, "maxshift" );
	conf->maxAngle = mlt_properties_get_double( properties, "maxangle" );
	conf->crop = (VSBorderType)mlt_properties_get_int( properties, "crop" );
	conf->zoom = mlt_properties_get_int( properties, "zoom" );
	conf->optZoom = mlt_properties_get_int( properties, "optzoom" );
	conf->zoomSpeed = mlt_properties_get_double( properties, "zoomspeed" );
	conf->relative = mlt_properties_get_int( properties, "relative" );
	conf->invert = mlt_properties_get_int( properties, "invert" );
	if ( mlt_properties_get_int( properties, "tripod" ) != 0 )
	{
		// Virtual tripod mode: relative=False, smoothing=0
		conf->relative = 0;
		conf->smoothing = 0;
	}

	// by default a bicubic interpolation is selected
	const char *interps = mlt_properties_get( MLT_FRAME_PROPERTIES( frame ), "rescale.interp" );
	conf->interpolType = VS_BiCubic;
	if ( interps )
	{
		if ( strcmp( interps, "nearest" ) == 0 || strcmp( interps, "neighbor" ) == 0 )
			conf->interpolType = VS_Zero;
		else if ( strcmp( interps, "tiles" ) == 0 || strcmp( interps, "fast_bilinear" ) == 0 )
			conf->interpolType = VS_Linear;
		else if ( strcmp( interps, "bilinear" ) == 0 )
			conf->interpolType = VS_BiLinear;
	}
}
Example #20
mlt_consumer consumer_sdl_preview_init( mlt_profile profile, mlt_service_type type, const char *id, char *arg )
{
	consumer_sdl self = calloc( 1, sizeof( struct consumer_sdl_s ) );
	if ( self != NULL && mlt_consumer_init( &self->parent, self, profile ) == 0 )
	{
		// Get the parent consumer object
		mlt_consumer parent = &self->parent;

		// Get the properties
		mlt_properties properties = MLT_CONSUMER_PROPERTIES( parent );

		// Get the width and height
		int width = mlt_properties_get_int( properties, "width" );
		int height = mlt_properties_get_int( properties, "height" );

		// Process actual param
		if ( arg == NULL || sscanf( arg, "%dx%d", &width, &height ) == 2 )
		{
			mlt_properties_set_int( properties, "width", width );
			mlt_properties_set_int( properties, "height", height );
		}

		// Create child consumers
		self->play = mlt_factory_consumer( profile, "sdl", arg );
		self->still = mlt_factory_consumer( profile, "sdl_still", arg );
		mlt_properties_set( properties, "rescale", "nearest" );
		mlt_properties_set( properties, "deinterlace_method", "onefield" );
		mlt_properties_set_int( properties, "prefill", 1 );
		mlt_properties_set_int( properties, "top_field_first", -1 );

		parent->close = consumer_close;
		parent->start = consumer_start;
		parent->stop = consumer_stop;
		parent->is_stopped = consumer_is_stopped;
		parent->purge = consumer_purge;
		self->joined = 1;
		mlt_events_listen( MLT_CONSUMER_PROPERTIES( self->play ), self, "consumer-frame-show", ( mlt_listener )consumer_frame_show_cb );
		mlt_events_listen( MLT_CONSUMER_PROPERTIES( self->still ), self, "consumer-frame-show", ( mlt_listener )consumer_frame_show_cb );
		mlt_events_listen( MLT_CONSUMER_PROPERTIES( self->play ), self, "consumer-sdl-event", ( mlt_listener )consumer_sdl_event_cb );
		mlt_events_listen( MLT_CONSUMER_PROPERTIES( self->still ), self, "consumer-sdl-event", ( mlt_listener )consumer_sdl_event_cb );
		pthread_cond_init( &self->refresh_cond, NULL );
		pthread_mutex_init( &self->refresh_mutex, NULL );
		mlt_events_listen( MLT_CONSUMER_PROPERTIES( parent ), self, "property-changed", ( mlt_listener )consumer_refresh_cb );
		mlt_events_register( properties, "consumer-sdl-paused", NULL );
		return parent;
	}
	free( self );
	return NULL;
}
Example #21
static int producer_get_frame( mlt_producer producer, mlt_frame_ptr frame, int index )
{
	if ( frame )
	{
		// Construct a new frame
		*frame = mlt_frame_init( MLT_PRODUCER_SERVICE( producer ) );

		// Stack the producer and producer's get image
		mlt_frame_push_service_int( *frame, index );
		mlt_frame_push_service( *frame, producer );
		mlt_frame_push_service( *frame, framebuffer_get_image );

		mlt_properties properties = MLT_PRODUCER_PROPERTIES( producer );
		mlt_properties frame_properties = MLT_FRAME_PROPERTIES(*frame);

		// Get frame from the real producer
		mlt_frame first_frame = mlt_properties_get_data( properties, "first_frame", NULL );

		if ( first_frame == NULL )
		{
			// Get the frame to cache from the real producer
			mlt_producer real_producer = mlt_properties_get_data( properties, "producer", NULL );

			// Seek the producer to the correct place
			mlt_producer_seek( real_producer, mlt_producer_position( producer ) );

			// Get the frame
			mlt_service_get_frame( MLT_PRODUCER_SERVICE( real_producer ), &first_frame, index );

			// Cache the frame
			mlt_properties_set_data( properties, "first_frame", first_frame, 0, ( mlt_destructor )mlt_frame_close, NULL );
		}

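		// Copy the cached frame's properties onto the new frame.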
		mlt_properties_inherit( frame_properties, MLT_FRAME_PROPERTIES(first_frame) );
		
		double force_aspect_ratio = mlt_properties_get_double( properties, "force_aspect_ratio" );
		if ( force_aspect_ratio <= 0.0 ) force_aspect_ratio = mlt_properties_get_double( properties, "aspect_ratio" );
		mlt_properties_set_double( frame_properties, "aspect_ratio", force_aspect_ratio );
                
		// Give the returned frame temporal identity
		mlt_frame_set_position( *frame, mlt_producer_position( producer ) );

		mlt_properties_set_int( frame_properties, "meta.media.width", mlt_properties_get_int( properties, "width" ) );
		mlt_properties_set_int( frame_properties, "meta.media.height", mlt_properties_get_int( properties, "height" ) );
		mlt_properties_pass_list( frame_properties, properties, "width, height" );
	}

	return 0;
}
Example #22
static int transition_get_image( mlt_frame a_frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable ){
	
	if (*format!=mlt_image_yuv422 ){
		return -1;
	}
	
	mlt_frame b_frame = mlt_frame_pop_frame( a_frame );
	mlt_transition transition = mlt_frame_pop_service( a_frame );
	mlt_properties properties = MLT_TRANSITION_PROPERTIES( transition );
	mlt_properties a_props = MLT_FRAME_PROPERTIES( a_frame );
	mlt_properties b_props = MLT_FRAME_PROPERTIES( b_frame );

	int invert = mlt_properties_get_int( properties, "invert" );

	if ( mlt_properties_get( a_props, "rescale.interp" ) == NULL || !strcmp( mlt_properties_get( a_props, "rescale.interp" ), "none" ) )
		mlt_properties_set( a_props, "rescale.interp", "nearest" );

	// set consumer_aspect_ratio for a and b frame
	if ( mlt_properties_get_double( a_props, "aspect_ratio" ) == 0.0 )
		mlt_properties_set_double( a_props, "aspect_ratio", mlt_properties_get_double( a_props, "consumer_aspect_ratio" ) );
	if ( mlt_properties_get_double( b_props, "aspect_ratio" ) == 0.0 )
		mlt_properties_set_double( b_props, "aspect_ratio", mlt_properties_get_double( a_props, "consumer_aspect_ratio" ) );
	mlt_properties_set_double( b_props, "consumer_aspect_ratio", mlt_properties_get_double( a_props, "consumer_aspect_ratio" ) );

	if ( mlt_properties_get( b_props, "rescale.interp" ) == NULL || !strcmp( mlt_properties_get( b_props, "rescale.interp" ), "none" ) )
		mlt_properties_set( b_props, "rescale.interp", "nearest" );
	
	uint8_t *images[]={NULL,NULL,NULL};

	mlt_frame_get_image( a_frame, &images[0], format, width, height, 1 );
	mlt_frame_get_image( b_frame, &images[1], format, width, height, 1 );
	
	mlt_position in = mlt_transition_get_in( transition );
	mlt_position out = mlt_transition_get_out( transition );

	// Get the position of the frame
	char *name = mlt_properties_get( MLT_TRANSITION_PROPERTIES( transition ), "_unique_id" );
	mlt_position position = mlt_properties_get_position( MLT_FRAME_PROPERTIES( a_frame ), name );

	float pos=( float )( position - in ) / ( float )( out - in + 1 );
	
	process_frei0r_item( transition_type , pos , properties, !invert ? a_frame : b_frame , images , format, width,height, writable );
	
	*width = mlt_properties_get_int( !invert ? a_props : b_props, "width" );
	*height = mlt_properties_get_int( !invert ? a_props : b_props, "height" );
	*image = mlt_properties_get_data( !invert ? a_props : b_props, "image", NULL );
	return 0;
}
Example #23
mlt_frame mlt_consumer_rt_frame( mlt_consumer self )
{
	// Frame to return
	mlt_frame frame = NULL;

	// Get the properties
	mlt_properties properties = MLT_CONSUMER_PROPERTIES( self );

	// Check if the user has requested real time or not
	if ( self->real_time > 1 || self->real_time < -1 )
	{
		// Use the multi-threaded worker implementation
		return worker_get_frame( self, properties );
	}
	else if ( self->real_time == 1 || self->real_time == -1 )
	{
		int size = 1;

		// Is the read ahead running?
		if ( self->ahead == 0 )
		{
			int buffer = mlt_properties_get_int( properties, "buffer" );
			int prefill = mlt_properties_get_int( properties, "prefill" );
			consumer_read_ahead_start( self );
			if ( buffer > 1 )
				size = prefill > 0 && prefill < buffer ? prefill : buffer;
		}

		// Get frame from queue
		pthread_mutex_lock( &self->queue_mutex );
		while( self->ahead && mlt_deque_count( self->queue ) < size )
			pthread_cond_wait( &self->queue_cond, &self->queue_mutex );
		frame = mlt_deque_pop_front( self->queue );
		pthread_cond_broadcast( &self->queue_cond );
		pthread_mutex_unlock( &self->queue_mutex );
	}
	else // real_time == 0
	{
		// Get the frame in non real time
		frame = mlt_consumer_get_frame( self );

		// This isn't true, but from the consumer's perspective it is
		if ( frame != NULL )
			mlt_properties_set_int( MLT_FRAME_PROPERTIES( frame ), "rendered", 1 );
	}

	return frame;
}
Example #24
static void foreach_consumer_stop( mlt_consumer consumer )
{
    mlt_properties properties = MLT_CONSUMER_PROPERTIES( consumer );
    mlt_consumer nested = NULL;
    char key[30];
    int index = 0;
    struct timespec tm = { 0, 1000 * 1000 };

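    // Iterate over the nested consumers stored as "0.consumer", "1.consumer", ...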
    do {
        snprintf( key, sizeof(key), "%d.consumer", index++ );
        nested = mlt_properties_get_data( properties, key, NULL );
        if ( nested )
        {
            // Let consumers with terminate_on_pause stop on their own
            if ( mlt_properties_get_int( MLT_CONSUMER_PROPERTIES(nested), "terminate_on_pause" ) )
            {
                // Send additional dummy frame to unlatch nested consumer's threads
                mlt_consumer_put_frame( nested, mlt_frame_init( MLT_CONSUMER_SERVICE(consumer) ) );
                // wait for stop
                while ( !mlt_consumer_is_stopped( nested ) )
                    nanosleep( &tm, NULL );
            }
            else
            {
                mlt_consumer_stop( nested );
            }
        }
    } while ( nested );
}
Example #25
static int consumer_stop( mlt_consumer parent )
{
	// Get the actual object
	consumer_sdl self = parent->child;

	if ( self->joined == 0 )
	{
		mlt_properties properties = MLT_CONSUMER_PROPERTIES( parent );
		int app_locked = mlt_properties_get_int( properties, "app_locked" );
		void ( *lock )( void ) = mlt_properties_get_data( properties, "app_lock", NULL );
		void ( *unlock )( void ) = mlt_properties_get_data( properties, "app_unlock", NULL );

		if ( app_locked && unlock ) unlock( );

		// Kill the thread and clean up
		self->running = 0;

		pthread_mutex_lock( &self->refresh_mutex );
		pthread_cond_broadcast( &self->refresh_cond );
		pthread_mutex_unlock( &self->refresh_mutex );
#ifndef WIN32
		if ( self->thread )
#endif
			pthread_join( self->thread, NULL );
		self->joined = 1;

		if ( app_locked && lock ) lock( );
		
		pthread_mutex_lock( &mlt_sdl_mutex );
		SDL_Quit( );
		pthread_mutex_unlock( &mlt_sdl_mutex );
	}

	return 0;
}
Example #26
int mlt_producer_seek( mlt_producer self, mlt_position position )
{
	// Determine eof handling
	mlt_properties properties = MLT_PRODUCER_PROPERTIES( self );
	char *eof = mlt_properties_get( properties, "eof" );
	int use_points = 1 - mlt_properties_get_int( properties, "ignore_points" );

	// Recursive behaviour for cuts - repositions parent and then repositions cut
	// hence no return on this condition
	if ( mlt_producer_is_cut( self ) )
		mlt_producer_seek( mlt_producer_cut_parent( self ), position + mlt_producer_get_in( self ) );

	// Check bounds
	if ( position < 0 || mlt_producer_get_playtime( self ) == 0 )
	{
		position = 0;
	}
	else if ( use_points && ( eof == NULL || !strcmp( eof, "pause" ) ) && position >= mlt_producer_get_playtime( self ) )
	{
		mlt_producer_set_speed( self, 0 );
		position = mlt_producer_get_playtime( self ) - 1;
	}
	else if ( use_points && eof && !strcmp( eof, "loop" ) && position >= mlt_producer_get_playtime( self ) )
	{
		position = (int)position % (int)mlt_producer_get_playtime( self );
	}

	// Set the position
	mlt_properties_set_position( MLT_PRODUCER_PROPERTIES( self ), "_position", position );

	// Calculate the absolute frame
	mlt_properties_set_position( MLT_PRODUCER_PROPERTIES( self ), "_frame", use_points * mlt_producer_get_in( self ) + position );

	return 0;
}
Example #27
static int stop( mlt_consumer consumer )
{
    // Check that we're running
    if ( !mlt_properties_get_int( MLT_CONSUMER_PROPERTIES(consumer), "joined" ) )
    {
        mlt_properties properties = MLT_CONSUMER_PROPERTIES( consumer );
        pthread_t *thread = mlt_properties_get_data( properties, "thread", NULL );

        // Stop the thread
        mlt_properties_set_int( properties, "running", 0 );

        // Wait for termination
        if ( thread )
        {
            foreach_consumer_refresh( consumer );
            pthread_join( *thread, NULL );
        }
        mlt_properties_set_int( properties, "joined", 1 );

        // Stop nested consumers
        foreach_consumer_stop( consumer );
    }

    return 0;
}
Example #28
mlt_profile mlt_profile_load_file( const char *file )
{
	mlt_profile profile = NULL;

	// Load the profile as properties
	mlt_properties properties = mlt_properties_load( file );
	if ( properties )
	{
		// Simple check if the profile is valid
		if ( mlt_properties_get_int( properties, "width" ) )
		{
			profile = mlt_profile_load_properties( properties );

			// Set MLT_PROFILE to basename
			char *filename = strdup( file );
			mlt_environment_set( "MLT_PROFILE", basename( filename ) );
			set_mlt_normalisation( basename( filename ) );
			free( filename );
		}
		mlt_properties_close( properties );
	}

	// Set MLT_NORMALISATION to appease legacy modules
	char *profile_name = mlt_environment( "MLT_PROFILE" );
	set_mlt_normalisation( profile_name );
	return profile;
}
Example #29
static void consumer_read_ahead_start( mlt_consumer self )
{
	// We're running now
	self->ahead = 1;

	// Create the frame queue
	self->queue = mlt_deque_init( );

	// Create the queue mutex
	pthread_mutex_init( &self->queue_mutex, NULL );

	// Create the condition
	pthread_cond_init( &self->queue_cond, NULL );

	// Create the read ahead
	if ( mlt_properties_get( MLT_CONSUMER_PROPERTIES( self ), "priority" ) )
	{
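		// Try to create the thread with the requested scheduling priority;
		// fall back to a thread with default attributes if that fails.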
		struct sched_param priority;
		priority.sched_priority = mlt_properties_get_int( MLT_CONSUMER_PROPERTIES( self ), "priority" );
		pthread_attr_t thread_attributes;
		pthread_attr_init( &thread_attributes );
		pthread_attr_setschedpolicy( &thread_attributes, SCHED_OTHER );
		pthread_attr_setschedparam( &thread_attributes, &priority );
		pthread_attr_setinheritsched( &thread_attributes, PTHREAD_EXPLICIT_SCHED );
		pthread_attr_setscope( &thread_attributes, PTHREAD_SCOPE_SYSTEM );
		if ( pthread_create( &self->ahead_thread, &thread_attributes, consumer_read_ahead_thread, self ) != 0 )
			pthread_create( &self->ahead_thread, NULL, consumer_read_ahead_thread, self );
		pthread_attr_destroy( &thread_attributes );
	}
	else
	{
		pthread_create( &self->ahead_thread, NULL, consumer_read_ahead_thread, self );
	}
	self->started = 1;
}
Example #30
static mlt_frame filter_process( mlt_filter filter, mlt_frame frame )
{
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	mlt_properties frame_props = MLT_FRAME_PROPERTIES( frame );

	// Propagate the parameters
	mlt_properties_set_int( frame_props, "channelcopy.to", mlt_properties_get_int( properties, "to" ) );
	mlt_properties_set_int( frame_props, "channelcopy.from", mlt_properties_get_int( properties, "from" ) );
	mlt_properties_set_int( frame_props, "channelcopy.swap", mlt_properties_get_int( properties, "swap" ) );

	// Override the get_audio method
	mlt_frame_push_audio( frame, filter );
	mlt_frame_push_audio( frame, filter_get_audio );

	return frame;
}