Example #1
static void analyze_image( mlt_filter filter, mlt_frame frame, uint8_t* vs_image, VSPixelFormat vs_format, int width, int height )
{
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	vs_data* data = (vs_data*)filter->child;
	mlt_position pos = mlt_filter_get_position( filter, frame );

	// If any frames are skipped, analysis data will be incomplete.
	if( data->analyze_data && pos != data->analyze_data->last_position + 1 )
	{
		mlt_log_error( MLT_FILTER_SERVICE(filter), "Bad frame sequence\n" );
		destory_analyze_data( data->analyze_data );
		data->analyze_data = NULL;
	}

	if ( !data->analyze_data && pos == 0 )
	{
		// Analysis must start on the first frame
		init_analyze_data( filter, frame, vs_format, width, height );
	}

	if( data->analyze_data )
	{
		// Initialize the VSFrame to be analyzed.
		VSMotionDetect* md = &data->analyze_data->md;
		LocalMotions localmotions;
		VSFrame vsFrame;
		vsFrameFillFromBuffer( &vsFrame, vs_image, &md->fi );

		// Detect and save motions.
		if( vsMotionDetection( md, &localmotions, &vsFrame ) == VS_OK )
		{
			vsWriteToFile( md, data->analyze_data->results, &localmotions);
			vs_vector_del( &localmotions );
		}
		else
		{
			mlt_log_error( MLT_FILTER_SERVICE(filter), "Motion detection failed\n" );
			destory_analyze_data( data->analyze_data );
			data->analyze_data = NULL;
		}

		// Publish the motions if this is the last frame.
		if ( pos + 1 == mlt_filter_get_length2( filter, frame ) )
		{
			mlt_log_info( MLT_FILTER_SERVICE(filter), "Analysis complete\n" );
			destory_analyze_data( data->analyze_data );
			data->analyze_data = NULL;
			mlt_properties_set( properties, "results", mlt_properties_get( properties, "filename" ) );
		}
		else
		{
			data->analyze_data->last_position = pos;
		}
	}
}
Example #2
File: common.c Project: bmatherly/mlt
mlt_channel_layout av_channel_layout_to_mlt( int64_t layout )
{
	switch( layout )
	{
		case 0:                              return mlt_channel_independent;
		case AV_CH_LAYOUT_MONO:              return mlt_channel_mono;
		case AV_CH_LAYOUT_STEREO:            return mlt_channel_stereo;
		case AV_CH_LAYOUT_STEREO_DOWNMIX:    return mlt_channel_stereo;
		case AV_CH_LAYOUT_2POINT1:           return mlt_channel_2p1;
		case AV_CH_LAYOUT_SURROUND:          return mlt_channel_3p0;
		case AV_CH_LAYOUT_2_1:               return mlt_channel_3p0_back;
		case AV_CH_LAYOUT_3POINT1:           return mlt_channel_3p1;
		case AV_CH_LAYOUT_4POINT0:           return mlt_channel_4p0;
		case AV_CH_LAYOUT_QUAD:              return mlt_channel_quad_back;
		case AV_CH_LAYOUT_2_2:               return mlt_channel_quad_side;
		case AV_CH_LAYOUT_5POINT0:           return mlt_channel_5p0;
		case AV_CH_LAYOUT_5POINT0_BACK:      return mlt_channel_5p0_back;
		case AV_CH_LAYOUT_4POINT1:           return mlt_channel_4p1;
		case AV_CH_LAYOUT_5POINT1:           return mlt_channel_5p1;
		case AV_CH_LAYOUT_5POINT1_BACK:      return mlt_channel_5p1_back;
		case AV_CH_LAYOUT_6POINT0:           return mlt_channel_6p0;
		case AV_CH_LAYOUT_6POINT0_FRONT:     return mlt_channel_6p0_front;
		case AV_CH_LAYOUT_HEXAGONAL:         return mlt_channel_hexagonal;
		case AV_CH_LAYOUT_6POINT1:           return mlt_channel_6p1;
		case AV_CH_LAYOUT_6POINT1_BACK:      return mlt_channel_6p1_back;
		case AV_CH_LAYOUT_6POINT1_FRONT:     return mlt_channel_6p1_front;
		case AV_CH_LAYOUT_7POINT0:           return mlt_channel_7p0;
		case AV_CH_LAYOUT_7POINT0_FRONT:     return mlt_channel_7p0_front;
		case AV_CH_LAYOUT_7POINT1:           return mlt_channel_7p1;
		case AV_CH_LAYOUT_7POINT1_WIDE:      return mlt_channel_7p1_wide_side;
		case AV_CH_LAYOUT_7POINT1_WIDE_BACK: return mlt_channel_7p1_wide_back;
	}
	mlt_log_error( NULL, "[avformat] Unknown channel layout: %lu\n", (unsigned long)layout );
	return mlt_channel_independent;
}
Example #3
static int get_audio( mlt_frame frame, void **buffer, mlt_audio_format *format, int *frequency, int *channels, int *samples )
{
	mlt_frame nested_frame = mlt_frame_pop_audio( frame );
	int result = mlt_frame_get_audio( nested_frame, buffer, format, frequency, channels, samples );
	int size = *channels * *samples;

	switch ( *format )
	{
		case mlt_audio_s16:
			size *= sizeof( int16_t );
			break;
		case mlt_audio_s32:
			size *= sizeof( int32_t );
			break;
		case mlt_audio_float:
			size *= sizeof( float );
			break;
		default:
			mlt_log_error( NULL, "[producer consumer] Invalid audio format\n" );
	}
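	// Copy the audio into a buffer owned by this frame so it survives after the nested frame is closed.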
	int16_t *new_buffer = mlt_pool_alloc( size );
	mlt_properties_set_data( MLT_FRAME_PROPERTIES( frame ), "audio", new_buffer, size, mlt_pool_release, NULL );
	memcpy( new_buffer, *buffer, size );
	*buffer = new_buffer;

	return result;
}
Example #4
int consumer_start( mlt_consumer parent )
{
	consumer_sdl self = parent->child;

	if ( !self->running )
	{
		consumer_stop( parent );

		mlt_properties properties = MLT_CONSUMER_PROPERTIES( parent );
		char *audio_driver = mlt_properties_get( properties, "audio_driver" );
		char *audio_device = mlt_properties_get( properties, "audio_device" );

		if ( audio_driver && strcmp( audio_driver, "" ) )
			setenv( "SDL_AUDIODRIVER", audio_driver, 1 );

		if ( audio_device && strcmp( audio_device, "" ) )
			setenv( "AUDIODEV", audio_device, 1 );

		pthread_mutex_lock( &mlt_sdl_mutex );
		int ret = SDL_Init( SDL_INIT_AUDIO | SDL_INIT_NOPARACHUTE );
		pthread_mutex_unlock( &mlt_sdl_mutex );
		if ( ret < 0 )
		{
			mlt_log_error( MLT_CONSUMER_SERVICE(parent), "Failed to initialize SDL: %s\n", SDL_GetError() );
			return -1;
		}

		self->running = 1;
		self->joined = 0;
		pthread_create( &self->thread, NULL, consumer_thread, self );
	}

	return 0;
}
Example #5
static int jackrack_get_audio( mlt_frame frame, void **buffer, mlt_audio_format *format, int *frequency, int *channels, int *samples )
{
	// Get the filter service
	mlt_filter filter = mlt_frame_pop_audio( frame );

	// Get the filter properties
	mlt_properties filter_properties = MLT_FILTER_PROPERTIES( filter );

	int jack_frequency = mlt_properties_get_int( filter_properties, "_sample_rate" );

	// Get the producer's audio
	*format = mlt_audio_float;
	mlt_frame_get_audio( frame, buffer, format, &jack_frequency, channels, samples );
	
	// TODO: Deal with sample rate differences
	if ( *frequency != jack_frequency )
		mlt_log_error( MLT_FILTER_SERVICE( filter ), "mismatching frequencies JACK = %d actual = %d\n",
			jack_frequency, *frequency );
	*frequency = jack_frequency;

	// Initialise Jack ports and connections if needed
	if ( mlt_properties_get_int( filter_properties, "_samples" ) == 0 )
		mlt_properties_set_int( filter_properties, "_samples", *samples );
	
	// Get the filter-specific properties
	jack_ringbuffer_t **output_buffers = mlt_properties_get_data( filter_properties, "output_buffers", NULL );
	jack_ringbuffer_t **input_buffers = mlt_properties_get_data( filter_properties, "input_buffers", NULL );
//	pthread_mutex_t *output_lock = mlt_properties_get_data( filter_properties, "output_lock", NULL );
//	pthread_cond_t *output_ready = mlt_properties_get_data( filter_properties, "output_ready", NULL );
	
	// Process the audio
	float *q = (float*) *buffer;
	size_t size = *samples * sizeof(float);
	int j;
//	struct timespec tm = { 0, 0 };

	// Write into output ringbuffer
	for ( j = 0; j < *channels; j++ )
	{
		if ( jack_ringbuffer_write_space( output_buffers[j] ) >= size )
			jack_ringbuffer_write( output_buffers[j], (char*)( q + j * *samples ), size );
	}

	// Synchronization phase - wait for signal from Jack process
	while ( jack_ringbuffer_read_space( input_buffers[ *channels - 1 ] ) < size ) ;
		//pthread_cond_wait( output_ready, output_lock );
		
	// Read from input ringbuffer
	for ( j = 0; j < *channels; j++, q++ )
	{
		if ( jack_ringbuffer_read_space( input_buffers[j] ) >= size )
			jack_ringbuffer_read( input_buffers[j], (char*)( q + j * *samples ), size );
	}

	// help jack_sync() indicate when we are rolling
	mlt_position pos = mlt_frame_get_position( frame );
	mlt_properties_set_position( filter_properties, "_last_pos", pos );

	return 0;
}
Example #6
static int convert_mlt_to_av_cs( mlt_image_format format )
{
	int value = 0;

	switch( format )
	{
		case mlt_image_rgb24:
			value = AV_PIX_FMT_RGB24;
			break;
		case mlt_image_rgb24a:
		case mlt_image_opengl:
			value = AV_PIX_FMT_RGBA;
			break;
		case mlt_image_yuv422:
			value = AV_PIX_FMT_YUYV422;
			break;
		case mlt_image_yuv420p:
			value = AV_PIX_FMT_YUV420P;
			break;
		default:
			mlt_log_error( NULL, "[filter avcolor_space] Invalid format %s\n",
				mlt_image_format_name( format ) );
			break;
	}

	return value;
}
Example #7
    bool open( unsigned card =  0 )
    {
        IDeckLinkIterator* decklinkIterator = NULL;
        try
        {
#ifdef WIN32
            HRESULT result =  CoInitialize( NULL );
            if ( FAILED( result ) )
                throw "COM initialization failed";
            result = CoCreateInstance( CLSID_CDeckLinkIterator, NULL, CLSCTX_ALL, IID_IDeckLinkIterator, (void**) &decklinkIterator );
            if ( FAILED( result ) )
                throw "The DeckLink drivers are not installed.";
#else
            decklinkIterator = CreateDeckLinkIteratorInstance();
            if ( !decklinkIterator )
                throw "The DeckLink drivers are not installed.";
#endif
            // Connect to the Nth DeckLink instance
            for ( unsigned i = 0; decklinkIterator->Next( &m_decklink ) == S_OK ; i++)
            {
                if ( i == card )
                    break;
                else
                    SAFE_RELEASE( m_decklink );
            }
            SAFE_RELEASE( decklinkIterator );
            if ( !m_decklink )
                throw "DeckLink card not found.";

            // Get the input interface
            if ( m_decklink->QueryInterface( IID_IDeckLinkInput, (void**) &m_decklinkInput ) != S_OK )
                throw "No DeckLink cards support input.";

            // Provide this class as a delegate to the input callback
            m_decklinkInput->SetCallback( this );

            // Initialize other members
            pthread_mutex_init( &m_mutex, NULL );
            pthread_cond_init( &m_condition, NULL );
            m_queue = mlt_deque_init();
            m_started = false;
            m_dropped = 0;
            m_isBuffering = true;
            m_cache = mlt_cache_init();

            // 3 covers YADIF and increasing framerate use cases
            mlt_cache_set_size( m_cache, 3 );
        }
        catch ( const char *error )
        {
            SAFE_RELEASE( m_decklinkInput );
            SAFE_RELEASE( m_decklink );
            mlt_log_error( getProducer(), "%s\n", error );
            return false;
        }
        return true;
    }
Example #8
mlt_filter filter_audiospectrum_init( mlt_profile profile, mlt_service_type type, const char *id, char *arg )
{
	mlt_filter filter = mlt_filter_new();
	private_data* pdata = (private_data*)calloc( 1, sizeof(private_data) );

	if ( filter && pdata && createQApplicationIfNeeded( MLT_FILTER_SERVICE(filter) ) )
	{
		mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
		mlt_properties_set_int( properties, "_filter_private", 1 );
		mlt_properties_set_int( properties, "frequency_low", 20 );
		mlt_properties_set_int( properties, "frequency_high", 20000 );
		mlt_properties_set( properties, "type", "line" );
		mlt_properties_set( properties, "bgcolor", "0x00000000" );
		mlt_properties_set( properties, "color.1", "0xffffffff" );
		mlt_properties_set( properties, "rect", "0% 0% 100% 100%" );
		mlt_properties_set( properties, "thickness", "0" );
		mlt_properties_set( properties, "fill", "0" );
		mlt_properties_set( properties, "mirror", "0" );
		mlt_properties_set( properties, "reverse", "0" );
		mlt_properties_set( properties, "tension", "0.4" );
		mlt_properties_set( properties, "angle", "0" );
		mlt_properties_set( properties, "gorient", "v" );
		mlt_properties_set_int( properties, "bands", 31 );
		mlt_properties_set_double( properties, "threshold", -60.0 );
		mlt_properties_set_int( properties, "window_size", 8192 );

		// Create a unique ID for storing data on the frame
		pdata->fft_prop_name = (char*)calloc( 1, 20 );
		snprintf( pdata->fft_prop_name, 20, "fft.%p", filter );
		pdata->fft_prop_name[20 - 1] = '\0';

		pdata->fft = 0;

		filter->close = filter_close;
		filter->process = filter_process;
		filter->child = pdata;
	}
	else
	{
		mlt_log_error( MLT_FILTER_SERVICE(filter), "Filter audio spectrum failed\n" );

		if( filter )
		{
			mlt_filter_close( filter );
		}

		if( pdata )
		{
			free( pdata );
		}

		filter = NULL;
	}
	return filter;
}
Example #9
File: jack_rack.c Project: mcfrisk/mlt
int
jack_rack_open_file (jack_rack_t * jack_rack, const char * filename)
{
  xmlDocPtr doc;
  saved_rack_t * saved_rack;
  GSList * list;
  saved_plugin_t * saved_plugin;

  doc = xmlParseFile (filename);
  if (!doc)
    {
      mlt_log_error( NULL, _("Could not parse file '%s'\n"), filename);
      return 1;
    }
  
  if (xmlStrcmp ( ((xmlDtdPtr)doc->children)->name, _x("jackrack")) != 0)
    {
      mlt_log_error( NULL, _("The file '%s' is not a JACK Rack settings file\n"), filename);
      return 1;
    }
  
  saved_rack = saved_rack_new (jack_rack, filename, doc);
  xmlFreeDoc (doc);
  
  if (!saved_rack)
    return 1;

  for (list = saved_rack->plugins; list; list = g_slist_next (list))
    {
      saved_plugin = list->data;
      
      settings_set_sample_rate (saved_plugin->settings, sample_rate);
      
      jack_rack_add_saved_plugin (jack_rack, saved_plugin);
    }
  
  saved_rack_destroy (saved_rack);
  
  return 0;
}
Example #10
File: common.c Project: bmatherly/mlt
int64_t mlt_to_av_channel_layout( mlt_channel_layout layout )
{
	switch( layout )
	{
		case mlt_channel_auto:
		case mlt_channel_independent:
			mlt_log_error( NULL, "[avformat] No matching channel layout: %s\n", mlt_channel_layout_name( layout ) );
			return 0;
		case mlt_channel_mono:           return AV_CH_LAYOUT_MONO;
		case mlt_channel_stereo:         return AV_CH_LAYOUT_STEREO;
		case mlt_channel_2p1:            return AV_CH_LAYOUT_2POINT1;
		case mlt_channel_3p0:            return AV_CH_LAYOUT_SURROUND;
		case mlt_channel_3p0_back:       return AV_CH_LAYOUT_2_1;
		case mlt_channel_3p1:            return AV_CH_LAYOUT_3POINT1;
		case mlt_channel_4p0:            return AV_CH_LAYOUT_4POINT0;
		case mlt_channel_quad_back:      return AV_CH_LAYOUT_QUAD;
		case mlt_channel_quad_side:      return AV_CH_LAYOUT_2_2;
		case mlt_channel_5p0:            return AV_CH_LAYOUT_5POINT0;
		case mlt_channel_5p0_back:       return AV_CH_LAYOUT_5POINT0_BACK;
		case mlt_channel_4p1:            return AV_CH_LAYOUT_4POINT1;
		case mlt_channel_5p1:            return AV_CH_LAYOUT_5POINT1;
		case mlt_channel_5p1_back:       return AV_CH_LAYOUT_5POINT1_BACK;
		case mlt_channel_6p0:            return AV_CH_LAYOUT_6POINT0;
		case mlt_channel_6p0_front:      return AV_CH_LAYOUT_6POINT0_FRONT;
		case mlt_channel_hexagonal:      return AV_CH_LAYOUT_HEXAGONAL;
		case mlt_channel_6p1:            return AV_CH_LAYOUT_6POINT1;
		case mlt_channel_6p1_back:       return AV_CH_LAYOUT_6POINT1_BACK;
		case mlt_channel_6p1_front:      return AV_CH_LAYOUT_6POINT1_FRONT;
		case mlt_channel_7p0:            return AV_CH_LAYOUT_7POINT0;
		case mlt_channel_7p0_front:      return AV_CH_LAYOUT_7POINT0_FRONT;
		case mlt_channel_7p1:            return AV_CH_LAYOUT_7POINT1;
		case mlt_channel_7p1_wide_side:  return AV_CH_LAYOUT_7POINT1_WIDE;
		case mlt_channel_7p1_wide_back:  return AV_CH_LAYOUT_7POINT1_WIDE_BACK;
	}
	mlt_log_error( NULL, "[avformat] Unknown channel configuration: %d\n", layout );
	return 0;
}
Example #11
static void apply_lut( mlt_filter filter, uint8_t* image, mlt_image_format format, int width, int height )
{
	private_data* self = (private_data*)filter->child;
	uint8_t* rlut = malloc( sizeof(self->rlut) );
	uint8_t* glut = malloc( sizeof(self->glut) );
	uint8_t* blut = malloc( sizeof(self->blut) );
	int total = width * height + 1;
	uint8_t* sample = image;

	// Copy the LUT so that we can be frame-thread safe.
	mlt_service_lock( MLT_FILTER_SERVICE( filter ) );
	memcpy( rlut, self->rlut, sizeof(self->rlut) );
	memcpy( glut, self->glut, sizeof(self->glut) );
	memcpy( blut, self->blut, sizeof(self->blut) );
	mlt_service_unlock( MLT_FILTER_SERVICE( filter ) );

	switch( format )
	{
	case mlt_image_rgb24:
		while( --total )
		{
			*sample = rlut[ *sample ];
			sample++;
			*sample = glut[ *sample ];
			sample++;
			*sample = blut[ *sample ];
			sample++;
		}
		break;
	case mlt_image_rgb24a:
		while( --total )
		{
			*sample = rlut[ *sample ];
			sample++;
			*sample = glut[ *sample ];
			sample++;
			*sample = blut[ *sample ];
			sample++;
			sample++; // Skip alpha
		}
		break;
	default:
		mlt_log_error( MLT_FILTER_SERVICE( filter ), "Invalid image format: %s\n", mlt_image_format_name( format ) );
		break;
	}
	free( rlut );
	free( glut );
	free( blut );
}
Example #12
	bool start( unsigned preroll )
	{
		m_displayMode = getDisplayMode();
		if ( !m_displayMode )
		{
			mlt_log_error( &m_consumer, "Profile is not compatible with decklink.\n" );
			return false;
		}
		
		// Set the video output mode
		if ( S_OK != m_deckLinkOutput->EnableVideoOutput( m_displayMode->GetDisplayMode(), bmdVideoOutputFlagDefault) )
		{
			mlt_log_error( &m_consumer, "Failed to enable video output\n" );
			return false;
		}
		
		// Set the audio output mode
		m_channels = 2;
		if ( S_OK != m_deckLinkOutput->EnableAudioOutput( bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger,
			m_channels, bmdAudioOutputStreamContinuous ) )
		{
			mlt_log_error( &m_consumer, "Failed to enable audio output\n" );
			stop();
			return false;
		}
		m_fifo = sample_fifo_init();
		
		// Preroll
		m_isPrerolling = true;
		m_prerollCounter = 0;
		m_preroll = preroll < PREROLL_MINIMUM ? PREROLL_MINIMUM : preroll;
		m_count = 0;
		m_deckLinkOutput->BeginAudioPreroll();
		
		return true;
	}
Example #13
mlt_filter filter_spot_remover_init( mlt_profile profile, mlt_service_type type, const char *id, char *arg )
{
	mlt_filter filter = mlt_filter_new();

	if ( filter )
	{
		mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
		mlt_properties_set( properties, "rect", "0% 0% 10% 10%" );
		filter->process = filter_process;
	}
	else
	{
		mlt_log_error( NULL, "Filter spot_remover initialization failed\n" );
	}
	return filter;
}
Example #14
mlt_filter filter_lift_gamma_gain_init( mlt_profile profile, mlt_service_type type, const char *id, char *arg )
{
	mlt_filter filter = mlt_filter_new();
	private_data* self = (private_data*)calloc( 1, sizeof(private_data) );
	int i = 0;

	if ( filter && self )
	{
		mlt_properties properties = MLT_FILTER_PROPERTIES( filter );

		// Initialize self data
		for( i = 0; i < 256; i++ )
		{
			self->rlut[i] = i;
			self->glut[i] = i;
			self->blut[i] = i;
		}
		self->rlift = self->glift = self->blift = 0.0;
		self->rgamma = self->ggamma = self->bgamma = 1.0;
		self->rgain = self->ggain = self->bgain = 1.0;

		// Initialize filter properties
		mlt_properties_set_double( properties, "lift_r", self->rlift );
		mlt_properties_set_double( properties, "lift_g", self->glift );
		mlt_properties_set_double( properties, "lift_b", self->blift );
		mlt_properties_set_double( properties, "gamma_r", self->rgamma );
		mlt_properties_set_double( properties, "gamma_g", self->ggamma );
		mlt_properties_set_double( properties, "gamma_b", self->bgamma );
		mlt_properties_set_double( properties, "gain_r", self->rgain );
		mlt_properties_set_double( properties, "gain_g", self->ggain );
		mlt_properties_set_double( properties, "gain_b", self->bgain );

		filter->close = filter_close;
		filter->process = filter_process;
		filter->child = self;
	}
	else
	{
		mlt_log_error( MLT_FILTER_SERVICE(filter), "Filter lift_gamma_gain init failed\n" );
		mlt_filter_close( filter );
		filter = NULL;
		free( self );
	}

	return filter;
}
Example #15
File: producer_ladspa.c Project: aib/mlt
static jack_rack_t* initialise_jack_rack( mlt_properties properties, int channels )
{
	jack_rack_t *jackrack = NULL;
	unsigned long plugin_id = mlt_properties_get_int64( properties, "_pluginid" );

	// Start JackRack
	if ( plugin_id )
	{
		// Create JackRack without Jack client name so that it only uses LADSPA
		jackrack = jack_rack_new( NULL, channels );
		mlt_properties_set_data( properties, "_jackrack", jackrack, 0,
			(mlt_destructor) jack_rack_destroy, NULL );

		// Load one LADSPA plugin by its UniqueID
		plugin_desc_t *desc = plugin_mgr_get_any_desc( jackrack->plugin_mgr, plugin_id );
		plugin_t *plugin;

		if ( desc && ( plugin = jack_rack_instantiate_plugin( jackrack, desc ) ) )
		{
			LADSPA_Data value;
			int index, c;

			plugin->enabled = TRUE;
			plugin->wet_dry_enabled = FALSE;
			for ( index = 0; index < desc->control_port_count; index++ )
			{
				// Apply the control port values
				char key[20];
				value = plugin_desc_get_default_control_value( desc, index, sample_rate );
				snprintf( key, sizeof(key), "%d", index );
				if ( mlt_properties_get( properties, key ) )
					value = mlt_properties_get_double( properties, key );
				for ( c = 0; c < plugin->copies; c++ )
					plugin->holders[c].control_memory[index] = value;
			}
			process_add_plugin( jackrack->procinfo, plugin );
		}
		else
		{
			mlt_log_error( properties, "failed to load plugin %lu\n", plugin_id );
		}
	}

	return jackrack;
}
Example #16
static jack_rack_t* initialise_jack_rack( mlt_properties properties, int channels )
{
	jack_rack_t *jackrack = NULL;
	char *resource = mlt_properties_get( properties, "resource" );
	if ( !resource && mlt_properties_get( properties, "src" ) )
		resource = mlt_properties_get( properties, "src" );

	// Start JackRack
	if ( resource || mlt_properties_get_int64( properties, "_pluginid" ) )
	{
		// Create JackRack without Jack client name so that it only uses LADSPA
		jackrack = jack_rack_new( NULL, channels );
		mlt_properties_set_data( properties, "jackrack", jackrack, 0,
			(mlt_destructor) jack_rack_destroy, NULL );

		if ( resource )
			// Load JACK Rack XML file
			jack_rack_open_file( jackrack, resource );
		else if ( mlt_properties_get_int64( properties, "_pluginid" ) )
		{
			// Load one LADSPA plugin by its UniqueID
			unsigned long id = mlt_properties_get_int64( properties, "_pluginid" );
			plugin_desc_t *desc = plugin_mgr_get_any_desc( jackrack->plugin_mgr, id );
			plugin_t *plugin;
			if ( desc && ( plugin = jack_rack_instantiate_plugin( jackrack, desc ) ) )
			{
				plugin->enabled = TRUE;
				process_add_plugin( jackrack->procinfo, plugin );
				mlt_properties_set_int( properties, "instances", plugin->copies );
			}
			else
			{
				mlt_log_error( properties, "failed to load plugin %lu\n", id );
				return jackrack;
			}

			if ( plugin && plugin->desc && plugin->copies == 0 )
			{
				mlt_log_warning( properties, "Not compatible with %d channels. Requesting %d channels instead.\n", channels, plugin->desc->channels );
				jackrack = initialise_jack_rack( properties, plugin->desc->channels );
			}
		}
	}
	return jackrack;
}
Example #17
File: jack_rack.c Project: mcfrisk/mlt
plugin_t *
jack_rack_instantiate_plugin (jack_rack_t * jack_rack, plugin_desc_t * desc)
{
  plugin_t * plugin;
  
  /* check whether or not the plugin is RT capable and confirm with the user if it isn't */
  if (!LADSPA_IS_HARD_RT_CAPABLE(desc->properties)) {
    mlt_log_info( NULL, "Plugin not RT capable. The plugin '%s' does not describe itself as being capable of real-time operation. You may experience drop outs or jack may even kick us out if you use it.\n",
               desc->name);
  }

  /* create the plugin */
  plugin = plugin_new (desc, jack_rack);

  if (!plugin) {
   mlt_log_error( NULL, "Error loading file plugin '%s' from file '%s'\n",
               desc->name, desc->object_file);
  }
  
  return plugin;
}
Example #18
File: common.c Project: bmatherly/mlt
int mlt_to_av_sample_format( mlt_audio_format format )
{
	switch( format )
	{
	case mlt_audio_none:
		return AV_SAMPLE_FMT_NONE;
	case mlt_audio_s16:
		return AV_SAMPLE_FMT_S16;
	case mlt_audio_s32:
		return AV_SAMPLE_FMT_S32P;
	case mlt_audio_float:
		return AV_SAMPLE_FMT_FLTP;
	case mlt_audio_s32le:
		return AV_SAMPLE_FMT_S32;
	case mlt_audio_f32le:
		return AV_SAMPLE_FMT_FLT;
	case mlt_audio_u8:
		return AV_SAMPLE_FMT_U8;
	}
	mlt_log_error( NULL, "[avformat] Unknown audio format: %d\n", format );
	return AV_SAMPLE_FMT_NONE;
}
Example #19
static void init_analyze_data( mlt_filter filter, mlt_frame frame, VSPixelFormat vs_format, int width, int height )
{
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	vs_data* data = (vs_data*)filter->child;
	vs_analyze* analyze_data = (vs_analyze*)calloc( 1, sizeof(vs_analyze) );
	memset( analyze_data, 0, sizeof(vs_analyze) );

	// Initialize a VSMotionDetectConfig
	const char* filterName = mlt_properties_get( properties, "mlt_service" );
	VSMotionDetectConfig conf = vsMotionDetectGetDefaultConfig( filterName );
	conf.shakiness = mlt_properties_get_int( properties, "shakiness" );
	conf.accuracy = mlt_properties_get_int( properties, "accuracy" );
	conf.stepSize = mlt_properties_get_int( properties, "stepsize" );
	conf.contrastThreshold = mlt_properties_get_double( properties, "mincontrast" );
	conf.show = mlt_properties_get_int( properties, "show" );
	conf.virtualTripod = mlt_properties_get_int( properties, "tripod" );

	// Initialize a VSFrameInfo
	VSFrameInfo fi;
	vsFrameInfoInit( &fi, width, height, vs_format );

	// Initialize the saved VSMotionDetect
	vsMotionDetectInit( &analyze_data->md, &conf, &fi );

	// Initialize the file to save results to
	char* filename = mlt_properties_get( properties, "filename" );
	analyze_data->results = fopen( filename, "w" );
	if ( vsPrepareFile( &analyze_data->md, analyze_data->results ) != VS_OK )
	{
		mlt_log_error( MLT_FILTER_SERVICE(filter), "Can not write to results file: %s\n", filename );
		destory_analyze_data( analyze_data );
		data->analyze_data = NULL;
	}
	else
	{
		data->analyze_data = analyze_data;
	}
}
Example #20
File: common.cpp Project: elfring/mlt
bool createQApplicationIfNeeded(mlt_service service)
{
	if (!qApp) {
#if defined(Q_OS_UNIX) && !defined(Q_OS_MAC)
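		// XInitThreads() must be called before any other Xlib call to enable thread-safe X11 access.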
		XInitThreads();
		if (getenv("DISPLAY") == 0) {
			mlt_log_error(service,
				"The MLT Qt module requires a X11 environment.\n"
				"Please either run melt from an X session or use a fake X server like xvfb:\n"
				"xvfb-run -a melt (...)\n" );
			return false;
		}
#endif
		if (!mlt_properties_get(mlt_global_properties(), "qt_argv"))
			mlt_properties_set(mlt_global_properties(), "qt_argv", "MLT");
		static int argc = 1;
		static char* argv[] = { mlt_properties_get(mlt_global_properties(), "qt_argv") };
		new QApplication(argc, argv);
		const char *localename = mlt_properties_get_lcnumeric(MLT_SERVICE_PROPERTIES(service));
		QLocale::setDefault(QLocale(localename));
	}
	return true;
}
Example #21
int consumer_start( mlt_consumer parent )
{
	consumer_sdl self = parent->child;

	if ( !self->running )
	{
		consumer_stop( parent );

		pthread_mutex_lock( &mlt_sdl_mutex );
		int ret = SDL_Init( SDL_INIT_AUDIO | SDL_INIT_NOPARACHUTE );
		pthread_mutex_unlock( &mlt_sdl_mutex );
		if ( ret < 0 )
		{
			mlt_log_error( MLT_CONSUMER_SERVICE(parent), "Failed to initialize SDL: %s\n", SDL_GetError() );
			return -1;
		}

		self->running = 1;
		self->joined = 0;
		pthread_create( &self->thread, NULL, consumer_thread, self );
	}

	return 0;
}
Example #22
static int resample_get_audio( mlt_frame frame, void **buffer, mlt_audio_format *format, int *frequency, int *channels, int *samples )
{
	// Get the filter service
	mlt_filter filter = mlt_frame_pop_audio( frame );

	// Get the filter properties
	mlt_properties filter_properties = MLT_FILTER_PROPERTIES( filter );

	// Get the resample information
	int output_rate = mlt_properties_get_int( filter_properties, "frequency" );

	// If no resample frequency is specified, default to requested value
	if ( output_rate == 0 )
		output_rate = *frequency;

	// Get the producer's audio
	int error = mlt_frame_get_audio( frame, buffer, format, frequency, channels, samples );
	if ( error ) return error;

	// Only resample when the rates differ and the input is valid
	if ( output_rate != *frequency && *frequency > 0 && *channels > 0 )
	{
		mlt_log_debug( MLT_FILTER_SERVICE(filter), "channels %d samples %d frequency %d -> %d\n",
			*channels, *samples, *frequency, output_rate );

		// Do not convert to float unless we need to change the rate
		if ( *format != mlt_audio_f32le )
			frame->convert_audio( frame, buffer, format, mlt_audio_f32le );

		mlt_service_lock( MLT_FILTER_SERVICE(filter) );

		SRC_DATA data;
		data.data_in = *buffer;
		data.data_out = mlt_properties_get_data( filter_properties, "output_buffer", NULL );
		data.src_ratio = ( float ) output_rate / ( float ) *frequency;
		data.input_frames = *samples;
		data.output_frames = BUFFER_LEN / *channels;
		data.end_of_input = 0;

		SRC_STATE *state = mlt_properties_get_data( filter_properties, "state", NULL );
		if ( !state || mlt_properties_get_int( filter_properties, "channels" ) != *channels )
		{
			// Recreate the resampler if the number of channels changed
			state = src_new( RESAMPLE_TYPE, *channels, &error );
			mlt_properties_set_data( filter_properties, "state", state, 0, (mlt_destructor) src_delete, NULL );
			mlt_properties_set_int( filter_properties, "channels", *channels );
		}

		// Resample the audio
		error = src_process( state, &data );
		if ( !error )
		{
			// Update output variables
			*samples = data.output_frames_gen;
			*frequency = output_rate;
			*buffer = data.data_out;
		}
		else
		{
			mlt_log_error( MLT_FILTER_SERVICE( filter ), "%s %d,%d,%d\n", src_strerror( error ), *frequency, *samples, output_rate );
		}
		mlt_service_unlock( MLT_FILTER_SERVICE(filter) );
	}

	return error;
}
Example #23
static int producer_get_image( mlt_frame frame, uint8_t **buffer, mlt_image_format *format, int *width, int *height, int writable )
{
	// Obtain properties of frame
	mlt_properties properties = MLT_FRAME_PROPERTIES( frame );

	// Obtain the producer for this frame
	mlt_producer producer = mlt_properties_get_data( properties, "producer_colour", NULL );

	mlt_service_lock( MLT_PRODUCER_SERVICE( producer ) );

	// Obtain properties of producer
	mlt_properties producer_props = MLT_PRODUCER_PROPERTIES( producer );

	// Get the current and previous colour strings
	char *now = mlt_properties_get( producer_props, "resource" );
	char *then = mlt_properties_get( producer_props, "_resource" );

	// Get the current image and dimensions cached in the producer
	int size = 0;
	uint8_t *image = mlt_properties_get_data( producer_props, "image", &size );
	int current_width = mlt_properties_get_int( producer_props, "_width" );
	int current_height = mlt_properties_get_int( producer_props, "_height" );
	mlt_image_format current_format = mlt_properties_get_int( producer_props, "_format" );

	// Parse the colour
	if ( now && strchr( now, '/' ) )
	{
		now = strdup( strrchr( now, '/' ) + 1 );
		mlt_properties_set( producer_props, "resource", now );
		free( now );
		now = mlt_properties_get( producer_props, "resource" );
	}
	mlt_color color = mlt_properties_get_color( producer_props, "resource" );

	if ( mlt_properties_get( producer_props, "mlt_image_format") )
		*format = mlt_image_format_id( mlt_properties_get( producer_props, "mlt_image_format") );

	// Choose suitable out values if nothing specific requested
	if ( *format == mlt_image_none || *format == mlt_image_glsl )
		*format = mlt_image_rgb24a;
	if ( *width <= 0 )
		*width = mlt_service_profile( MLT_PRODUCER_SERVICE(producer) )->width;
	if ( *height <= 0 )
		*height = mlt_service_profile( MLT_PRODUCER_SERVICE(producer) )->height;
	
	// Choose default image format if specific request is unsupported
	if ( *format != mlt_image_yuv420p && *format != mlt_image_yuv422 && *format != mlt_image_rgb24 &&
			*format != mlt_image_glsl && *format != mlt_image_glsl_texture )
		*format = mlt_image_rgb24a;

	// See if we need to regenerate
	if ( !now || ( then && strcmp( now, then ) ) || *width != current_width || *height != current_height || *format != current_format )
	{
		// Color the image
		int i = *width * *height + 1;
		int bpp;

		// Allocate the image
		size = mlt_image_format_size( *format, *width, *height, &bpp );
		uint8_t *p = image = mlt_pool_alloc( size );

		// Update the producer
		mlt_properties_set_data( producer_props, "image", image, size, mlt_pool_release, NULL );
		mlt_properties_set_int( producer_props, "_width", *width );
		mlt_properties_set_int( producer_props, "_height", *height );
		mlt_properties_set_int( producer_props, "_format", *format );
		mlt_properties_set( producer_props, "_resource", now );

		mlt_service_unlock( MLT_PRODUCER_SERVICE( producer ) );

		switch ( *format )
		{
		case mlt_image_yuv420p:
		{
			int plane_size =  *width * *height;
			uint8_t y, u, v;

			RGB2YUV_601_SCALED( color.r, color.g, color.b, y, u, v );
			memset(p + 0, y, plane_size);
			memset(p + plane_size, u, plane_size/4);
			memset(p + plane_size + plane_size/4, v, plane_size/4);
			mlt_properties_set_int( properties, "colorspace", 601 );
			break;
		}
		case mlt_image_yuv422:
		{
			int uneven = *width % 2;
			int count = ( *width - uneven ) / 2 + 1;
			uint8_t y, u, v;

			RGB2YUV_601_SCALED( color.r, color.g, color.b, y, u, v );
			i = *height + 1;
			while ( --i )
			{
				int j = count;
				while ( --j )
				{
					*p ++ = y;
					*p ++ = u;
					*p ++ = y;
					*p ++ = v;
				}
				if ( uneven )
				{
					*p ++ = y;
					*p ++ = u;
				}
			}
			mlt_properties_set_int( properties, "colorspace", 601 );
			break;
		}
		case mlt_image_rgb24:
			while ( --i )
			{
				*p ++ = color.r;
				*p ++ = color.g;
				*p ++ = color.b;
			}
			break;
		case mlt_image_glsl:
		case mlt_image_glsl_texture:
			memset(p, 0, size);
			break;
		case mlt_image_rgb24a:
			while ( --i )
			{
				*p ++ = color.r;
				*p ++ = color.g;
				*p ++ = color.b;
				*p ++ = color.a;
			}
			break;
		default:
			mlt_log_error( MLT_PRODUCER_SERVICE( producer ),
				"invalid image format %s\n", mlt_image_format_name( *format ) );
		}
	}
	else
	{
		mlt_service_unlock( MLT_PRODUCER_SERVICE( producer ) );
	}

	// Create the alpha channel
	int alpha_size = 0;
	uint8_t *alpha = NULL;

	// Initialise the alpha
	if (color.a < 255 || *format == mlt_image_rgb24a) {
		alpha_size = *width * *height;
		alpha = mlt_pool_alloc( alpha_size );
		if ( alpha )
			memset( alpha, color.a, alpha_size );
		else
			alpha_size = 0;
	}

	// Clone our image
	if (buffer && image && size > 0) {
		*buffer = mlt_pool_alloc( size );
		memcpy( *buffer, image, size );
	}

	// Now update properties so we free the copy after
	mlt_frame_set_image( frame, *buffer, size, mlt_pool_release );
	mlt_frame_set_alpha( frame, alpha, alpha_size, mlt_pool_release );
	mlt_properties_set_double( properties, "aspect_ratio", mlt_properties_get_double( producer_props, "aspect_ratio" ) );
	mlt_properties_set_int( properties, "meta.media.width", *width );
	mlt_properties_set_int( properties, "meta.media.height", *height );


	return 0;
}
Example #24
static int framebuffer_get_image( mlt_frame frame, uint8_t **image, mlt_image_format *format, int *width, int *height, int writable )
{

	// Get the filter object and properties
	mlt_producer producer = mlt_frame_pop_service( frame );
	int index = ( int )mlt_frame_pop_service( frame );
	mlt_properties properties = MLT_PRODUCER_PROPERTIES( producer );

	mlt_service_lock( MLT_PRODUCER_SERVICE( producer ) );

	// Frame properties objects
	mlt_properties frame_properties = MLT_FRAME_PROPERTIES( frame );
	mlt_frame first_frame = mlt_properties_get_data( properties, "first_frame", NULL );

	// Get producer parameters
	int strobe = mlt_properties_get_int( properties, "strobe" );
	int freeze = mlt_properties_get_int( properties, "freeze" );
	int freeze_after = mlt_properties_get_int( properties, "freeze_after" );
	int freeze_before = mlt_properties_get_int( properties, "freeze_before" );
	int in = mlt_properties_get_position( properties, "in" );

	// Determine the position
	mlt_position first_position = (first_frame != NULL) ? mlt_frame_get_position( first_frame ) : -1;
	mlt_position need_first = freeze;

	if ( !freeze || freeze_after || freeze_before )
	{
		double prod_speed = mlt_properties_get_double( properties, "_speed" );
		double actual_position = in + prod_speed * (double) mlt_producer_position( producer );

		if ( mlt_properties_get_int( properties, "reverse" ) )
			actual_position = mlt_producer_get_playtime( producer ) - actual_position;

		if ( strobe < 2 )
		{
			need_first = floor( actual_position );
		}
		else
		{
			// Strobe effect wanted, calculate frame position
			need_first = floor( actual_position );
			need_first -= need_first % strobe;
		}
		if ( freeze )
		{
			if ( freeze_after && need_first > freeze ) need_first = freeze;
			else if ( freeze_before && need_first < freeze ) need_first = freeze;
		}
	}
	
	// Determine output buffer size
	*width = mlt_properties_get_int( frame_properties, "width" );
	*height = mlt_properties_get_int( frame_properties, "height" );
	int size = mlt_image_format_size( *format, *width, *height, NULL );

	// Get output buffer
	int buffersize = 0;
	int alphasize = *width * *height;
	uint8_t *output = mlt_properties_get_data( properties, "output_buffer", &buffersize );
	uint8_t *output_alpha = mlt_properties_get_data( properties, "output_alpha", NULL );
	if( buffersize == 0 || buffersize != size )
	{
		// invalidate cached frame
		first_position = -1;
	}

	if ( need_first != first_position )
	{
		// invalidate cached frame
		first_position = -1;
		
		// Bust the cached frame
		mlt_properties_set_data( properties, "first_frame", NULL, 0, NULL, NULL );
		first_frame = NULL;
	}

	if ( output && first_position != -1 ) {
		// Using the cached frame
		uint8_t *image_copy = mlt_pool_alloc( size );
		memcpy( image_copy, output, size );
		uint8_t *alpha_copy = mlt_pool_alloc( alphasize );
		memcpy( alpha_copy, output_alpha, alphasize );

		// Set the output image
		*image = image_copy;
		mlt_frame_set_image( frame, image_copy, size, mlt_pool_release );
		mlt_frame_set_alpha( frame, alpha_copy, alphasize, mlt_pool_release );

		*width = mlt_properties_get_int( properties, "_output_width" );
		*height = mlt_properties_get_int( properties, "_output_height" );
		*format = mlt_properties_get_int( properties, "_output_format" );

		mlt_service_unlock( MLT_PRODUCER_SERVICE( producer ) );
		return 0;
	}

	// Get the cached frame
	if ( first_frame == NULL )
	{
		// Get the frame to cache from the real producer
		mlt_producer real_producer = mlt_properties_get_data( properties, "producer", NULL );

		// Seek the producer to the correct place
		mlt_producer_seek( real_producer, need_first );

		// Get the frame
		mlt_service_get_frame( MLT_PRODUCER_SERVICE( real_producer ), &first_frame, index );

		// Cache the frame
		mlt_properties_set_data( properties, "first_frame", first_frame, 0, ( mlt_destructor )mlt_frame_close, NULL );
	}
	mlt_properties first_frame_properties = MLT_FRAME_PROPERTIES( first_frame );


	// Which frames are buffered?
	uint8_t *first_image = mlt_properties_get_data( first_frame_properties, "image", NULL );
	uint8_t *first_alpha = mlt_properties_get_data( first_frame_properties, "alpha", NULL );
	if ( !first_image )
	{
		mlt_properties_set( first_frame_properties, "rescale.interp", mlt_properties_get( frame_properties, "rescale.interp" ) );

		int error = mlt_frame_get_image( first_frame, &first_image, format, width, height, writable );

		if ( error != 0 ) {
			mlt_log_error( MLT_PRODUCER_SERVICE( producer ), "first_image == NULL get image died\n" );
			mlt_service_unlock( MLT_PRODUCER_SERVICE( producer ) );
			return error;
		}
		output = mlt_pool_alloc( size );
		memcpy( output, first_image, size );
		// Let someone else clean up
		mlt_properties_set_data( properties, "output_buffer", output, size, mlt_pool_release, NULL ); 
		mlt_properties_set_int( properties, "_output_width", *width );
		mlt_properties_set_int( properties, "_output_height", *height );
		mlt_properties_set_int( properties, "_output_format", *format );
	
	}

	if ( !first_alpha )
	{
		alphasize = *width * *height;
		first_alpha = mlt_frame_get_alpha_mask( first_frame );
		output_alpha = mlt_pool_alloc( alphasize );
		memcpy( output_alpha, first_alpha, alphasize );
		mlt_properties_set_data( properties, "output_alpha", output_alpha, alphasize, mlt_pool_release, NULL );
	}

	mlt_service_unlock( MLT_PRODUCER_SERVICE( producer ) );

	// Create a copy
	uint8_t *image_copy = mlt_pool_alloc( size );
	memcpy( image_copy, first_image, size );
	uint8_t *alpha_copy = mlt_pool_alloc( alphasize );
	memcpy( alpha_copy, first_alpha, alphasize );

	// Set the output image
	*image = image_copy;
	mlt_frame_set_image( frame, *image, size, mlt_pool_release );

	mlt_frame_set_alpha( frame, alpha_copy, alphasize, mlt_pool_release );

	return 0;
}
Example #25
static int jack_process (jack_nframes_t frames, void * data)
{
	mlt_filter filter = (mlt_filter) data;
	mlt_properties properties = MLT_FILTER_PROPERTIES( filter );
	int channels = mlt_properties_get_int( properties, "channels" );
	int frame_size = mlt_properties_get_int( properties, "_samples" ) * sizeof(float);
	int sync = mlt_properties_get_int( properties, "_sync" );
	int err = 0;
	int i;
	static int total_size = 0;
  
	jack_ringbuffer_t **output_buffers = mlt_properties_get_data( properties, "output_buffers", NULL );
	if ( output_buffers == NULL )
		return 0;
	jack_ringbuffer_t **input_buffers = mlt_properties_get_data( properties, "input_buffers", NULL );
	jack_port_t **jack_output_ports = mlt_properties_get_data( properties, "jack_output_ports", NULL );
	jack_port_t **jack_input_ports = mlt_properties_get_data( properties, "jack_input_ports", NULL );
	float **jack_output_buffers = mlt_properties_get_data( properties, "jack_output_buffers", NULL );
	float **jack_input_buffers = mlt_properties_get_data( properties, "jack_input_buffers", NULL );
	pthread_mutex_t *output_lock = mlt_properties_get_data( properties, "output_lock", NULL );
	pthread_cond_t *output_ready = mlt_properties_get_data( properties, "output_ready", NULL );
	
	for ( i = 0; i < channels; i++ )
	{
		size_t jack_size = ( frames * sizeof(float) );
		size_t ring_size;
		
		// Send audio through out port
		jack_output_buffers[i] = jack_port_get_buffer( jack_output_ports[i], frames );
		if ( ! jack_output_buffers[i] )
		{
			mlt_log_error( MLT_FILTER_SERVICE(filter), "no buffer for output port %d\n", i );
			err = 1;
			break;
		}
		ring_size = jack_ringbuffer_read_space( output_buffers[i] );
		jack_ringbuffer_read( output_buffers[i], ( char * )jack_output_buffers[i], ring_size < jack_size ? ring_size : jack_size );
		if ( ring_size < jack_size )
			memset( &jack_output_buffers[i][ring_size], 0, jack_size - ring_size );
		
		// Return audio through in port
		jack_input_buffers[i] = jack_port_get_buffer( jack_input_ports[i], frames );
		if ( ! jack_input_buffers[i] )
		{
			mlt_log_error( MLT_FILTER_SERVICE(filter), "no buffer for input port %d\n", i );
			err = 1;
			break;
		}
		
		// Do not start returning audio until we have sent first mlt frame
		if ( sync && i == 0 && frame_size > 0 )
			total_size += ring_size;
		mlt_log_debug( MLT_FILTER_SERVICE(filter), "sync %d frame_size %d ring_size %zu jack_size %zu\n", sync, frame_size, ring_size, jack_size );
		
		if ( ! sync || ( frame_size > 0  && total_size >= frame_size ) )
		{
			ring_size = jack_ringbuffer_write_space( input_buffers[i] );
			jack_ringbuffer_write( input_buffers[i], ( char * )jack_input_buffers[i], ring_size < jack_size ? ring_size : jack_size );
			
			if ( sync )
			{
				// Tell mlt that audio is available
				pthread_mutex_lock( output_lock);
				pthread_cond_signal( output_ready );
				pthread_mutex_unlock( output_lock);

				// Clear sync phase
				mlt_properties_set_int( properties, "_sync", 0 );
			}
		}
	}

	// Often jackd does not send the stopped event through the JackSyncCallback
	jack_client_t *jack_client = mlt_properties_get_data( properties, "jack_client", NULL );
	jack_position_t jack_pos;
	jack_transport_state_t state = jack_transport_query( jack_client, &jack_pos );
	int transport_state = mlt_properties_get_int( properties, "_transport_state" );
	if ( state != transport_state )
	{
		mlt_properties_set_int( properties, "_transport_state", state );
		if ( state == JackTransportStopped )
			jack_sync( state, &jack_pos, filter );
	}

	return err;
}
Example #26
File: vdpau.c Project: aib/mlt
static int vdpau_decoder_init( producer_avformat self )
{
	mlt_log_debug( MLT_PRODUCER_SERVICE(self->parent), "vdpau_decoder_init\n" );
	int success = 1;
	
	self->video_codec->opaque = self;
	self->video_codec->get_format = vdpau_get_format;
	self->video_codec->get_buffer = vdpau_get_buffer;
	self->video_codec->release_buffer = vdpau_release_buffer;
	self->video_codec->draw_horiz_band = vdpau_draw_horiz;
	self->video_codec->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
	self->video_codec->pix_fmt = PIX_FMT_VDPAU_H264;
	
	VdpDecoderProfile profile = VDP_DECODER_PROFILE_H264_HIGH;
	uint32_t max_references = self->video_codec->refs;
	pthread_mutex_lock( &mlt_sdl_mutex );
	VdpStatus status = vdp_decoder_create( self->vdpau->device,
		profile, self->video_codec->width, self->video_codec->height, max_references, &self->vdpau->decoder );
	pthread_mutex_unlock( &mlt_sdl_mutex );
	
	if ( status == VDP_STATUS_OK )
	{
			int i, n = FFMIN( self->video_codec->refs + 2, MAX_VDPAU_SURFACES );

			self->vdpau->deque = mlt_deque_init();
			for ( i = 0; i < n; i++ )
			{
				if ( VDP_STATUS_OK == vdp_surface_create( self->vdpau->device, VDP_CHROMA_TYPE_420,
					self->video_codec->width, self->video_codec->height, &self->vdpau->render_states[i].surface ) )
				{
					mlt_log_debug( MLT_PRODUCER_SERVICE(self->parent), "successfully created VDPAU surface %x\n",
						self->vdpau->render_states[i].surface );
					mlt_deque_push_back( self->vdpau->deque, &self->vdpau->render_states[i] );
				}
				else
				{
					mlt_log_info( MLT_PRODUCER_SERVICE(self->parent), "failed to create VDPAU surface %dx%d\n",
						self->video_codec->width, self->video_codec->height );
					while ( mlt_deque_count( self->vdpau->deque ) )
					{
						struct vdpau_render_state *render = mlt_deque_pop_front( self->vdpau->deque );
						vdp_surface_destroy( render->surface );
					}
					mlt_deque_close( self->vdpau->deque );
					success = 0;
					break;
				}
			}
			if ( self->vdpau )
				self->vdpau->b_age = self->vdpau->ip_age[0] = self->vdpau->ip_age[1] = 256*256*256*64; // magic from Avidemux
	}
	else
	{
		success = 0;
		self->vdpau->decoder = VDP_INVALID_HANDLE;
		mlt_log_error( MLT_PRODUCER_SERVICE(self->parent), "VDPAU failed to initialize decoder (%s)\n",
			vdp_get_error_string( status ) );
	}
	
	return success;
}
Example #27
static int consumer_play_audio( consumer_sdl self, mlt_frame frame, int init_audio, int *duration )
{
	// Get the properties of self consumer
	mlt_properties properties = self->properties;
	mlt_audio_format afmt = mlt_audio_s16;

	// Set the preferred params of the test card signal
	int channels = mlt_properties_get_int( properties, "channels" );
	int dest_channels = channels;
	int frequency = mlt_properties_get_int( properties, "frequency" );
	static int counter = 0;

	int samples = mlt_sample_calculator( mlt_properties_get_double( self->properties, "fps" ), frequency, counter++ );
	
	int16_t *pcm;
	int bytes;

	mlt_frame_get_audio( frame, (void**) &pcm, &afmt, &frequency, &channels, &samples );
	*duration = ( ( samples * 1000 ) / frequency );
	pcm += mlt_properties_get_int( properties, "audio_offset" );

	if ( mlt_properties_get_int( properties, "audio_off" ) )
	{
		self->playing = 1;
		init_audio = 1;
		return init_audio;
	}

	if ( init_audio == 1 )
	{
		SDL_AudioSpec request;
		SDL_AudioSpec got;

		int audio_buffer = mlt_properties_get_int( properties, "audio_buffer" );

		// specify audio format
		memset( &request, 0, sizeof( SDL_AudioSpec ) );
		self->playing = 0;
		request.freq = frequency;
		request.format = AUDIO_S16SYS;
		request.channels = dest_channels;
		request.samples = audio_buffer;
		request.callback = sdl_fill_audio;
		request.userdata = (void *)self;
		if ( SDL_OpenAudio( &request, &got ) != 0 )
		{
			mlt_log_error( MLT_CONSUMER_SERVICE( self ), "SDL failed to open audio: %s\n", SDL_GetError() );
			init_audio = 2;
		}
		else if ( got.size != 0 )
		{
			SDL_PauseAudio( 0 );
			init_audio = 0;
		}
	}

	if ( init_audio == 0 )
	{
		mlt_properties properties = MLT_FRAME_PROPERTIES( frame );
		
		bytes = samples * dest_channels * sizeof(*pcm);
		pthread_mutex_lock( &self->audio_mutex );
		while ( self->running && bytes > ( sizeof( self->audio_buffer) - self->audio_avail ) )
			pthread_cond_wait( &self->audio_cond, &self->audio_mutex );
		if ( self->running )
		{
			if ( mlt_properties_get_double( properties, "_speed" ) == 1 )
			{
				if ( channels == dest_channels )
				{
					memcpy( &self->audio_buffer[ self->audio_avail ], pcm, bytes );
				}
				else
				{
					int16_t *dest = (int16_t*) &self->audio_buffer[ self->audio_avail ];
					int i = samples + 1;
					
					while ( --i )
					{
						memcpy( dest, pcm, dest_channels * sizeof(*pcm) );
						pcm += channels;
						dest += dest_channels;
					}
				}
			}
			else
			{
				memset( &self->audio_buffer[ self->audio_avail ], 0, bytes );
			}
			self->audio_avail += bytes;
		}
		pthread_cond_broadcast( &self->audio_cond );
		pthread_mutex_unlock( &self->audio_mutex );
	}
	else
	{
		self->playing = 1;
	}

	return init_audio;
}
Example #28
static int filter_get_audio( mlt_frame frame, void **buffer, mlt_audio_format *format, int *frequency, int *channels, int *samples )
{
	// Get the properties of the a frame
	mlt_properties properties = MLT_FRAME_PROPERTIES( frame );

	// Get the filter service
	mlt_filter filter = mlt_frame_pop_audio( frame );

	int from = mlt_properties_get_int( properties, "channelcopy.from" );
	int to = mlt_properties_get_int( properties, "channelcopy.to" );
	int swap = mlt_properties_get_int( properties, "channelcopy.swap" );

	// Get the producer's audio
	mlt_frame_get_audio( frame, buffer, format, frequency, channels, samples );

	// Copy channels as necessary
	if ( from != to)
	switch ( *format )
	{
		case mlt_audio_u8:
		{
			uint8_t *f = (uint8_t*) *buffer + from;
			uint8_t *t = (uint8_t*) *buffer + to;
			uint8_t x;
			int i;

			if ( swap )
				for ( i = 0; i < *samples; i++, f += *channels, t += *channels )
				{
					x = *t;
					*t = *f;
					*f = x;
				}
			else
				for ( i = 0; i < *samples; i++, f += *channels, t += *channels )
					*t = *f;
			break;
		}
		case mlt_audio_s16:
		{
			int16_t *f = (int16_t*) *buffer + from;
			int16_t *t = (int16_t*) *buffer + to;
			int16_t x;
			int i;

			if ( swap )
				for ( i = 0; i < *samples; i++, f += *channels, t += *channels )
				{
					x = *t;
					*t = *f;
					*f = x;
				}
			else
				for ( i = 0; i < *samples; i++, f += *channels, t += *channels )
					*t = *f;
			break;
		}
		case mlt_audio_s32:
		{
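			// mlt_audio_s32 is planar in MLT: each channel occupies a contiguous block of *samples values.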
			int32_t *f = (int32_t*) *buffer + from * *samples;
			int32_t *t = (int32_t*) *buffer + to * *samples;

			if ( swap )
			{
				int32_t *x = malloc( *samples * sizeof(int32_t) );
				memcpy( x, t, *samples * sizeof(int32_t) );
				memcpy( t, f, *samples * sizeof(int32_t) );
				memcpy( f, x, *samples * sizeof(int32_t) );
				free( x );
			}
			else
			{
				memcpy( t, f, *samples * sizeof(int32_t) );
			}
			break;
		}
		case mlt_audio_s32le:
		case mlt_audio_f32le:
		{
			int32_t *f = (int32_t*) *buffer + from;
			int32_t *t = (int32_t*) *buffer + to;
			int32_t x;
			int i;

			if ( swap )
				for ( i = 0; i < *samples; i++, f += *channels, t += *channels )
				{
					x = *t;
					*t = *f;
					*f = x;
				}
			else
				for ( i = 0; i < *samples; i++, f += *channels, t += *channels )
					*t = *f;
			break;
		}
		case mlt_audio_float:
		{
			float *f = (float*) *buffer + from * *samples;
			float *t = (float*) *buffer + to * *samples;

			if ( swap )
			{
				float *x = malloc( *samples * sizeof(float) );
				memcpy( x, t, *samples * sizeof(float) );
				memcpy( t, f, *samples * sizeof(float) );
				memcpy( f, x, *samples * sizeof(float) );
				free( x );
			}
			else
			{
				memcpy( t, f, *samples * sizeof(float) );
			}
			break;
		}
		default:
			mlt_log_error( MLT_FILTER_SERVICE( filter ), "Invalid audio format\n" );
			break;
	}

	return 0;
}
Example #29
static int setup_sdl_video( consumer_sdl self )
{
	int error = 0;
	int sdl_flags = SDL_WINDOW_RESIZABLE;
	int texture_format = SDL_PIXELFORMAT_YUY2;

	// Skip this if video is disabled.
	int video_off = mlt_properties_get_int( self->properties, "video_off" );
	int preview_off = mlt_properties_get_int( self->properties, "preview_off" );
	if ( video_off || preview_off )
		return error;

	if (!SDL_WasInit(SDL_INIT_VIDEO))
	{
		pthread_mutex_lock( &mlt_sdl_mutex );
		int ret = SDL_Init( SDL_INIT_VIDEO );
		pthread_mutex_unlock( &mlt_sdl_mutex );
		if ( ret < 0 )
		{
			mlt_log_error( MLT_CONSUMER_SERVICE(&self->parent), "Failed to initialize SDL: %s\n", SDL_GetError() );
			return -1;
		}
	}

#if 0 // only yuv422 working currently
	int image_format = mlt_properties_get_int( self->properties, "mlt_image_format" );

	if ( image_format ) switch ( image_format ) {
	case mlt_image_rgb24:
		texture_format = SDL_PIXELFORMAT_RGB24;
		break;
	case mlt_image_rgb24a:
		texture_format = SDL_PIXELFORMAT_ABGR8888;
		break;
	case mlt_image_yuv420p:
		texture_format = SDL_PIXELFORMAT_IYUV;
		break;
	case mlt_image_yuv422:
		texture_format = SDL_PIXELFORMAT_YUY2;
		break;
	default:
		mlt_log_error( MLT_CONSUMER_SERVICE(&self->parent), "Invalid image format %s\n",
			mlt_image_format_name( image_format ) );
		return -1;
	}
#endif

	if ( mlt_properties_get_int( self->properties, "fullscreen" ) )
	{
		self->window_width = self->width;
		self->window_height = self->height;
		sdl_flags |= SDL_WINDOW_FULLSCREEN_DESKTOP;
		SDL_ShowCursor( SDL_DISABLE );
	}

	pthread_mutex_lock( &mlt_sdl_mutex );
	self->sdl_window = SDL_CreateWindow("MLT", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
		self->window_width, self->window_height, sdl_flags);
	self->sdl_renderer = SDL_CreateRenderer(self->sdl_window, -1, SDL_RENDERER_ACCELERATED);
	if ( self->sdl_renderer )
	{
		self->sdl_texture = SDL_CreateTexture( self->sdl_renderer, texture_format,
			SDL_TEXTUREACCESS_STREAMING, self->window_width, self->window_height );
		if ( self->sdl_texture ) {
			SDL_SetRenderDrawColor( self->sdl_renderer, 0, 0, 0, 255);
		} else {
			mlt_log_error( MLT_CONSUMER_SERVICE(&self->parent), "Failed to create SDL texture: %s\n", SDL_GetError() );
			error = -1;
		}
	} else {
		mlt_log_error( MLT_CONSUMER_SERVICE(&self->parent), "Failed to create SDL renderer: %s\n", SDL_GetError() );
		error = -1;
	}
	pthread_mutex_unlock( &mlt_sdl_mutex );

	return error;
}
Example #30
static jack_rack_t* initialise_jack_rack( mlt_properties properties, int channels )
{
	jack_rack_t *jackrack = NULL;
	char *resource = mlt_properties_get( properties, "resource" );
	if ( !resource && mlt_properties_get( properties, "src" ) )
		resource = mlt_properties_get( properties, "src" );

	// Start JackRack
	if ( resource || mlt_properties_get_int64( properties, "_pluginid" ) )
	{
		// Create JackRack without Jack client name so that it only uses LADSPA
		jackrack = jack_rack_new( NULL, channels );
		mlt_properties_set_data( properties, "jackrack", jackrack, 0,
			(mlt_destructor) jack_rack_destroy, NULL );

		if ( resource )
			// Load JACK Rack XML file
			jack_rack_open_file( jackrack, resource );
		else if ( mlt_properties_get_int64( properties, "_pluginid" ) )
		{
			// Load one LADSPA plugin by its UniqueID
			unsigned long id = mlt_properties_get_int64( properties, "_pluginid" );
			plugin_desc_t *desc = plugin_mgr_get_any_desc( jackrack->plugin_mgr, id );
			plugin_t *plugin;
			if ( desc && ( plugin = jack_rack_instantiate_plugin( jackrack, desc ) ) )
			{
				plugin->enabled = TRUE;
				process_add_plugin( jackrack->procinfo, plugin );
				mlt_properties_set_int( properties, "instances", plugin->copies );
			}
			else
			{
				mlt_log_error( properties, "failed to load plugin %lu\n", id );
				return jackrack;
			}

			if ( plugin && plugin->desc && plugin->copies == 0 )
			{
				// Calculate the number of channels that will work with this plugin
				int request_channels = plugin->desc->channels;
				while ( request_channels < channels )
					request_channels += plugin->desc->channels;

				if ( request_channels != channels )
				{
					// Try to load again with a compatible number of channels.
					mlt_log_warning( properties, "Not compatible with %d channels. Requesting %d channels instead.\n",
						channels, request_channels );
					jack_rack_destroy( jackrack );
					jackrack = initialise_jack_rack( properties, request_channels );
				}
				else
				{
					mlt_log_error( properties, "Invalid plugin configuration: %lu\n", id );
					return jackrack;
				}
			}

			if ( plugin && plugin->desc && plugin->copies )
				mlt_log_debug( properties, "Plugin Initialized. Channels: %lu\tCopies: %d\tTotal: %lu\n", plugin->desc->channels, plugin->copies, jackrack->channels );
		}
	}
	return jackrack;
}