Example #1
File: gavf.c Project: kidaa/gmerlin
static void gavf_stream_free(gavf_stream_t * s)
{
    if(s->asrc)
        gavl_audio_source_destroy(s->asrc);
    if(s->vsrc)
        gavl_video_source_destroy(s->vsrc);
    if(s->psrc)
        gavl_packet_source_destroy(s->psrc);

    if(s->asink)
        gavl_audio_sink_destroy(s->asink);
    if(s->vsink)
        gavl_video_sink_destroy(s->vsink);
    if(s->psink)
        gavl_packet_sink_destroy(s->psink);
    if(s->pb)
        gavf_packet_buffer_destroy(s->pb);

    if(s->aframe)
    {
        gavl_audio_frame_null(s->aframe);
        gavl_audio_frame_destroy(s->aframe);
    }
    if(s->vframe)
    {
        gavl_video_frame_null(s->vframe);
        gavl_video_frame_destroy(s->vframe);
    }
}
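
A note on the pattern above: gavl_audio_frame_null() and gavl_video_frame_null() clear the frame's data pointers so that the following destroy call only frees memory the frame itself owns. The sketch below shows the plain create/use/destroy lifecycle of an audio frame; it is illustrative only, and the format values are made up rather than taken from the gmerlin project.

#include <string.h>
#include <gavl/gavl.h>

/* Sketch: create an audio frame, use it, and tear it down.
 * When a frame's channel pointers are later re-pointed at memory owned
 * elsewhere, gavl_audio_frame_null() is called before the destroy,
 * as gavf_stream_free() does above. */
static void audio_frame_lifecycle(void)
  {
  gavl_audio_format_t fmt;
  gavl_audio_frame_t * frame;

  memset(&fmt, 0, sizeof(fmt));
  fmt.num_channels      = 2;
  fmt.samplerate        = 44100;
  fmt.sample_format     = GAVL_SAMPLE_S16;
  fmt.samples_per_frame = 1024;
  gavl_set_channel_setup(&fmt);           /* fill in a default channel layout */

  frame = gavl_audio_frame_create(&fmt);  /* allocates the sample buffers */
  gavl_audio_frame_mute(frame, &fmt);     /* fill the frame with silence */

  /* ... read or process samples here ... */

  gavl_audio_frame_destroy(frame);        /* frees the buffers the frame owns */
  }
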
Example #2
void
gavl_audio_source_set_dst(gavl_audio_source_t * s, int dst_flags,
                          const gavl_audio_format_t * dst_format)
  {
  s->next_pts = GAVL_TIME_UNDEFINED;
  s->dst_flags = dst_flags;

  if(dst_format)
    gavl_audio_format_copy(&s->dst_format, dst_format);
  else
    gavl_audio_format_copy(&s->dst_format, &s->src_format);

  if(gavl_audio_converter_init(s->cnv,
                               &s->src_format, &s->dst_format))
    s->flags |= FLAG_DO_CONVERT;
  else
    s->flags &= ~FLAG_DO_CONVERT;
  
  if(!(s->flags & FLAG_DO_CONVERT) &&
     (s->src_format.samples_per_frame == s->dst_format.samples_per_frame) &&
     !(s->src_flags & GAVL_SOURCE_SRC_FRAMESIZE_MAX))
    s->flags |= (FLAG_PASSTHROUGH | FLAG_PASSTHROUGH_INIT);
  else
    s->flags &= ~(FLAG_PASSTHROUGH | FLAG_PASSTHROUGH_INIT);
  
  if(s->out_frame)
    {
    gavl_audio_frame_destroy(s->out_frame);
    s->out_frame = NULL;
    }
  if(s->dst_frame)
    {
    gavl_audio_frame_destroy(s->dst_frame);
    s->dst_frame = NULL;
    }
  if(s->buffer_frame)
    {
    gavl_audio_frame_destroy(s->buffer_frame);
    s->buffer_frame = NULL;
    }

  s->frame = NULL;

  s->flags |= FLAG_DST_SET;

  }
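
gavl_audio_source_set_dst() enables FLAG_DO_CONVERT only when gavl_audio_converter_init() reports that conversion steps are needed. The sketch below drives the same converter calls directly; convert_one_frame() is a made-up helper for illustration and assumes the caller has already allocated in and out frames for the two formats.

#include <gavl/gavl.h>

/* Sketch: convert one audio frame from src_fmt to dst_fmt.
 * Returns the number of conversion steps gavl chose
 * (0 means the formats already match and no conversion ran). */
static int convert_one_frame(gavl_audio_format_t * src_fmt,
                             gavl_audio_format_t * dst_fmt,
                             gavl_audio_frame_t * in,
                             gavl_audio_frame_t * out)
  {
  int steps;
  gavl_audio_converter_t * cnv = gavl_audio_converter_create();

  steps = gavl_audio_converter_init(cnv, src_fmt, dst_fmt);

  if(steps)
    gavl_audio_convert(cnv, in, out);   /* run the conversion chain */

  gavl_audio_converter_destroy(cnv);
  return steps;
  }
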
Example #3
File: gavf.c Project: kidaa/gmerlin
void gavf_close(gavf_t * g)
{
    int i;
    if(g->wr)
    {
        if(g->streams)
        {
            /* Flush packets if any */
            gavf_flush_packets(g, NULL);

            /* Append final sync header */
            write_sync_header(g, -1, NULL);
        }

        /* Write footer */

        if(!gavf_footer_write(g))
            return;
    }

    /* Free stuff */

    if(g->streams)
    {
        for(i = 0; i < g->ph.num_streams; i++)
            gavf_stream_free(&g->streams[i]);
        free(g->streams);
    }
    gavf_sync_index_free(&g->si);
    gavf_packet_index_free(&g->pi);
    gavf_program_header_free(&g->ph);

    if(g->cl)
        gavl_chapter_list_destroy(g->cl);

    if(g->write_vframe)
    {
        gavl_video_frame_null(g->write_vframe);
        gavl_video_frame_destroy(g->write_vframe);
    }

    if(g->write_aframe)
    {
        gavl_audio_frame_null(g->write_aframe);
        gavl_audio_frame_destroy(g->write_aframe);
    }

    if(g->sync_pts)
        free(g->sync_pts);

    gavl_packet_free(&g->write_pkt);

    gavf_buffer_free(&g->pkt_buf);
    gavf_buffer_free(&g->meta_buf);
    gavl_metadata_free(&g->metadata);

    free(g);
}
Example #4
static void close_gsm(bgav_stream_t * s)
  {
  gsm_priv * priv;
  priv = s->decoder_priv;

  if(priv->frame)
    gavl_audio_frame_destroy(priv->frame);
  gsm_destroy(priv->gsm_state);
  free(priv);
  }
Example #5
static void close_esd(void * data)
  {
  esd_t * e = data;
  esd_close(e->esd_socket);
  gavl_audio_frame_destroy(e->f);
  if(e->src)
    {
    gavl_audio_source_destroy(e->src);
    e->src = NULL;
    }
  }
Example #6
static void close_gavf_audio(bgav_stream_t * s)
  {
  gavf_audio_t * priv = s->decoder_priv;
  
  if(priv->frame)
    {
    gavl_audio_frame_null(priv->frame);
    gavl_audio_frame_destroy(priv->frame);
    }
  free(priv);
  }
Example #7
void gavl_audio_source_destroy(gavl_audio_source_t * s)
  {
#if 0
  if(s->src_fp)
    gavl_audio_frame_pool_destroy(s->src_fp);
  if(s->dst_fp)
    gavl_audio_frame_pool_destroy(s->dst_fp);
#endif

  if(s->out_frame)
    gavl_audio_frame_destroy(s->out_frame);
  if(s->in_frame)
    gavl_audio_frame_destroy(s->in_frame);
  if(s->dst_frame)
    gavl_audio_frame_destroy(s->dst_frame);
  if(s->buffer_frame)
    gavl_audio_frame_destroy(s->buffer_frame);
  
  
  gavl_audio_converter_destroy(s->cnv);
  free(s);
  }
Example #8
static void close_mad(bgav_stream_t * s)
  {
  mad_priv_t * priv;
  priv = s->decoder_priv;

  mad_synth_finish(&priv->synth);
  mad_frame_finish(&priv->frame);
  mad_stream_finish(&priv->stream);

  bgav_bytebuffer_free(&priv->buf);
  
  if(priv->audio_frame)
    gavl_audio_frame_destroy(priv->audio_frame);
  free(priv);
  }
Example #9
static void alloc_frames(gavl_audio_converter_t* cnv,
                         int in_samples, double new_ratio)
  {
  gavl_audio_convert_context_t * ctx;
  int out_samples_needed;  
  if((cnv->input_format.samples_per_frame >= in_samples) && (new_ratio < 0.0))
    return;
 
  cnv->input_format.samples_per_frame = in_samples;
  
  /* Set the samples_per_frame member of all intermediate formats */
  
  ctx = cnv->contexts;
  out_samples_needed = in_samples;  

  while(ctx->next)
    {
    ctx->input_format.samples_per_frame = out_samples_needed;
    
    if(ctx->samplerate_converter)
      {
      /* Varispeed */
      if(new_ratio > 0.0)
        {
        out_samples_needed = 
          (int)(0.5 * (ctx->samplerate_converter->ratio + new_ratio) * out_samples_needed) + 10;
        }
      /* Constant ratio */
      else
        {
        out_samples_needed =
          (out_samples_needed * ctx->output_format.samplerate) /
          ctx->input_format.samplerate + 10;
        }
      }
    if(ctx->output_format.samples_per_frame < out_samples_needed)
      {
      ctx->output_format.samples_per_frame = out_samples_needed + 1024;
      if(ctx->output_frame)
        gavl_audio_frame_destroy(ctx->output_frame);
      ctx->output_frame = gavl_audio_frame_create(&ctx->output_format);
      ctx->next->input_frame = ctx->output_frame;
      }
    ctx = ctx->next;
    }
  }
Example #10
static void audio_converter_cleanup(gavl_audio_converter_t* cnv)
  {
  gavl_audio_convert_context_t * ctx;
  
  ctx = cnv->contexts;

  while(ctx)
    {
    ctx = cnv->contexts->next;
    if(ctx && cnv->contexts->output_frame)
      gavl_audio_frame_destroy(cnv->contexts->output_frame);
    destroy_context(cnv->contexts);
    cnv->contexts = ctx;
    }
  cnv->num_conversions = 0;
  cnv->contexts     = NULL;
  cnv->last_context = NULL;
  }
Example #11
static void close_real(bgav_stream_t * s)
  {
  real_priv_t * p = s->decoder_priv;

  if(p->frame)
    gavl_audio_frame_destroy(p->frame);
  
  if(p->read_buffer)
    free(p->read_buffer);
#if 1
  if(p->raFreeDecoder)
    p->raFreeDecoder(p->real_handle);
  if(p->raCloseCodec)
    p->raCloseCodec(p->real_handle);
#endif
  //  dlclose(p->module);
  free(p);
  }
Example #12
ReadMedia::~ReadMedia() {
	printf("killing the media..\n");
	setCommand( CMD_QUIT );		
	signalDispatcher();
		

	// signal dispatcher joins the opener and AV threads	
	pthread_join( m_thread_dispatch, NULL);

	//printf("joined dispatcher\n");	
	if (m_audio_frame != NULL) {
		gavl_audio_frame_destroy(m_audio_frame);
	}
	if (m_video_frame != NULL) {
		gavl_video_frame_destroy(m_video_frame);
	}
	if (m_file != NULL) {
		bgav_close(m_file);
	}

	//printf("now, on to deleting fifo...\n");
	if( m_fifoaudio != NULL) delete m_fifoaudio;
	if( m_fifovideo != NULL) delete m_fifovideo;

	// these are created only once	
	bgav_options_destroy(m_opt);

	pthread_cond_destroy(&m_cond_dispatch);
	pthread_mutex_destroy(&m_condmut_dispatch);

	pthread_cond_destroy(&m_cond_a);
	pthread_cond_destroy(&m_cond_v);
	pthread_mutex_destroy(&m_condmut_a);
	pthread_mutex_destroy(&m_condmut_v);

	pthread_mutex_destroy(&m_av_mut);
	pthread_mutex_destroy(&m_state_mut);

	printf("killed the media..\n");
}
Example #13
bool ReadMedia::initFormat() {

	const gavl_audio_format_t * open_audio_format;
	const gavl_video_format_t * open_video_format;

	// we use m_vfifosize to see if the user app wants video or not;
	// if it doesn't, we set m_video_stream_count to 0
	if (m_video_stream_count > 0 && m_vfifosize > 0) {
		open_video_format = bgav_get_video_format(m_file, 0);

		if (open_video_format->pixelformat == GAVL_PIXELFORMAT_NONE) {
			printf("!!!sorry, pixelformat is not recognized.\n");
			return false;
		}

		// let's check to see if the formats are the same, if they are the same
		// there is no reason to recreate the fifo or frames
		if ( gavl_video_formats_equal( &m_video_format, open_video_format) == 0 ) { 	
			// the formats are different
			gavl_video_format_copy (&m_video_format, open_video_format);
			if (m_video_frame != NULL)
				gavl_video_frame_destroy(m_video_frame);
			m_video_frame = gavl_video_frame_create(&m_video_format);
			gavl_video_frame_clear( m_video_frame, &m_video_format);
			if (m_fifovideo != NULL)
				delete m_fifovideo;
			m_fifovideo=  new FifoVideoFrames( m_vfifosize ,  &m_video_format); 
		}
	} else {
		m_video_stream_count = 0;
		m_veof = true;
	}

	// we use m_afifosize to see if the user app wants audio or not;
	// if it doesn't, we set m_audio_stream_count to 0
	if (m_audio_stream_count > 0 && m_afifosize > 0) {  
		open_audio_format = bgav_get_audio_format(m_file, 0);    
	
		// we can get audio formats that are unknown
		if ( open_audio_format->sample_format == GAVL_SAMPLE_NONE) {
			printf("sorry, this file has unsupported audio.\n"); 
			return false;	
		}

		if ( gavl_audio_formats_equal(&m_audio_format, open_audio_format) == 0 ) { 	
			// audio formats are different
			// save the old spf
			int spf = m_audio_format.samples_per_frame; 
			gavl_audio_format_copy(&m_audio_format, open_audio_format);

			if (m_audio_frame != NULL) {
				gavl_audio_frame_destroy(m_audio_frame);
			}

			// set it back to original
			m_audio_format.samples_per_frame = spf ;

			m_audio_frame = gavl_audio_frame_create(&m_audio_format);
	
			gavl_audio_frame_mute( m_audio_frame, &m_audio_format);
			if( m_fifoaudio != NULL )
				delete m_fifoaudio;
			m_fifoaudio = new FifoAudioFrames( m_afifosize , &m_audio_format); 
		}
	} else {
		// user doesn't want audio
		m_audio_stream_count = 0;
		m_aeof=true;
	}


	m_length_in_gavltime = bgav_get_duration ( m_file, 0);
	m_length_in_seconds = gavl_time_to_seconds(  m_length_in_gavltime );
	m_num_samples = 0;
	m_num_frames = 0;

	if (m_audio_stream_count) {
		if ( bgav_can_seek_sample(m_file) == 1 ) {
			m_num_samples=	bgav_audio_duration ( m_file, 0) ;
		} else {
			m_num_samples=	gavl_time_to_samples( m_audio_format.samplerate ,  bgav_get_duration ( m_file, 0) );
		}
	}

	// set frames. We need to take care here for non-constant frame rates.
	if(m_video_stream_count) {
		if ( bgav_can_seek_sample(m_file) == 1  && m_video_format.framerate_mode == GAVL_FRAMERATE_CONSTANT) { 
			m_num_frames =	bgav_video_duration ( m_file, 0)/ m_video_format.frame_duration;
		} else if ( bgav_can_seek_sample(m_file) == 1  && m_video_format.framerate_mode == GAVL_FRAMERATE_VARIABLE ) {
			// FIXME what to do with non constant frame rates?
			m_num_frames=0;
		} else { 
			m_num_frames =	gavl_time_to_frames( m_video_format.timescale, m_video_format.frame_duration ,  bgav_get_duration ( m_file, 0) );
		}
	}

  //	printf("m_num_frames =%lld, duration = %lld , vid_duration=%lld\n", 
	//		m_num_frames, bgav_get_duration ( m_file, 0),  bgav_video_duration ( m_file, 0) );
	// set seconds
	if ( bgav_can_seek_sample(m_file) == 1) {
		gavl_time_t atime=0,vtime=0;
		if ( m_audio_stream_count ) 
			atime =  gavl_samples_to_time( m_audio_format.samplerate, m_num_samples );
		if (m_video_stream_count &&  m_video_format.frame_duration > 0) {
			vtime =  gavl_frames_to_time( m_video_format.timescale, m_video_format.frame_duration, m_num_frames );
		} else if ( m_video_stream_count  ) { // non constant framerate			
			vtime = bgav_video_duration( m_file, 0);
		}
		// else rely on audio time
		m_length_in_gavltime = atime > vtime ? atime :vtime;
		m_length_in_seconds = gavl_time_to_seconds( m_length_in_gavltime );
		//printf("atime=%ld,  vtime=%ld, l_in_sec=%f\n", atime, vtime, m_length_in_seconds);
	} 

	m_pcm_seek = SEEK_NOTHING;
	m_frame_seek = SEEK_NOTHING;

	return true;
}
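
initFormat() relies on gavl's time helpers to convert between gavl_time_t ticks (GAVL_TIME_SCALE per second), sample counts, and frame counts. Below is a minimal sketch of that arithmetic with illustrative numbers; no file is opened and the values are not taken from the project above.

#include <stdio.h>
#include <inttypes.h>
#include <gavl/gavl.h>

/* Sketch: the unit conversions initFormat() performs.
 * GAVL_TIME_SCALE is the number of gavl_time_t ticks per second. */
static void time_conversion_sketch(void)
  {
  int samplerate     = 44100;
  int timescale      = 25;   /* video ticks per second */
  int frame_duration = 1;    /* duration of one video frame in those ticks */
  gavl_time_t duration = 2 * GAVL_TIME_SCALE;   /* two seconds */

  int64_t samples = gavl_time_to_samples(samplerate, duration);
  int64_t frames  = gavl_time_to_frames(timescale, frame_duration, duration);

  printf("%.3f s = %" PRId64 " samples = %" PRId64 " frames\n",
         gavl_time_to_seconds(duration), samples, frames);
  printf("round trip via samples: %.3f s\n",
         gavl_time_to_seconds(gavl_samples_to_time(samplerate, samples)));
  }
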
Example #14
static void open_audio(visualizer_t * v)
  {
  int i;
  int was_open;
  gavl_time_t delay_time = GAVL_TIME_SCALE / 20; /* 50 ms */
  memset(&v->audio_format, 0, sizeof(v->audio_format));
  v->audio_format.num_channels = 2;
  v->audio_format.samplerate = 44100;
  v->audio_format.sample_format = GAVL_SAMPLE_S16;
  gavl_set_channel_setup(&v->audio_format);
  
  if(v->audio_frame)
    {
    gavl_audio_frame_destroy(v->audio_frame);
    v->ra_plugin->close(v->ra_handle->priv);
    v->audio_frame = NULL;
    bg_plugin_unref(v->ra_handle);
    was_open = 1;
    }
  else
    was_open = 0;
  
  v->audio_open = 0;
  
  v->ra_handle = bg_plugin_load(v->plugin_reg,
                                v->ra_info);
  v->ra_plugin = (bg_recorder_plugin_t*)(v->ra_handle->plugin);
  
  /* The soundcard might be busy from last time,
     give the kernel some time to free the device */
  
  if(!v->ra_plugin->open(v->ra_handle->priv, &v->audio_format, NULL, &v->m))
    {
    if(!was_open)
      {
      bg_log(BG_LOG_ERROR, LOG_DOMAIN,
             "Opening audio device failed, fix settings and click restart");
      gtk_label_set_text(GTK_LABEL(v->fps), TR("No audio"));
      return;
      }
    for(i = 0; i < 20; i++)
      {
      gavl_time_delay(&delay_time);
      
      if(v->ra_plugin->open(v->ra_handle->priv, &v->audio_format, NULL, &v->m))
        {
        v->audio_open = 1;
        break;
        }
      }
    }
  else
    v->audio_open = 1;

  if(v->audio_open)
    {
    v->audio_frame = gavl_audio_frame_create(&v->audio_format);
    bg_gtk_vumeter_set_format(v->vumeter, &v->audio_format);
    }
  else
    {
    bg_log(BG_LOG_ERROR, LOG_DOMAIN,
           "Opening audio device failed, fix settings and click restart");
    gtk_label_set_text(GTK_LABEL(v->fps), TR("No audio"));
    }
  }