Example #1
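This first version is the render vfunc of the spectrascope visualizer against the old 0.10-style GstBaseAudioVisualizer API: the audio payload is read directly via GST_BUFFER_DATA, duplicated with g_memdup(), mixed down to mono if there is more than one channel, windowed and transformed with gst_fft_s16, and the resulting bins are drawn as vertical lines into the video buffer.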
static gboolean
gst_spectra_scope_render (GstBaseAudioVisualizer * bscope, GstBuffer * audio,
    GstBuffer * video)
{
  GstSpectraScope *scope = GST_SPECTRA_SCOPE (bscope);
  guint32 *vdata = (guint32 *) GST_BUFFER_DATA (video);
  gint16 *adata = (gint16 *) g_memdup (GST_BUFFER_DATA (audio),
      GST_BUFFER_SIZE (audio));
  GstFFTS16Complex *fdata = scope->freq_data;
  guint x, y, off;
  guint l, h = bscope->height - 1;
  gfloat fr, fi;
  guint w = bscope->width;

  if (bscope->channels > 1) {
    guint ch = bscope->channels;
    guint num_samples = GST_BUFFER_SIZE (audio) / (ch * sizeof (gint16));
    guint i, c, v, s = 0;

    /* deinterleave and mixdown adata */
    for (i = 0; i < num_samples; i++) {
      v = 0;
      for (c = 0; c < ch; c++) {
        v += adata[s++];
      }
      adata[i] = v / ch;
    }
  }

  /* run fft */
  gst_fft_s16_window (scope->fft_ctx, adata, GST_FFT_WINDOW_HAMMING);
  gst_fft_s16_fft (scope->fft_ctx, adata, fdata);
  g_free (adata);

  /* draw lines */
  for (x = 0; x < bscope->width; x++) {
    /* figure out the range so that we don't need to clip,
     * or even better do a log mapping? */
    fr = (gfloat) fdata[1 + x].r / 512.0;
    fi = (gfloat) fdata[1 + x].i / 512.0;
    y = (guint) (h * fabs (fr * fr + fi * fi));
    if (y > h)
      y = h;
    y = h - y;
    off = (y * w) + x;
    vdata[off] = 0x00FFFFFF;
    for (l = y + 1; l <= h; l++) {
      off += w;
      add_pixel (&vdata[off], 0x007F7F7F);
    }
  }
  return TRUE;
}
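The function assumes that scope->fft_ctx and scope->freq_data were allocated beforehand, typically when the element's output size is negotiated. A minimal sketch of that allocation follows, with an illustrative helper name (in the real element this happens in the setup vfunc, which also asks the base class for exactly 2 * num_freq - 2 samples per frame):

static void
spectra_scope_setup_fft (GstSpectraScope * scope, guint video_width)
{
  /* the render loop reads fdata[1 + x] for x = 0..width-1, so width + 1
   * bins are needed; gst_fft_s16_fft() yields len/2 + 1 bins for a
   * length-len transform, hence len = 2 * num_freq - 2 */
  guint num_freq = video_width + 1;

  if (scope->fft_ctx)
    gst_fft_s16_free (scope->fft_ctx);
  g_free (scope->freq_data);

  scope->fft_ctx = gst_fft_s16_new (2 * num_freq - 2, FALSE);
  scope->freq_data = g_new (GstFFTS16Complex, num_freq);
}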
Example #2
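The same render vfunc after the move to the GStreamer 1.0 buffer API: both buffers are now accessed through gst_buffer_map()/gst_buffer_unmap(), and the channel count comes from the negotiated GstAudioInfo rather than a field on the base class.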
static gboolean
gst_spectra_scope_render (GstBaseAudioVisualizer * bscope, GstBuffer * audio,
    GstBuffer * video)
{
  GstSpectraScope *scope = GST_SPECTRA_SCOPE (bscope);
  gint16 *mono_adata;
  GstFFTS16Complex *fdata = scope->freq_data;
  guint x, y, off;
  guint l, h = bscope->height - 1;
  gfloat fr, fi;
  guint w = bscope->width;
  GstMapInfo amap, vmap;
  guint32 *vdata;
  gint channels;

  gst_buffer_map (audio, &amap, GST_MAP_READ);
  gst_buffer_map (video, &vmap, GST_MAP_WRITE);
  vdata = (guint32 *) vmap.data;

  channels = GST_AUDIO_INFO_CHANNELS (&bscope->ainfo);

  mono_adata = (gint16 *) g_memdup (amap.data, amap.size);

  if (channels > 1) {
    guint ch = channels;
    guint num_samples = amap.size / (ch * sizeof (gint16));
    guint i, c, v, s = 0;

    /* deinterleave and mixdown adata */
    for (i = 0; i < num_samples; i++) {
      v = 0;
      for (c = 0; c < ch; c++) {
        v += mono_adata[s++];
      }
      mono_adata[i] = v / ch;
    }
  }

  /* run fft */
  gst_fft_s16_window (scope->fft_ctx, mono_adata, GST_FFT_WINDOW_HAMMING);
  gst_fft_s16_fft (scope->fft_ctx, mono_adata, fdata);
  g_free (mono_adata);

  /* draw lines */
  for (x = 0; x < bscope->width; x++) {
    /* figure out the range so that we don't need to clip,
     * or even better do a log mapping? */
    fr = (gfloat) fdata[1 + x].r / 512.0;
    fi = (gfloat) fdata[1 + x].i / 512.0;
    y = (guint) (h * fabs (fr * fr + fi * fi));
    if (y > h)
      y = h;
    y = h - y;
    off = (y * w) + x;
    vdata[off] = 0x00FFFFFF;
    for (l = y + 1; l <= h; l++) {
      off += w;
      add_pixel (&vdata[off], 0x007F7F7F);
    }
  }
  gst_buffer_unmap (video, &vmap);
  gst_buffer_unmap (audio, &amap);
  return TRUE;
}
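One detail when reusing this code with a current GLib: g_memdup(), used here and in example #4 below, takes a guint size and has been deprecated since GLib 2.68 in favour of g_memdup2(), which takes a gsize. With a recent GLib the copy becomes:

  mono_adata = (gint16 *) g_memdup2 (amap.data, amap.size);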
Example #3
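A stand-alone benchmark main() that reads mono 16-bit PCM from a WAV file block by block, pushes each block through one of three FFT backends selected at compile time (KISS FFT, GStreamer's gst_fft_s16, or an in-house FFT), dumps the complex bins to a text file, and accumulates the time spent inside the transform with gettimeofday().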
#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>

#ifdef KISS_FFT
#include "kiss_fftr.h"
#elif GST_FFT
#include <gst/fft/gstffts16.h>
#endif

/* pcmfile_t and the wav_open_read()/wav_read_int16()/wav_close() helpers come
 * from the project's own WAV-reader module; its header is not shown here. */

/* Usage string reconstructed from the argument handling below. */
#define USAGE "usage: <prog> <input.wav> <fft_out.txt> <block_size>\n"

int main(int argc, char *argv[]){
	if(argc < 4){
        printf(USAGE);
        exit(1);
    }

    pcmfile_t   *wav_inp;
    
	wav_inp = wav_open_read(argv[1], 0);
    if(wav_inp == NULL){
        printf("wav_open_read failed\n");
        exit(1);
    }
    if(wav_inp->channels == 2){
        printf("Input file is stereo. NOT SUPPRTED\n");
        exit(1);
    }
	int         block_size 	= atoi(argv[3]);
	FILE        *fp_fft_out	= fopen(argv[2],"w");
// 	FILE		*fp_ifft_out= fopen(argv[4],"w");	
	short       *pcm 		= malloc(block_size*wav_inp->channels*sizeof(short));

#ifdef KISS_FFT
    void        		*fft_cfg	= 	kiss_fftr_alloc(block_size ,0,0,0 );
	kiss_fft_scalar*    fft_in		= 	(kiss_fft_scalar*)malloc(sizeof(kiss_fft_scalar)*(block_size));
	kiss_fft_cpx*      	fft_out		= 	(kiss_fft_cpx*)malloc(sizeof(kiss_fft_cpx)*(block_size));
#elif GST_FFT
	gint16* 			fft_in		= 	(gint16*)malloc(sizeof(gint16)*block_size);		
	GstFFTS16Complex*	fft_out		= 	(GstFFTS16Complex*)malloc(sizeof(GstFFTS16Complex)*block_size);
	GstFFTS16*			fft_self	=	gst_fft_s16_new(block_size,FALSE);
#elif ALLGO_FFT
	int     *fft_in					=	(int*)malloc(sizeof(int)*block_size);
//	int     *fft_out				=	(int*)malloc(2*sizeof(int)*block_size);
//	short   *out_pcm				=	(short*)malloc(sizeof(short)*block_size);
#endif

    int 			i;
	int         	num_samples_read;
	long 			time=0;
	struct timeval 	start,end;

    while(1){
        num_samples_read = wav_read_int16(wav_inp, pcm, (block_size*wav_inp->channels), NULL);
		if(num_samples_read != block_size){
            printf("END of wav file reached\n");
            break;
        }
	
		for(i=0;i<block_size;i++){
#ifdef ALLGO_FFT
			fft_in[i] = (int)((pcm[i]));
	    //    fft_in[2*i+1] = 0;
#elif KISS_FFT
        	fft_in[i] =(kiss_fft_scalar)pcm[i];
#elif GST_FFT
			fft_in[i] =(gint16)pcm[i];		
#endif	
		}

		gettimeofday(&start,NULL);
#ifdef KISS_FFT
    	kiss_fftr(fft_cfg , fft_in, fft_out);
#elif GST_FFT
        gst_fft_s16_fft(fft_self,fft_in,fft_out);
#elif ALLGO_FFT
		FFT(fft_in, block_size);
#endif
		gettimeofday(&end,NULL);   

		time=time+(end.tv_usec+end.tv_sec*1000000-start.tv_usec-start.tv_sec*1000000);

#ifdef FIXED_POINT
#ifdef ALLGO_FFT
		for(i = 0; i < (block_size); i+=2){
        	fprintf(fp_fft_out, "%d\t", fft_in[i]);
        	fprintf(fp_fft_out, "%d\n", fft_in[i+1]);
        }
#else	
		for(i=0;i<(block_size/2);i++){
        	fprintf(fp_fft_out,"%d\t%d\n",(int)fft_out[i].r,(int)fft_out[i].i);
		}
#endif
#else
	for(i=0;i<(block_size/2);i++){
            fprintf(fp_fft_out,"%d\t%d\n",(int)((fft_out[i].r)/block_size),(int)((fft_out[i].i/block_size)));
	}
#endif
    }

   	printf("%ld time elapsed\n\n",time); 
	wav_close(wav_inp);
    fclose(fp_fft_out);
return 0;
}
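The dump produced by the harness holds raw real/imaginary pairs. A small hypothetical post-processing helper, not part of the original program, that turns one pair into a magnitude using the same 1/block_size normalisation as the floating-point branch above:

#include <math.h>

/* Hypothetical helper (not in the original harness): magnitude of one bin,
 * normalised by the FFT length as in the non-FIXED_POINT output path. */
static double
bin_magnitude (double re, double im, int block_size)
{
  return sqrt (re * re + im * im) / (double) block_size;
}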
Example #4
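The current variant for the GstAudioVisualizer base class: the video side arrives as an already-mapped GstVideoFrame, so only the audio buffer has to be mapped; width and height come from the negotiated GstVideoInfo, and the bin height is now derived from the true magnitude (sqrt) instead of the squared value used in the earlier versions.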
static gboolean
gst_spectra_scope_render (GstAudioVisualizer * bscope, GstBuffer * audio,
    GstVideoFrame * video)
{
  GstSpectraScope *scope = GST_SPECTRA_SCOPE (bscope);
  gint16 *mono_adata;
  GstFFTS16Complex *fdata = scope->freq_data;
  guint x, y, off, l;
  guint w = GST_VIDEO_INFO_WIDTH (&bscope->vinfo);
  guint h = GST_VIDEO_INFO_HEIGHT (&bscope->vinfo) - 1;
  gfloat fr, fi;
  GstMapInfo amap;
  guint32 *vdata;
  gint channels;

  gst_buffer_map (audio, &amap, GST_MAP_READ);
  vdata = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (video, 0);

  channels = GST_AUDIO_INFO_CHANNELS (&bscope->ainfo);

  mono_adata = (gint16 *) g_memdup (amap.data, amap.size);

  if (channels > 1) {
    guint ch = channels;
    guint num_samples = amap.size / (ch * sizeof (gint16));
    guint i, c, v, s = 0;

    /* deinterleave and mixdown adata */
    for (i = 0; i < num_samples; i++) {
      v = 0;
      for (c = 0; c < ch; c++) {
        v += mono_adata[s++];
      }
      mono_adata[i] = v / ch;
    }
  }

  /* run fft */
  gst_fft_s16_window (scope->fft_ctx, mono_adata, GST_FFT_WINDOW_HAMMING);
  gst_fft_s16_fft (scope->fft_ctx, mono_adata, fdata);
  g_free (mono_adata);

  /* draw lines */
  for (x = 0; x < w; x++) {
    /* figure out the range so that we don't need to clip,
     * or even better do a log mapping? */
    fr = (gfloat) fdata[1 + x].r / 512.0;
    fi = (gfloat) fdata[1 + x].i / 512.0;
    y = (guint) (h * sqrt (fr * fr + fi * fi));
    if (y > h)
      y = h;
    y = h - y;
    off = (y * w) + x;
    vdata[off] = 0x00FFFFFF;
    for (l = y; l < h; l++) {
      off += w;
      add_pixel (&vdata[off], 0x007F7F7F);
    }
    /* ensure bottom line is full bright (especially in move-up mode) */
    add_pixel (&vdata[off], 0x007F7F7F);
  }
  gst_buffer_unmap (audio, &amap);
  return TRUE;
}
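All four render variants call an add_pixel() helper that is not shown in these listings; in the upstream visualizer sources it is a drawing macro that adds the colour to the existing pixel with per-channel saturation. A rough, illustrative function equivalent of that assumed behaviour:

/* Illustrative stand-in for the upstream add_pixel() drawing macro:
 * saturating per-channel add of a 0x00RRGGBB colour onto the pixel. */
static inline void
add_pixel (guint32 * p, guint32 c)
{
  guint r = ((*p >> 16) & 0xff) + ((c >> 16) & 0xff);
  guint g = ((*p >> 8) & 0xff) + ((c >> 8) & 0xff);
  guint b = (*p & 0xff) + (c & 0xff);

  *p = (*p & 0xff000000) |
      (MIN (r, 0xff) << 16) | (MIN (g, 0xff) << 8) | MIN (b, 0xff);
}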