コード例 #1
0
ファイル: ld_cluster_curv.c プロジェクト: abachrach/csm
/* Cluster the scan rays of `ld` by curvature: after an initial simple
 * clustering, iteratively locate the `npeaks` strongest peaks of the
 * (smoothed) orientation derivative — i.e. corners — invalidate the rays
 * around each peak, and split the affected cluster in two so the pieces
 * on either side of the corner get distinct cluster ids.
 *
 * Reads  : ld->valid, ld->theta, ld->readings, ld->nrays
 * Writes : ld->cluster, ld->valid, ld->readings (rays near peaks are
 *          invalidated and their reading set to NAN), and — via
 *          ld_compute_orientation — ld->alpha / ld->alpha_valid.
 * The jj_* calls are optional journaling, active only when JJ is set. */
void ld_cluster_curv(LDP ld) {
	int min_cluster_size = 10;        /* clusters smaller than this are dropped */
	double sigma = 0.005;             /* assumed sensor noise (same unit as readings) */
	int orientation_neighbours = 4;   /* neighbours used to estimate ray orientation */
	int npeaks = 5;                   /* number of curvature peaks (corners) to remove */
	double near_peak_threshold = 0.4; /* a ray belongs to a peak while its derivative
	                                     exceeds this fraction of the peak value */

	if(JJ) jj_context_enter("ld_cluster_curv");
	int n = ld->nrays;
	
	
	if(JJ) jj_add_int_array("a00valid", ld->valid, n);
	if(JJ) jj_add_double_array("a01theta", ld->theta, n);
	if(JJ) jj_add_double_array("a02readings", ld->readings, n);
	
	
	ld_simple_clustering(ld, sigma*5);
	
	
	if(JJ) jj_add_int_array("a04cluster", ld->cluster, n);
	ld_remove_small_clusters(ld, min_cluster_size);
	ld_mark_cluster_as_invalid(ld, -1);
	if(JJ) jj_add_int_array("a06cluster", ld->cluster, n);
	
	/* Convolution kernels: `filter` smooths alpha, `deriv_filter` is an
	   (asymmetric) derivative estimate applied to the smoothed signal. */
	double filter[10] = {.5, .4, .3, .2, .2, .2, .2, .2, .2, .2};
	double deriv_filter[7] = {0, .6, .3, .2, .2, .2, .1};
	double smooth_alpha[n];
	double deriv_alpha[n];

	int p;
	if(JJ) jj_loop_enter("it");
	
	for(p=0;p<npeaks;p++) {  if(JJ) jj_loop_iteration();
		
		if(JJ) jj_add_int_array("cluster", ld->cluster, n);

		ld_compute_orientation(ld, orientation_neighbours, sigma);
		
		/* Rays without a valid orientation cannot contribute to the
		   derivative signal; drop them from their cluster. */
		int i;
		for(i=0;i<ld->nrays;i++) 
			if(!ld->alpha_valid[i])
			ld->cluster[i] = -1;
		
		if(JJ) jj_add_double_array("alpha", ld->alpha, n);
		cluster_convolve(ld->cluster, ld->alpha, n, smooth_alpha, filter, 10, 0);
		if(JJ) jj_add_int_array("alpha_valid", ld->alpha_valid, n);

		if(JJ) jj_add_double_array("smooth_alpha", smooth_alpha, n);
		cluster_convolve(ld->cluster, smooth_alpha, n, deriv_alpha, deriv_filter, 7, 1);
		if(JJ) jj_add_double_array("deriv_alpha", deriv_alpha, n);
		array_abs(deriv_alpha, n);
		
		int peak = cluster_find_max(ld->cluster, deriv_alpha, n);
		if(JJ) jj_add_int("peak", peak);
		
		/* Walk outward from the peak, in both directions, while we stay
		   inside the peak's cluster and the derivative stays above the
		   threshold; everything in [down, up] is considered "the corner". */
		int peak_cluster = ld->cluster[peak];
		int up = peak; double threshold = near_peak_threshold  * deriv_alpha[peak];
		while(up<n-1 && (ld->cluster[up]==peak_cluster) && deriv_alpha[up+1] >  threshold) up++;
		int down = peak;
		/* BUG FIX: the original tested ld->cluster[up] here (copy-paste from
		   the upward loop), so the downward walk ignored cluster boundaries
		   below the peak. Also use down>0 instead of down>1 so that
		   deriv_alpha[0] can be examined (down-1 stays in bounds). */
		while(down>0  && (ld->cluster[down]==peak_cluster) && deriv_alpha[down-1] > threshold) down--;
		int j;
		/* Invalidate the corner rays entirely. */
		for(j=down;j<=up;j++) {
			ld->cluster[j] = -1;
			ld->valid[j] = 0;
			ld->readings[j] = NAN;
		}
		
		/* Split the cluster: rays above the removed corner move to a
		   fresh cluster id, so the two sides become distinct clusters. */
		int next_cluster = ld_max_cluster_id(ld) + 1;
		for(j = up+1; j<ld->nrays; j++) {
			if(ld->cluster[j] == peak_cluster)
				ld->cluster[j] = next_cluster;
		}
	}
	if(JJ) jj_loop_exit();

	if(JJ) jj_context_exit();
}
コード例 #2
0
ファイル: scale_by.c プロジェクト: berndf/avg_q
/*{{{  scale_by(transform_info_ptr tinfo)*/
/* Scale tinfo's data in place; the scaling factor is selected by
 * local_arg->type.  Two orientations exist: "map" operations transpose
 * indata so each vector is a map across channels; "channel" operations
 * leave it so each vector is one channel's time course.  Returns
 * tinfo->tsdata (data is modified in place via array_scale). */
METHODDEF DATATYPE *
scale_by(transform_info_ptr tinfo) {
 struct scale_by_storage *local_arg=(struct scale_by_storage *)tinfo->methods->local_storage;
 DATATYPE factor;
 int itempart;
 array indata;

 tinfo_array(tinfo, &indata);
 /* Phase 1: pick the data orientation and precompute factors that are
  * constant for the whole dataset. */
 switch (local_arg->type) {
  /* Operations which are done on maps */
  case SCALE_BY_XDATA:
  case SCALE_BY_INVXDATA:
   if (tinfo->xdata==NULL) create_xaxis(tinfo, NULL);
   /* fallthrough: xdata scaling is a map operation like those below */
  case SCALE_BY_NORMALIZE:
  case SCALE_BY_INVNORM:
  case SCALE_BY_INVSQUARENORM:
  case SCALE_BY_INVSUM:
  case SCALE_BY_INVMAX:
  case SCALE_BY_INVMAXABS:
  case SCALE_BY_INVQUANTILE:
   array_transpose(&indata);	/* Vectors are maps */
   if (local_arg->have_channel_list) {
    ERREXIT(tinfo->emethods, "scale_by: Channel subsets are not supported for map operations.\n");
   }
   break;

  /* Operations which are done on channels */
  case SCALE_BY_INVPOINTNORM:
  case SCALE_BY_INVPOINTSQUARENORM:
  case SCALE_BY_INVPOINTSUM:
  case SCALE_BY_INVPOINTMAX:
  case SCALE_BY_INVPOINTMAXABS:
  case SCALE_BY_INVPOINTQUANTILE:
  case SCALE_BY_FACTOR:
   break;

  /* Operations which involve a special but constant factor */
  case SCALE_BY_PI:
   local_arg->factor= M_PI;
   break;
  case SCALE_BY_INVPI:
   local_arg->factor= 1.0/M_PI;
   break;
  case SCALE_BY_SFREQ:
   local_arg->factor= tinfo->sfreq;
   break;
  case SCALE_BY_INVSFREQ:
   local_arg->factor= 1.0/tinfo->sfreq;
   break;
  case SCALE_BY_NR_OF_POINTS:
   local_arg->factor= (tinfo->data_type==FREQ_DATA ? tinfo->nroffreq : tinfo->nr_of_points);
   break;
  case SCALE_BY_INVNR_OF_POINTS:
   local_arg->factor= 1.0/(tinfo->data_type==FREQ_DATA ? tinfo->nroffreq : tinfo->nr_of_points);
   break;
  case SCALE_BY_NR_OF_CHANNELS:
   local_arg->factor= tinfo->nr_of_channels;
   break;
  case SCALE_BY_INVNR_OF_CHANNELS:
   local_arg->factor= 1.0/tinfo->nr_of_channels;
   break;
  case SCALE_BY_NROFAVERAGES:
   local_arg->factor= tinfo->nrofaverages;
   break;
  case SCALE_BY_INVNROFAVERAGES:
   local_arg->factor= 1.0/tinfo->nrofaverages;
   break;
  case SCALE_BY_SQRTNROFAVERAGES:
   local_arg->factor= sqrt(tinfo->nrofaverages);
   break;
  case SCALE_BY_INVSQRTNROFAVERAGES:
   local_arg->factor= 1.0/sqrt(tinfo->nrofaverages);
   break;
 }

 /* Phase 2: for each requested item, walk all vectors, compute the
  * per-vector factor where needed, and scale the vector. */
 for (itempart=local_arg->fromitem; itempart<=local_arg->toitem; itempart++) {
  array_use_item(&indata, itempart);
  do {
   /* Channel subsets only reach here for channel operations (map
    * operations ERREXIT above when have_channel_list is set). */
   if (local_arg->have_channel_list && !is_in_channellist(indata.current_vector+1, local_arg->channel_list)) {
    array_nextvector(&indata);
    continue;
   }
   switch (local_arg->type) {
    /* In the measuring cases below, the array_* reduction advances
     * current_vector, so array_previousvector rewinds to let
     * array_scale act on the vector that was just measured —
     * presumably; confirm against the array library's scan semantics. */
    case SCALE_BY_NORMALIZE:
    case SCALE_BY_INVNORM:
    case SCALE_BY_INVPOINTNORM:
     factor=array_abs(&indata);
     if (factor==0.0) factor=1.0;	/* avoid division by zero on all-zero vectors */
     factor=1.0/factor;
     array_previousvector(&indata);
     break;
    case SCALE_BY_INVSQUARENORM:
    case SCALE_BY_INVPOINTSQUARENORM:
     factor=array_square(&indata);
     if (factor==0.0) factor=1.0;
     factor=1.0/factor;
     array_previousvector(&indata);
     break;
    case SCALE_BY_INVSUM:
    case SCALE_BY_INVPOINTSUM:
     factor=array_sum(&indata);
     if (factor==0.0) factor=1.0;
     factor=1.0/factor;
     array_previousvector(&indata);
     break;
    case SCALE_BY_INVMAX:
    case SCALE_BY_INVPOINTMAX:
     factor=array_max(&indata);
     if (factor==0.0) factor=1.0;
     factor=1.0/factor;
     array_previousvector(&indata);
     break;
    case SCALE_BY_INVMAXABS:
    case SCALE_BY_INVPOINTMAXABS: {
     /* No array_absmax helper: scan the vector manually for max(|x|). */
     DATATYPE amax=fabs(array_scan(&indata)), hold;
     while (indata.message==ARRAY_CONTINUE) {
      hold=fabs(array_scan(&indata));
      if (hold>amax) amax=hold;
     }
     factor=amax;
     }
     if (factor==0.0) factor=1.0;
     factor=1.0/factor;
     array_previousvector(&indata);
     break;
    case SCALE_BY_INVQUANTILE:
    case SCALE_BY_INVPOINTQUANTILE:
     /* NOTE(review): unlike the norm/sum/max cases there is no
      * array_previousvector here — presumably array_quantile does not
      * advance current_vector; verify against its implementation. */
     factor=array_quantile(&indata,local_arg->factor);
     if (factor==0.0) factor=1.0;
     factor=1.0/factor;
     break;
    case SCALE_BY_XDATA:
     /* indata was transposed above, so current_vector indexes points. */
     factor=tinfo->xdata[indata.current_vector];
     break;
    case SCALE_BY_INVXDATA:
     factor=1.0/tinfo->xdata[indata.current_vector];
     break;
    /* All constant-factor types were resolved in phase 1. */
    case SCALE_BY_PI:
    case SCALE_BY_INVPI:
    case SCALE_BY_SFREQ:
    case SCALE_BY_INVSFREQ:
    case SCALE_BY_NR_OF_POINTS:
    case SCALE_BY_INVNR_OF_POINTS:
    case SCALE_BY_NR_OF_CHANNELS:
    case SCALE_BY_INVNR_OF_CHANNELS:
    case SCALE_BY_NROFAVERAGES:
    case SCALE_BY_INVNROFAVERAGES:
    case SCALE_BY_SQRTNROFAVERAGES:
    case SCALE_BY_INVSQRTNROFAVERAGES:
    case SCALE_BY_FACTOR:
     factor=local_arg->factor;
     break;
    default:
     continue;
   }
   array_scale(&indata, factor);
  } while (indata.message!=ARRAY_ENDOFSCAN);
 }

 return tinfo->tsdata;
}