Example #1
/*
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%                                                                             %
%                                                                             %
%     M o d u l a t e I m a g e                                               %
%                                                                             %
%                                                                             %
%                                                                             %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
%  Method ModulateImage modulates the hue, saturation, and brightness of an
%  image.
%
%  The format of the ModulateImage method is:
%
%      void ModulateImage(Image *image,const char *modulate)
%
%  A description of each parameter follows:
%
%    o image: The address of a structure of type Image;  returned from
%      ReadImage.
%
%    o modulate: A character string indicating the percent change in brightness,
%      saturation, and hue in floating point notation separated by commas
%      (e.g. 10.1,0.0,3.1).
%
%
*/
Export void ModulateImage(Image *image,const char *modulate)
{
#define ModulateImageText  "  Modulating image...  "

  double
    percent_brightness,
    percent_hue,
    percent_saturation;

  int
    y;

  register int
    i,
    x;

  register PixelPacket
    *q;

  /*
    Initialize modulate parameters.
  */
  assert(image != (Image *) NULL);
  if (modulate == (char *) NULL)
    return;
  percent_hue=0.0;
  percent_brightness=0.0;
  percent_saturation=0.0;
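  /*
    Accept either comma- or slash-separated values; fields that do not
    match keep the values already parsed above.
  */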
  (void) sscanf(modulate,"%lf,%lf,%lf",&percent_brightness,&percent_saturation,
    &percent_hue);
  (void) sscanf(modulate,"%lf/%lf/%lf",&percent_brightness,&percent_saturation,
    &percent_hue);
  switch (image->class)
  {
    case DirectClass:
    default:
    {
      /*
        Modulate the color for a DirectClass image.
      */
      for (y=0; y < (int) image->rows; y++)
      {
        q=GetPixelCache(image,0,y,image->columns,1);
        if (q == (PixelPacket *) NULL)
          break;
        for (x=0; x < (int) image->columns; x++)
        {
          Modulate(percent_hue,percent_saturation,percent_brightness,
            &q->red,&q->green,&q->blue);
          q++;
        }
        if (!SyncPixelCache(image))
          break;
        if (QuantumTick(y,image->rows))
          ProgressMonitor(ModulateImageText,y,image->rows);
      }
      break;
    }
    case PseudoClass:
    {
      Quantum
        blue,
        green,
        red;

      /*
        Modulate the color for a PseudoClass image.
      */
      for (i=0; i < (int) image->colors; i++)
      {
        red=image->colormap[i].red;
        green=image->colormap[i].green;
        blue=image->colormap[i].blue;
        Modulate(percent_hue,percent_saturation,percent_brightness,
          &red,&green,&blue);
        image->colormap[i].red=red;
        image->colormap[i].green=green;
        image->colormap[i].blue=blue;
      }
      SyncImage(image);
      break;
    }
  }
}
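A minimal usage sketch (not part of the original listing): it assumes an Image * already obtained from ReadImage, as described in the header comment above; the helper name and the modulate string are illustrative only.

/*
  Hypothetical helper: raise brightness by 10% while leaving saturation
  and hue unchanged (values are percent changes, comma-separated).
*/
static void BrightenImage(Image *image)
{
  ModulateImage(image,"10.0,0.0,0.0");
}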
Example #2
//*****************************************************************************
//transmission loop: grab 8KHz speech samples from the Mike,
//resample, collect a frame (540 samples in 67.5 ms), encode,
//encrypt, modulate, play the 48KHz baseband signal into the Line
int tx(int job)
{
 int i,j;

 //loop 1: try to play unplayed samples
 job+=_playjit(); //first, try to play any tail of samples left in the buffer

 //loop 2: try to grab the next 180 samples
 //check the number of grabbed samples
 if(spcnt<540) //we don't have enough samples for the melpe encoder yet
 {
  i=soundgrab((char*)spraw, 180); //grab up to 180 samples
  if((i>0)&&(i<=180)) //if some samples were grabbed
  {
   //Since different audio devices are used on the headset and line sides,
   //the actual sampling rates of the grabbing and playing devices can
   //deviate slightly from the nominal 48/8 ratio depending on the hardware,
   //so one of the rates must be adjusted to keep the grabbing and playing
   //processes synchronized.
   //The line side is more sensitive (the baseband timing requirements are stricter),
   //so the grabbed stream (slave) is resampled to match the rate of the playing
   //stream (master). The adjustment is an iterative approximation and needs
   //several seconds to adapt, during which some 67.5 ms speech frames may be lost.

   //compute the estimated rate from the recording delay measured when the last block was modulated
   j=8000-(_fdelay-27000)/50; //sample rate from the optimal delay and the adjustment sensitivity
   if(j>9000) j=9000; //clamp the resulting sample rate
   if(j<7000) j=7000;

   //change the rate of the grabbed samples to keep the grabbing and playing loops in sync
   i=_resample(spraw, spbuf+spcnt, i, j); //resample and collect speech samples
   spcnt+=i; //number of samples in the buffer awaiting processing
   tgrab+=i; //total difference between grabbed speech and played baseband samples
                //this is effectively the recording delay and should average about 270 samples
                //for jitter protection (PC multi-threading etc.)

   job+=32; //set the job flag
  }
 }
 //check whether we have enough grabbed samples for processing
 if(spcnt>=540) //we have enough samples for the melpe encoder
 {
  if(Mute(0)>0)
  {
   i=vad2(spbuf+10, &vad);  //check whether the frame is speech (by VAD)
   i+=vad2(spbuf+100,&vad);
   i+=vad2(spbuf+190,&vad);
   i+=vad2(spbuf+280,&vad);
   i+=vad2(spbuf+370,&vad);
   i+=vad2(spbuf+460,&vad);
  }
  else i=0;
  
  txbuf[11]=0xFF;   //set the default flag for a voiced frame
  if(i) //frame is voiced: compress it
  {
   melpe_a(txbuf, spbuf); //encode the speech frame
   i=State(1); //set the VAD flag
  }
  else //unvoiced frame: a sync packet will be sent
  {
   txbuf[11]=0xFE; //or set the silence flag for control blocks
   i=State(-1); //clear the VAD flag
  }

  spcnt-=540; //remaining samples
  if(spcnt) memcpy((char*)spbuf, (char*)(spbuf+540), 2*spcnt);  //move the tail to the start of the buffer
  job+=64;
 }

 //Loop 3: playing
 //get the number of unplayed samples in the buffer
 i=_getdelay();
 //prevent the audio output from freezing after an underrun or overrun
 if(i>540*3*6)
 {
  _soundflush1();
  i=_getdelay();
 }
 //check whether the delay is acceptable for playing the next portion of samples
 if(i<720*6)
 {
  if(l__jit_buf) return job; //we still have unplayed samples in the local buffer, don't play now
  MakePkt(txbuf); //encrypt voice or get the actual control packet
  l__jit_buf=Modulate(txbuf, _jit_buf); //modulate the block
  txbuf[11]=0; //clear the tx buffer (processed)
  _playjit();  //immediately play the baseband into the Line

  //estimate the rate change of grabbed samples to keep grabbing and playing synchronized
  _fdelay*=0.99; //smoothing coefficient
  _fdelay+=tgrab;   //average in the recording delay
  tgrab-=540;  //decrease the counter of grabbed samples

  job+=128;
 }

 return job;
}
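The rate-adaptation step in tx() can be read in isolation. Below is a small standalone sketch of the same computation (the function name and the test values are illustrative, not from the source); it assumes _fdelay is the smoothed recording-delay accumulator, whose steady-state value is roughly 100x the ~270-sample average delay mentioned above, i.e. about 27000.

#include <stdio.h>

/* Clamped resample-rate estimate, restated from tx():
   the nominal 8000 Hz grab rate is corrected in proportion to how far
   the smoothed recording delay is from its 27000 optimum (sensitivity
   divisor 50), then clamped to the 7000..9000 Hz range. */
static int estimate_grab_rate(double fdelay)
{
  int rate = 8000 - (int)((fdelay - 27000.0)/50.0);
  if (rate > 9000) rate = 9000; /* clamp the resulting sample rate */
  if (rate < 7000) rate = 7000;
  return rate;
}

int main(void)
{
  /* delay above the optimum -> grab slower; below it -> grab faster */
  printf("%d\n", estimate_grab_rate(27000.0)); /* 8000 */
  printf("%d\n", estimate_grab_rate(32000.0)); /* 7900 */
  printf("%d\n", estimate_grab_rate(22000.0)); /* 8100 */
  return 0;
}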