Example #1
void subsample_image_altivec_verify(SUBSAMPLE_IMAGE_PDECL)
{
    int width, height;
    unsigned long checksum44_1, checksum44_2;
    unsigned long checksum22_1, checksum22_2;
    unsigned char *cpy22, *cpy44;

    width = rowstride;
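    /* the 2x2 subsampled plane is assumed to start right after the
       source image, so (sub22_image - image) is the source size in
       bytes and dividing by rowstride recovers the image height */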
    height = (unsigned long)(sub22_image - image) / rowstride;

    cpy22 = (unsigned char*)malloc((width/2) * (height/2));
    cpy44 = (unsigned char*)malloc((width/4) * (height/4));
    if (cpy22 == NULL || cpy44 == NULL)
	mjpeg_error_exit1("subsample_image: malloc failed");

    subsample_image_altivec(SUBSAMPLE_IMAGE_ARGS);
    checksum22_1 = checksum(sub22_image, width/2, height/2, rowstride/2);
    checksum44_1 = checksum(sub44_image, width/4, height/4, rowstride/4);

    /* copy data for imgcmp */
    imgcpy(cpy22, sub22_image, width/2, height/2, rowstride/2);
    imgcpy(cpy44, sub44_image, width/4, height/4, rowstride/4);

    ALTIVEC_TEST_WITH(subsample_image)(SUBSAMPLE_IMAGE_ARGS);
    checksum22_2 = checksum(sub22_image, width/2, height/2, rowstride/2);
    checksum44_2 = checksum(sub44_image, width/4, height/4, rowstride/4);

    if (checksum22_1 != checksum22_2 || checksum44_1 != checksum44_2) {
	mjpeg_debug("subsample_image(" SUBSAMPLE_IMAGE_PFMT ")",
	    SUBSAMPLE_IMAGE_ARGS);
	if (checksum22_1 != checksum22_2)
	    mjpeg_debug("subsample_image: %s checksums differ %d != %d",
		"2*2", checksum22_1, checksum22_2);
	if (checksum44_1 != checksum44_2)
	    mjpeg_debug("subsample_image: %s checksums differ %d != %d",
		"4*4", checksum44_1, checksum44_2);

	imgcmp("2*2", cpy22, sub22_image, width/2, height/2, rowstride/2);
	imgcmp("4*4", cpy44, sub44_image, width/4, height/4, rowstride/4);
    }

    free(cpy22);
    free(cpy44);
}
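The verify routine above keeps a packed copy of each subsampled plane so it can diff the AltiVec output against the reference implementation. For context, here is a minimal sketch of a row-wise imgcpy consistent with these call sites (destination packed at width bytes per line, source lines rowstride bytes apart); this is an assumption for illustration, not the project's actual helper:

#include <string.h>

/* Assumed semantics of imgcpy(dst, src, width, height, rowstride):
 * copy a width x height block row by row; source lines are rowstride
 * bytes apart, the destination is packed at width bytes per line. */
static void imgcpy(unsigned char *dst, const unsigned char *src,
                   int width, int height, int rowstride)
{
    int y;
    for (y = 0; y < height; y++)
        memcpy(dst + (size_t)y * width, src + (size_t)y * rowstride, width);
}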
Example #2
boolean test_generation_callback(int generation, population *pop){
  double * img = (double *)pop->entity_iarray[0]->chromosome[0];
  int i;
  char buffer[1024];
  real support_threshold;
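  /* the chromosome packs TSIZE(amp) real parts followed by TSIZE(amp)
     imaginary parts; store each pixel's magnitude in real_out */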
  for(i = 0;i<TSIZE(amp);i++){
    real_out->image[i] = sqrt(img[i]*img[i]+img[TSIZE(amp)+i]*img[TSIZE(amp)+i]);
  }
  printf("%d: fitness = %f\n",generation,pop->entity_iarray[0]->fitness);


  if(opts->iterations && opts->cur_iteration%opts->iterations == opts->iterations-1){
    sprintf(buffer,"real_out-%05d.png",opts->cur_iteration);
    write_png(real_out,buffer,COLOR_JET);
    
    freeimg(prev_support);
    prev_support = imgcpy(support);
    freeimg(support);      
    support_threshold = get_newsupport_level(real_out,&support_size,radius,&my_log,opts);
    my_log.threshold = support_threshold;
    if(support_threshold > 0){
      /*	support =  get_newsupport(real_out,support_threshold, radius,opts);*/
      support =  get_filtered_support(real_out,support_threshold, radius,opts);
    }else{
      if(opts->support_update_algorithm == REAL_ERROR_CAPPED){
	exit(0);
      }else{
	abort();
      }
    }
    if(opts->cur_iteration <= opts->iterations_to_min_blur){
      radius = get_blur_radius(opts);
    }
    if(/*opts->cur_iteration > 50 ||*/ (opts->automatic && opts->algorithm == RAAR && my_log.Ereal < 0.2)){
      stop++;
    }
    if(stop > stop_threshold){
      exit(0);
    }
    sprintf(buffer,"support-%05d.png",opts->cur_iteration);    
    write_png(support,buffer,COLOR_JET);
  }
  /* restore original amplitudes */
  for(i = 0;i<TSIZE(amp);i++){
    real_out->image[i] = norm(real_out,i);
  }


  opts->cur_iteration++;
  return TRUE;
}
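Note that in this project imgcpy takes a single argument and returns a newly allocated copy (prev_support = imgcpy(support)), unlike the pixel-buffer copy in Example #1. A minimal sketch of such a clone helper, assuming a hypothetical Image layout whose pixel buffer holds TSIZE(img) real values; the project's actual definition is not part of this excerpt:

#include <stdlib.h>
#include <string.h>

/* Hypothetical clone helper matching the one-argument calls above.
 * Assumes Image carries a real-valued buffer of TSIZE(img) pixels. */
Image * imgcpy(Image * src){
  Image * dst = malloc(sizeof(Image));
  *dst = *src;                                  /* copy header fields */
  dst->image = malloc(TSIZE(src)*sizeof(real));
  memcpy(dst->image,src->image,TSIZE(src)*sizeof(real));
  return dst;
}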
Example #3
bool showRightImage(int index,List* matches,Image rightimage) {
	char buffer[128];
	char filename[128];
	sprintf(filename,"%d.match",index);
	FILE* key_match = fopen(filename,"r");
	if (!key_match) {
		printf("Cannot open %s\n",filename);
		return false;
	}
	matches->size=0;
	while (fgets(buffer,128,key_match)) {
		Match m;
		int id1,id2;
		if (sscanf(buffer,"%d %f %f %d %f %f",&id1,&m.x1,&m.y1,&id2,&m.x2,&m.y2)==6) {
			if (matches->size == matches->capacity) {
				matches->data = realloc(matches->data,matches->capacity*2*sizeof(Match));
				matches->capacity *= 2;
			}
			((Match*)matches->data)[matches->size++] = m;
		}
	}
	printf("Loaded %d matches from %s\n",matches->size,filename);
	fclose(key_match);
	sprintf(filename,"%d.pgm",index);
	FILE* ppm = fopen(filename,"r");
	fgets(buffer,128,ppm); //P5 or P6
	fgets(buffer,128,ppm);
	fgets(buffer,128,ppm); //255
	fread(rightimage.data,1,rightimage.width*rightimage.height,ppm);
	//expand gray levels to RGB in place; iterate backwards so each
	//source byte is read before its destination slots overwrite it
	for (int i=rightimage.width*rightimage.height-1;i>=0;i--) {
		rightimage.data[i*3+2] = rightimage.data[i*3+1] = rightimage.data[i*3] = rightimage.data[i];
	}
	imgcpy(rightimage,rightscreen);
	fclose(ppm);
	return true;
}
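The container types used here are declared elsewhere in the project; their layout can be inferred from this function's field accesses and from the initializers in the mains below (List matches = {0,8,NULL}; Image image = {width,height,palette};). A hypothetical reconstruction:

/* Hypothetical declarations inferred from usage; the project's real
 * headers may differ. */
typedef struct { float x1, y1, x2, y2; } Match;  /* filled by sscanf("%d %f %f %d %f %f") */
typedef struct { int size; int capacity; void *data; } List;  /* {0, 8, NULL} */
typedef struct { int width; int height; unsigned char *data; } Image;  /* {width, height, palette} */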
Example #4
int main(int argc, char* argv[]) {

	if (argc < 2) {
		printf("./mark_image in.ppm/in.pgm\n");
		return 1;
	}

	SDL_Init(SDL_INIT_VIDEO);
	SDL_WM_SetCaption(argv[1],NULL);
	char buffer[128];
	FILE* ppm = fopen(argv[1],"r");
	if (!ppm) {
		printf("%s not found\n",argv[1]);
		return 1;
	}
	fgets(buffer,128,ppm); //P5 or P6
	useColor = strncmp(buffer,"P6",2) == 0;
	fgets(buffer,128,ppm);
	char *c = buffer;
	int width = strtol(c,&c,10);
	int height = strtol(c,&c,10);
	fgets(buffer,128,ppm); //255
	screen = SDL_SetVideoMode(width,height,24,SDL_SWSURFACE);
	palette = malloc(width*height*3);
	if (useColor) {
		fread(palette,1,width*height*3,ppm);
		//swap RGB order
		for (int i=0;i<width*height*3;i+=3) {
			unsigned char tmp = palette[i];
			palette[i] = palette[i+2];
			palette[i+2] = tmp;
		}
	} else {
		fread(palette,1,width*height,ppm);
		//expand gray levels to RGB in place (backwards, so each source
		//byte is read before it is overwritten)
		for (int i=width*height-1;i>=0;i--) {
			palette[i*3+2] = palette[i*3+1] = palette[i*3] = palette[i];
		}
	}
	fclose(ppm);
	Image image = {width,height,palette};
	Image screenImage = {width,height,screen->pixels};
	Color red = {255,0,0};
	if (useColor)
		strcpy(strstr(argv[1],".ppm"),".rpt");
	else
		strcpy(strstr(argv[1],".pgm"),".rpt");
	FILE* point_file = fopen(argv[1],"w");
	if (!point_file) {
		printf("%s not found\n",argv[1]);
		return 1;
	}
	double cx = 0.5 * (width-1);
	double cy = 0.5 * (height-1);
	imgcpy(image,screen);
	SDL_Flip(screen);

	SDL_Event event;
	while (true) {
		while (SDL_PollEvent(&event)) {
			switch(event.type){
				case SDL_MOUSEBUTTONDOWN:
					if (event.button.button == SDL_BUTTON_LEFT) {
						fprintf(point_file,"%f %f\n",(event.button.x-cx)/width,(event.button.y-cy)/height);
						drawKeyPoint(image,event.button.x,event.button.y,red);
						imgcpy(image,screen);
						SDL_Flip(screen);
					}
					break;
				case SDL_MOUSEMOTION:
					break;
				case SDL_MOUSEBUTTONUP:
					break;
				case SDL_QUIT:
					exit(0);
					break;
			}
		}
		usleep(1000);
	}

	free(palette);

}
Example #5
void genetic_reconstruction(Image * _amp, Image * initial_support, Image * _exp_sigma,
			     Options * _opts, char * dir){

  char prev_dir[1024];
  real support_threshold = _opts->new_level;
  population *pop;			/* Population of solutions. */

  random_seed(23091975);
  stop_threshold = 10;
  stop = 0;
  support_size = -support_threshold;
  opts = _opts;
  amp = _amp;
  exp_sigma = _exp_sigma;
  
  init_log(&my_log);
  my_log.threshold = support_threshold;
  opts->cur_iteration = 0;
  opts->flog = NULL;
  if(opts->automatic){
    opts->algorithm = HIO;
  }
  
  support = imgcpy(initial_support);
  prev_support = imgcpy(initial_support);

  /* Set the initial guess */
  if(opts->image_guess){
    real_in = imgcpy(opts->image_guess);
  }else{
    real_in = imgcpy(support);
  }

  /* make sure we make the input complex */
  rephase(real_in);
  
  /* Set random phases if needed */
  if(opts->rand_phases){
    /*    set_rand_phases(real_in,img);*/
    set_rand_ints(real_in,amp);
  }

  getcwd(prev_dir,1024);
  mkdir(dir,0755);
  chdir(dir);
  write_png(support,"support.png",COLOR_JET);
  write_png(real_in,"initial_guess.png",COLOR_JET);
  write_png(initial_support,"initial_support.png",COLOR_JET);

  if(get_algorithm(opts,&my_log) == HIO){     
    real_out = basic_hio_iteration(amp, real_in, support,opts,&my_log);
  }else if(get_algorithm(opts,&my_log) == RAAR){
    real_out = basic_raar_iteration(amp,exp_sigma, real_in, support,opts,&my_log);
  }else if(get_algorithm(opts,&my_log) == HPR){
    real_out = basic_hpr_iteration(amp, real_in, support,opts,&my_log);
  }else{
    fprintf(stderr,"Error: Undefined algorithm!\n");
    exit(-1);
  }

  radius = opts->max_blur_radius;

  
  pop = ga_genesis_double(
			  3,			/* const int              population_size */
			  1,			/* const int              num_chromo */
			  TSIZE(amp)*2,	/* const int              len_chromo */
			  test_generation_callback,/* GAgeneration_hook      generation_hook */
			  NULL,			/* GAiteration_hook       iteration_hook */
			  NULL,			/* GAdata_destructor      data_destructor */
			  NULL,			/* GAdata_ref_incrementor data_ref_incrementor */
			  test_score,		/* GAevaluate             evaluate */
			  test_seed,		/* GAseed                 seed */
			  test_adaptation,	/* GAadapt                adapt */
			  ga_select_one_bestof2,	/* GAselect_one           select_one */
			  ga_select_two_bestof2,	/* GAselect_two           select_two */
			  ga_mutate_double_singlepoint_drift,	/* GAmutate               mutate */
			  ga_crossover_double_doublepoints,	/* GAcrossover            crossover */
			  NULL,			/* GAreplace              replace */
			  NULL			/* vpointer	User data */
			  );


  ga_population_set_parameters(
       pop,				/* population      *pop */
       GA_SCHEME_LAMARCK_ALL,		/* const ga_scheme_type     scheme */
       GA_ELITISM_PARENTS_SURVIVE,	/* const ga_elitism_type   elitism */
       0.8,				/* double  crossover */
       0.2,				/* double  mutation */
       0.0      		        /* double  migration */
                              );

  ga_evolution(
       pop,				/* population	*pop */
       500				/* const int	max_generations */
              );

  ga_extinction(pop);
  exit(EXIT_SUCCESS);
}
Example #6
void CMediaSource::ProcessVideoYUVFrame(
					u_int8_t* pY,
					u_int8_t* pU,
					u_int8_t* pV,
					u_int16_t yStride,
					u_int16_t uvStride,
					Timestamp srcFrameTimestamp)
{
  if (m_videoSrcFrameNumber == 0) {
    if (m_audioSrcFrameNumber == 0) {
      m_encodingStartTimestamp = GetTimestamp();
    }
    m_videoStartTimestamp = srcFrameTimestamp;
  }

  m_videoSrcFrameNumber++;
  m_videoSrcElapsedDuration = srcFrameTimestamp - m_videoStartTimestamp;

#ifdef DEBUG_VIDEO_SYNC
  debug_message("vsrc# %d srcDuration="U64" dst# %d dstDuration "U64,
                m_videoSrcFrameNumber, m_videoSrcElapsedDuration,
                m_videoDstFrameNumber, m_videoDstElapsedDuration);
#endif

  // destination gets ahead of source
  // drop src frames as needed to match target frame rate
  if (m_videoSrcElapsedDuration + m_videoDstFrameDuration < m_videoDstElapsedDuration) {
#ifdef DEBUG_VIDEO_SYNC
    debug_message("video: dropping frame, SrcElapsedDuration="U64" DstElapsedDuration="U64,
                  m_videoSrcElapsedDuration, m_videoDstElapsedDuration);
#endif
    return;
  }

  Duration lag = m_videoSrcElapsedDuration - m_videoDstElapsedDuration;

  // source gets ahead of destination
  if (lag > 3 * m_videoDstFrameDuration) {
    debug_message("lag "D64" src "U64" dst "U64,
		  lag, m_videoSrcElapsedDuration, m_videoDstElapsedDuration);
    int j = (lag - (2 * m_videoDstFrameDuration)) / m_videoDstFrameDuration;
    m_videoDstFrameNumber += j;
    m_videoDstElapsedDuration = VideoDstFramesToDuration();
    debug_message("video: advancing dst by %d frames", j);
  }

  // Disabled since we are not taking into account audio drift anymore
  // and the new algorithm automatically factors in any drift due
  // to video encoding
  /*
    // add any external drift (i.e. audio encoding drift)
    //to our drift measurement
    m_videoEncodingDrift += m_otherTotalDrift - m_otherLastTotalDrift;
    m_otherLastTotalDrift = m_otherTotalDrift;

    // check if the video encoding drift exceeds the max limit
    if (m_videoEncodingDrift >= m_videoEncodingMaxDrift) {
      // we skip multiple destination frames to give audio
      // a better chance to keep up
      // on subsequent calls, we will return immediately until
      // m_videoSrcElapsedDuration catches up with m_videoDstElapsedDuration
      int framesToSkip = m_videoEncodingDrift / m_videoDstFrameDuration;
      m_videoEncodingDrift -= framesToSkip * m_videoDstFrameDuration;
      m_videoDstFrameNumber += framesToSkip;
      m_videoDstElapsedDuration = VideoDstFramesToDuration();

      debug_message("video: will skip %d frames due to encoding drift", framesToSkip);

      return;
    }
  */

  m_videoEncodedFrames++;
  m_videoDstFrameNumber++;
  m_videoDstElapsedDuration = VideoDstFramesToDuration();

  //Timestamp encodingStartTimestamp = GetTimestamp();

  // this will either never happen (live capture)
  // or just happen once at startup when we discover
  // the stride used by the video decoder
  if (yStride != m_videoSrcYStride) {
    SetVideoSrcSize(m_videoSrcWidth, m_videoSrcHeight, 
		    yStride, m_videoMatchAspectRatios);
  }

  u_int8_t* mallocedYuvImage = NULL;

  // crop to desired aspect ratio (may be a no-op)
  u_int8_t* yImage = pY + m_videoSrcYCrop;
  u_int8_t* uImage = pU + m_videoSrcUVCrop;
  u_int8_t* vImage = pV + m_videoSrcUVCrop;

  // resize image if necessary
  if (m_videoYResizer) {
    u_int8_t* resizedYUV = (u_int8_t*)Malloc(m_videoDstYUVSize);
		
    u_int8_t* resizedY = resizedYUV;
    u_int8_t* resizedU = resizedYUV + m_videoDstYSize;
    u_int8_t* resizedV = resizedYUV + m_videoDstYSize + m_videoDstUVSize;

    m_videoSrcYImage->data = yImage;
    m_videoDstYImage->data = resizedY;
    scale_image_process(m_videoYResizer);

    m_videoSrcUVImage->data = uImage;
    m_videoDstUVImage->data = resizedU;
    scale_image_process(m_videoUVResizer);

    m_videoSrcUVImage->data = vImage;
    m_videoDstUVImage->data = resizedV;
    scale_image_process(m_videoUVResizer);

    // done with the original source image
    if (mallocedYuvImage) free(mallocedYuvImage);

    // switch over to resized version
    mallocedYuvImage = resizedYUV;
    yImage = resizedY;
    uImage = resizedU;
    vImage = resizedV;
    yStride = m_videoDstWidth;
    uvStride = yStride / 2;
  }

  if (m_videoFilterInterlace) {
    video_filter_interlace(yImage, yImage + m_videoDstYSize, yStride);
  }
  // if we want encoded video frames
  if (m_pConfig->m_videoEncode) {
    bool rc = m_videoEncoder->EncodeImage(
					  yImage, uImage, vImage, 
					  yStride, uvStride,
					  m_videoWantKeyFrame,
					  m_videoDstElapsedDuration,
					  srcFrameTimestamp);

    if (!rc) {
      debug_message("Can't encode image!");
      if (mallocedYuvImage) free(mallocedYuvImage);
      return;
    }

#ifdef DEBUG_VCODEC_SHADOW
    m_videoEncoderShadow->EncodeImage(
                                      yImage, uImage, vImage,
                                      yStride, uvStride,
                                      m_videoWantKeyFrame);
    //Note: we don't retrieve encoded frame from shadow
#endif

    m_videoWantKeyFrame = false;
  }

  // forward encoded video to sinks
  if (m_pConfig->m_videoEncode) {
    uint8_t *frame;
    uint32_t frame_len;
    bool got_image;
    Timestamp pts, dts;
    got_image = m_videoEncoder->GetEncodedImage(&frame,
						&frame_len,
						&dts,
						&pts);
    if (got_image) {
      //error_message("frame len %d time %llu", frame_len, out);
      CMediaFrame* pFrame = new CMediaFrame(
					    m_videoEncoder->GetFrameType(),
					    frame,
					    frame_len,
					    dts,
					    m_videoDstFrameDuration,
					    TimestampTicks,
					    pts);
      pFrame->SetMediaFreeFunction(m_videoEncoder->GetMediaFreeFunction());
      ForwardFrame(pFrame);
    }
  }

  // forward raw video to sinks
  if (m_pConfig->SourceRawVideo() ||
      m_pConfig->GetBoolValue(CONFIG_FEEDER_SINK_ENABLE)) {

    m_videoDstPrevImage = (u_int8_t*)Malloc(m_videoDstYUVSize);

    imgcpy(m_videoDstPrevImage, 
	   yImage, 
	   m_videoDstWidth,
	   m_videoDstHeight,
	   yStride);
    imgcpy(m_videoDstPrevImage + m_videoDstYSize,
	   uImage, 
	   m_videoDstWidth / 2,
	   m_videoDstHeight / 2,
	   uvStride);
    imgcpy(m_videoDstPrevImage + m_videoDstYSize + m_videoDstUVSize,
	   vImage, 
	   m_videoDstWidth / 2,
	   m_videoDstHeight / 2,
	   uvStride);

    CMediaFrame* pFrame =
      new CMediaFrame(
                      YUVVIDEOFRAME, 
                      m_videoDstPrevImage, 
                      m_videoDstYUVSize,
                      srcFrameTimestamp, 
                      m_videoDstFrameDuration);
    ForwardFrame(pFrame);
  }

  // forward reconstructed video to sinks
  if (m_pConfig->m_videoEncode
      && m_pConfig->GetBoolValue(CONFIG_VIDEO_ENCODED_PREVIEW)) {

    m_videoDstPrevReconstructImage = (u_int8_t*)Malloc(m_videoDstYUVSize);

    m_videoEncoder->GetReconstructedImage(
					  m_videoDstPrevReconstructImage,
					  m_videoDstPrevReconstructImage
					  + m_videoDstYSize,
					  m_videoDstPrevReconstructImage
					  + m_videoDstYSize + m_videoDstUVSize);

    CMediaFrame* pFrame = new CMediaFrame(RECONSTRUCTYUVVIDEOFRAME,
                                          m_videoDstPrevReconstructImage,
                                          m_videoDstYUVSize,
                                          srcFrameTimestamp,
                                          m_videoDstFrameDuration);
    ForwardFrame(pFrame);
  }

  // Disabled since we are not taking into account audio drift anymore
  /*
  // update the video encoding drift
  if (m_sourceRealTime) {
    Duration drift = GetTimestamp() - encodingStartTimestamp;
    if (drift > m_videoDstFrameDuration) {
      m_videoEncodingDrift += drift - m_videoDstFrameDuration;
    } else {
      drift = m_videoDstFrameDuration - drift;
      if (m_videoEncodingDrift > drift) {
	m_videoEncodingDrift -= drift;
      } else {
	m_videoEncodingDrift = 0;
      }
    }
  }
  */

  if (mallocedYuvImage) free(mallocedYuvImage);
}
Example #7
int main(int argc, char* argv[]) {

	if (argc < 2) {
		printf("./match_image target_point.txt target.pgm [0.pgm ..]\n");
		return 1;
	}

	SDL_Init(SDL_INIT_VIDEO);
	SDL_WM_SetCaption("match_image",NULL);
	char buffer[128];
	target_point = fopen(argv[1],"w");
	if (!target_point) {
		printf("Cannot open %s for writing\n",argv[1]);
		return 1;
	}
	FILE* ppm = fopen(argv[2],"r");
	if (!ppm) {
		printf("%s not found\n",argv[2]);
		return 1;
	}
	fgets(buffer,128,ppm); //P5 or P6
	do {
		fgets(buffer,128,ppm);
	} while (buffer[0]=='#'); //remove comments
	char *c = buffer;
	width = strtol(c,&c,10);
	height = strtol(c,&c,10);
	fgets(buffer,128,ppm); //255
	screen = SDL_SetVideoMode(width*2,height,24,SDL_SWSURFACE);
	leftscreen = SDL_CreateRGBSurface(0,width,height,24,0xFF0000,0xFF00,0xFF,0);
	rightscreen = SDL_CreateRGBSurface(0,width,height,24,0xFF0000,0xFF00,0xFF,0);
	Image leftimage = {width,height,NULL};
	Image rightimage = {width,height,NULL};
	leftimage.data = malloc(width*height*3);
	rightimage.data = malloc(width*height*3);
	leftrect.x = 0; leftrect.y = 0; leftrect.w = width; leftrect.h = height;
	rightrect.x = width; rightrect.y = 0; rightrect.w = width; rightrect.h = height;
	cx = 0.5 * (width-1);
	cy = 0.5 * (height-1);

	fread(leftimage.data,1,width*height,ppm);
	//expand gray levels to RGB in place (backwards, so each source
	//byte is read before it is overwritten)
	for (int i=width*height-1;i>=0;i--) {
		leftimage.data[i*3+2] = leftimage.data[i*3+1] = leftimage.data[i*3] = leftimage.data[i];
	}
	fclose(ppm);
	Color yellow = {255,255,0};
	List matches = {0,8,NULL};
	List rectList = {0,8,NULL};
	matches.data = malloc(8*sizeof(Match));
	rectList.data = calloc(8,sizeof(SDL_Rect));
	SDL_Rect* currentRect = rectList.data;

	imgcpy(leftimage,leftscreen);
	SDL_BlitSurface(leftscreen,NULL,screen,&leftrect);
	showRightImage(targetIndex++,&matches,rightimage);
	SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
	SDL_Flip(screen);

	SDL_Event event;
	while (true) {
		while (SDL_PollEvent(&event)) {
			switch(event.type){
				case SDL_KEYDOWN:
					switch( event.key.keysym.sym ){
						case 'b':
						if (showRightImage(targetIndex--,&matches,rightimage)) {
							SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
							SDL_Flip(screen);
						}
						break;
						case 'n':
						if (showRightImage(targetIndex++,&matches,rightimage)) {
							SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
							SDL_Flip(screen);
						}
						break;
						case 'm':
						findCorrespondence(matches,rectList);
						SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
						SDL_Flip(screen);
						break;
						case 'v':
						findCorrespondence(matches,rectList);
						while (showRightImage(targetIndex++,&matches,rightimage)) {
							findCorrespondence(matches,rectList);
							SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
							SDL_Flip(screen);
						}
						break;
						default:
						break;
					}
					break;
				case SDL_MOUSEBUTTONDOWN:
					if (event.button.button == SDL_BUTTON_LEFT) {
						mouseDrag = true;
						previousX = event.button.x;
						previousY = event.button.y;
						rectList.size++;
					}
					break;
				case SDL_MOUSEMOTION:
					if (mouseDrag) {
						imgcpy(leftimage,leftscreen);
						*currentRect = getVarRect(previousX,previousY,event.motion.x,event.motion.y);
						for (int i=0;i<rectList.size;i++)
							drawRect(leftscreen,((SDL_Rect*)rectList.data)[i],yellow);
						SDL_BlitSurface(leftscreen,NULL,screen,&leftrect);
						SDL_Flip(screen);
					}
					break;
				case SDL_MOUSEBUTTONUP:
					imgcpy(rightimage,rightscreen);
					SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
					if (mouseDrag) {
						mouseDrag = false;
						SDL_BlitSurface(rightscreen,NULL,screen,&rightrect);
						currentRect++;
					} else {
						imgcpy(leftimage,leftscreen);
						SDL_BlitSurface(leftscreen,NULL,screen,&leftrect);
						currentRect->w = 0;
						currentRect->h = 0;
					}
					SDL_Flip(screen);
					break;
				case SDL_QUIT:
					exit(0);
					break;
			}
		}
		usleep(1000);
	}

	fclose(target_point);
	free(leftimage.data);
	free(rightimage.data);

}