Esempio n. 1
0
bool saveToDirectory(string directory, int i_update, const DistributionGaussian& distribution, const Rollout* rollout_eval, const vector<Rollout*>& rollouts, const VectorXd& weights, const DistributionGaussian& distribution_new, bool overwrite)
{
  // Convenience overload for the non-parallel case: wrap the two single
  // distributions in one-element vectors and delegate to the vector overload.
  vector<DistributionGaussian> cur_distributions(1, distribution);
  vector<DistributionGaussian> new_distributions(1, distribution_new);
  return saveToDirectory(directory, i_update, cur_distributions, rollout_eval, rollouts, weights, new_distributions, overwrite);
}
void runEvolutionaryOptimization(
  const CostFunction* const cost_function, 
  const DistributionGaussian* const initial_distribution, 
  const Updater* const updater, 
  int n_updates, 
  int n_samples_per_update, 
  string save_directory,
  bool overwrite,
  bool only_learning_curve)
{

  // Some variables
  MatrixXd samples;
  VectorXd costs, cost_eval;
  // Bookkeeping
  vector<UpdateSummary> update_summaries;
  UpdateSummary update_summary;
  
  if (save_directory.empty()) 
    cout << "init  =  " << "  distribution=" << *initial_distribution;
  
  // Optimization loop
  DistributionGaussian* distribution = initial_distribution->clone();
  for (int i_update=0; i_update<n_updates; i_update++)
  {
    // 0. Get cost of current distribution mean
    cost_function->evaluate(distribution->mean().transpose(),cost_eval);
    
    // 1. Sample from distribution
    distribution->generateSamples(n_samples_per_update, samples);

    // 2. Evaluate the samples
    cost_function->evaluate(samples,costs);
  
    // 3. Update parameters
    updater->updateDistribution(*distribution, samples, costs, *distribution, update_summary);
      
    
    // Some output and/or saving to file (if "directory" is set)
    if (save_directory.empty()) 
    {
      cout << "\t cost_eval=" << cost_eval << endl << i_update+1 << "  " << *distribution;
    }
    else
    {
      update_summary.cost_eval = cost_eval[0];
      update_summaries.push_back(update_summary);
    }
  }
  
  // Save update summaries to file, if necessary
  if (!save_directory.empty())
    saveToDirectory(update_summaries,save_directory,overwrite,only_learning_curve);

}
Esempio n. 3
0
void Package::saveAs(const String& name, bool remove_unused) {
	// Dispatch on the target type: an existing directory gets the
	// directory format, anything else is written as a zip archive.
	const bool is_directory = wxDirExists(name);
	if (!is_directory) {
		saveToZipfile(name, remove_unused, false);
	} else {
		saveToDirectory(name, remove_unused, false);
	}
	// Remember the new location, then clean up and reload from it.
	filename = name;
	removeTempFiles(remove_unused);
	reopen();
}
Esempio n. 4
0
bool saveToDirectoryNewUpdate(const UpdateSummary& update_summary, std::string directory)
{
    // Saves 'update_summary' in the next available "updateNNNNN/" subdirectory
    // of 'directory', creating 'directory' itself first if necessary.
    // Returns false if the directory cannot be created or if all update
    // numbers representable in %05d format are already taken.

    // Make directory if it doesn't already exist
    if (!boost::filesystem::exists(directory))
    {
        if (!boost::filesystem::create_directories(directory))
        {
            cerr << __FILE__ << ":" << __LINE__ << ":";
            cerr << "Couldn't make directory file '" << directory << "'." << endl;
            return false;
        }
        // Directory didn't exist yet, so this must be the first update.
        directory += "/update00001/";
        bool overwrite=true;
        return saveToDirectory(update_summary,directory,overwrite);
    }

    // Find the directory with the highest update
    // todo: this can probably be done more efficiently with boost::filesystem somehow
    int MAX_UPDATE = 99999; // Cannot store more in %05d format
    int i_update=1;
    // Fix: use <= so that the last representable update (update99999) can
    // also be used; the original "<" stopped one short.
    while (i_update<=MAX_UPDATE)
    {
        stringstream stream;
        stream << directory << "/update" << setw(5) << setfill('0') << i_update << "/";
        string directory_update = stream.str();
        if (!boost::filesystem::exists(directory_update))
        {
            // Found a directory that doesn't exist yet!
            bool overwrite=true;
            return saveToDirectory(update_summary,directory_update,overwrite);
        }
        i_update++;
    }

    std::cerr << "Sorry, directory " << directory << " is already full with update subdirectories." << std::endl;
    return false;
}
bool saveToDirectory(const vector<UpdateSummaryParallel>& update_summaries, std::string directory, bool overwrite, bool only_learning_curve)
{
  // Writes "learning_curve.txt" (and, unless only_learning_curve is set, each
  // update's full summary in "updateNNNNN/" subdirectories) to 'directory'.
  // Returns false on any save failure or if there are no updates to save.
  
  // Save the learning curve
  int n_updates = update_summaries.size();  
  assert(n_updates>0);
  // Robustness fix: assert() is compiled out under NDEBUG, which would make
  // the update_summaries[0] access below out-of-bounds. Bail out explicitly.
  if (n_updates==0)
    return false;
  
  // Learning-curve columns: [samples so far, cost of evaluation,
  // sqrt(max eigenvalue of covar) per parallel distribution].
  int n_parallel = update_summaries[0].distributions.size();
  MatrixXd learning_curve(n_updates,2+n_parallel);
  learning_curve(0,0) = 0; // First evaluation is at 0
  
  for (int i_update=0; i_update<n_updates; i_update++)
  {

    // Number of samples at which an evaluation was performed.
    if (i_update>0)
    {
      int n_samples = update_summaries[i_update].costs.rows();
      learning_curve(i_update,0) = learning_curve(i_update-1,0) + n_samples; 
    }
    
    // The cost of the evaluation at this update
    learning_curve(i_update,1) = update_summaries[i_update].cost_eval;
    
    for (int i_parallel=0; i_parallel<n_parallel; i_parallel++)
    {
      // The largest eigenvalue of the covariance matrix, for each distribution
      DistributionGaussian* distribution = update_summaries[i_update].distributions[i_parallel];
      MatrixXd eigen_values = distribution->covar().eigenvalues().real();
      learning_curve(i_update,2+i_parallel) = sqrt(eigen_values.maxCoeff());
    }
    
  }

  if (!saveMatrix(directory, "learning_curve.txt", learning_curve, overwrite))
    return false;
    
  if (!only_learning_curve)
  {
    // Save all the information in the update summaries
    for (int i_update=0; i_update<n_updates; i_update++)
    {
      stringstream stream;
      stream << directory << "/update" << setw(5) << setfill('0') << i_update+1 << "/";
      if (!saveToDirectory(update_summaries[i_update], stream.str(),overwrite))
        return false;
    }
  }
  return true;
}
Esempio n. 6
0
bool saveToDirectory(string directory, int i_update, const vector<DistributionGaussian>& distribution, const Rollout* rollout_eval, const vector<Rollout*>& rollouts, const VectorXd& weights, const vector<DistributionGaussian>& distribution_new, bool overwrite)
{
  // Saves one update (distributions before/after, evaluation cost, per-rollout
  // costs, weights and all rollouts) under directory/updateNNNNN/.
  // Returns false if saving any rollout fails.
  
  // Robustness fix: rollouts[0] is dereferenced below; an empty vector would
  // be undefined behaviour.
  if (rollouts.empty())
    return false;
  
  VectorXd cost_eval;
  if (rollout_eval!=NULL)
    rollout_eval->cost(cost_eval);
  
  // Gather the cost components of each rollout into one matrix (one row per
  // rollout).
  MatrixXd costs(rollouts.size(),rollouts[0]->getNumberOfCostComponents());
  for (unsigned int ii=0; ii<rollouts.size(); ii++)
  {
    VectorXd cur_cost;
    rollouts[ii]->cost(cur_cost);
    costs.row(ii) = cur_cost;
  }
  
  // Save update information
  MatrixXd samples;
  saveToDirectory(directory, i_update, distribution, cost_eval, samples, costs, weights, distribution_new,overwrite);

  stringstream stream;
  stream << directory << "/update" << setw(5) << setfill('0') << i_update << "/";
  string directory_update = stream.str();
  
  // Save rollouts too
  for (unsigned int i_rollout=0; i_rollout<rollouts.size(); i_rollout++)
  {
    stringstream stream;
    stream << directory_update << "/rollout" << setw(3) << setfill('0') << i_rollout+1;
    if (!rollouts[i_rollout]->saveToDirectory(stream.str(),overwrite))
      return false;
  }
  
  // Bug fix: the negation was missing here, so the function returned false
  // when saving the evaluation rollout SUCCEEDED (and true when it failed).
  if (rollout_eval!=NULL)
    if (!rollout_eval->saveToDirectory(directory_update+"/rollout_eval",overwrite))
      return false;
    
  return true;    
}
// This function could have been integrated with the above. But I preferred to duplicate a bit of
// code so that the difference between running an optimization with a CostFunction or
// Task/TaskSolver is more apparent.
void runEvolutionaryOptimization(
  const Task* const task, 
  const TaskSolver* const task_solver, 
  const DistributionGaussian* const initial_distribution, 
  const Updater* const updater, 
  int n_updates, 
  int n_samples_per_update, 
  string save_directory, 
  bool overwrite,
  bool only_learning_curve)
{
  // Some variables
  MatrixXd samples;
  MatrixXd cost_vars, cost_vars_eval;
  VectorXd costs, cost_eval;
  // Bookkeeping
  vector<UpdateSummary> update_summaries;
  UpdateSummary update_summary;
  
  if (save_directory.empty()) 
    cout << "init  =  " << "  distribution=" << *initial_distribution;
  
  // Optimization loop
  DistributionGaussian* distribution = initial_distribution->clone();
  for (int i_update=0; i_update<n_updates; i_update++)
  {
    // 0. Get cost of current distribution mean
    task_solver->performRollouts(distribution->mean().transpose(),cost_vars_eval);
    task->evaluate(cost_vars_eval,cost_eval);
    
    // 1. Sample from distribution
    distribution->generateSamples(n_samples_per_update, samples);

    // 2A. Perform the roll-outs
    task_solver->performRollouts(samples,cost_vars);
  
    // 2B. Evaluate the samples
    task->evaluate(cost_vars,costs);
  
    // 3. Update parameters
    updater->updateDistribution(*distribution, samples, costs, *distribution, update_summary);
      
    
    // Some output and/or saving to file (if "directory" is set)
    if (save_directory.empty()) 
    {
      cout << "\t cost_eval=" << cost_eval << endl << i_update+1 << "  " << *distribution;
    }
    else
    {
      update_summary.cost_eval = cost_eval[0];
      update_summary.cost_vars_eval = cost_vars_eval;
      update_summary.cost_vars = cost_vars;
      update_summaries.push_back(update_summary);
    }
  }
  
  // Save update summaries to file, if necessary
  if (!save_directory.empty())
  {
    saveToDirectory(update_summaries,save_directory,overwrite,only_learning_curve);
    // If you store only the learning curve, no need to save the script to visualize rollouts    
    if (!only_learning_curve)
      task->savePerformRolloutsPlotScript(save_directory);
  }

}
Esempio n. 8
0
/** \todo Get rid of runOptimizationParallelDeprecated(), and implement in UpdaterCovarAdaptation
*/
void runOptimizationParallelDeprecated(
  Task* task, 
  TaskSolver* task_solver, 
  vector<DistributionGaussian*> initial_distributions, 
  Updater* updater, 
  int n_updates, 
  int n_samples_per_update, 
  string save_directory, 
  bool overwrite, 
  bool only_learning_curve)
{  
  // Runs an optimization in which several Gaussian distributions are updated
  // in parallel. Each distribution owns a contiguous segment (see 'offsets')
  // of the concatenated sample vector that is passed to the task solver.

  // Some variables
  int n_parallel = initial_distributions.size();
  assert(n_parallel>=2);
  
  int n_samples = n_samples_per_update; // Shorthand
  
  // offsets[pp]..offsets[pp+1] delimits the columns of distribution pp in the
  // concatenated sample vector.
  VectorXi offsets(n_parallel+1);
  offsets[0] = 0;
  for (int ii=0; ii<n_parallel; ii++)
    offsets[ii+1] = offsets[ii] + initial_distributions[ii]->mean().size();
  int sum_n_dims = offsets[n_parallel];
  
  // n_samples X sum of dims over all parallel distributions.
  // Note: n_samples must be the same for all, n_dims varies.
  MatrixXd samples(n_samples,sum_n_dims);
  
  
  // Some variables
  VectorXd sample_eval(sum_n_dims);
  VectorXd cost_eval;
  MatrixXd cost_vars_eval;

  MatrixXd samples_per_parallel;
  MatrixXd cost_vars;
  VectorXd cur_costs;
  VectorXd costs(n_samples);
  VectorXd total_costs(n_samples);
  
  VectorXd weights;
  
  // Bookkeeping
  MatrixXd learning_curve(n_updates,3);
  
  vector<DistributionGaussian> distributions;
  vector<DistributionGaussian> distributions_new;
  for (int ii=0; ii<n_parallel; ii++)
  {
    // Fix: clone() allocates; copy the clone into both vectors and free it.
    // The original code leaked one clone per vector entry.
    DistributionGaussian* cloned = initial_distributions[ii]->clone();
    distributions.push_back(*cloned);
    distributions_new.push_back(*cloned);
    delete cloned;
  }
  
  // Optimization loop
  for (int i_update=0; i_update<n_updates; i_update++)
  {
    
    // 0. Get cost of current distribution mean
    for (int pp=0; pp<n_parallel; pp++)
      sample_eval.segment(offsets[pp],offsets[pp+1]-offsets[pp]) = distributions[pp].mean().transpose();
    task_solver->performRollout(sample_eval,cost_vars_eval);
    task->evaluateRollout(cost_vars_eval,sample_eval,cost_eval);
    Rollout* rollout_eval = new Rollout(sample_eval,cost_vars_eval,cost_eval);
    
    // 1. Sample from distribution
    for (int pp=0; pp<n_parallel; pp++)
    {
      distributions[pp].generateSamples(n_samples, samples_per_parallel);
      int width = offsets[pp+1]-offsets[pp];
      samples.block(0,offsets[pp],n_samples,width) = samples_per_parallel;
    }
      
    vector<Rollout*> rollouts(n_samples_per_update);
    for (int i_sample=0; i_sample<n_samples_per_update; i_sample++)
    {
        
      // 2. Perform rollouts for the samples
      task_solver->performRollout(samples.row(i_sample), cost_vars);
      
      // 3. Evaluate the last batch of rollouts
      task->evaluateRollout(cost_vars,samples.row(i_sample),cur_costs);

      // Bookkeeping: first cost component is the total cost
      costs[i_sample] = cur_costs[0];
      rollouts[i_sample] = new Rollout(samples.row(i_sample),cost_vars,cur_costs);
      
    }
  
    // 4. Update parameters
    for (int pp=0; pp<n_parallel; pp++)
    {
      int width = offsets[pp+1]-offsets[pp];
      samples_per_parallel = samples.block(0,offsets[pp],n_samples,width);
      updater->updateDistribution(distributions[pp], samples_per_parallel, costs, weights, distributions_new[pp]);
    }
      
    // Some output and/or saving to file (if "directory" is set)
    if (save_directory.empty()) 
    {
      cout << i_update+1 << "  cost_eval=" << cost_eval << endl;
    }
    else
    {
      // Update learning curve
      // How many samples so far?
      learning_curve(i_update,0) = i_update*n_samples_per_update;      
      // Cost of evaluation
      learning_curve(i_update,1) = cost_eval[0];
      // Exploration magnitude
      learning_curve(i_update,2) = 0.0;
      for (int pp=0; pp<n_parallel; pp++)
        learning_curve(i_update,2) += sqrt(distributions[pp].maxEigenValue()); 
      // Save more than just learning curve. 
      if (!only_learning_curve)
      {
          saveToDirectory(save_directory,i_update,distributions,rollout_eval,rollouts,weights,distributions_new);
          if (i_update==0)
            task->savePlotRolloutScript(save_directory);
      }
    }
    
    // Fix: free this update's rollouts (previously leaked every iteration).
    delete rollout_eval;
    for (int i_sample=0; i_sample<n_samples_per_update; i_sample++)
      delete rollouts[i_sample];
    
    // Distribution is new distribution
    for (int ii=0; ii<n_parallel; ii++)
      distributions[ii] = distributions_new[ii];
  }
  
}
Esempio n. 9
0
void runOptimizationTask(
  const Task* const task, 
  const TaskSolver* const task_solver, 
  const DistributionGaussian* const initial_distribution, 
  const Updater* const updater, 
  int n_updates, 
  int n_samples_per_update, 
  std::string save_directory, 
  bool overwrite,
  bool only_learning_curve)
{
  
  int n_cost_components = task->getNumberOfCostComponents();

  // Some variables
  VectorXd sample_eval;
  MatrixXd cost_vars_eval;
  VectorXd cost_eval(1+n_cost_components);
  
  MatrixXd samples;
  MatrixXd cost_vars;
  VectorXd weights;
  MatrixXd costs(n_samples_per_update,1+n_cost_components);
  
  // tmp variables
  VectorXd total_costs(n_samples_per_update);
  VectorXd cur_cost(1+n_cost_components);
  
  // Bookkeeping
  MatrixXd learning_curve(n_updates,2+n_cost_components);
  MatrixXd exploration_curve(n_updates,2);
  
  if (save_directory.empty()) 
    cout << "init  =  " << "  distribution=" << *initial_distribution;
  
  DistributionGaussian distribution = *(initial_distribution->clone());
  DistributionGaussian distribution_new = *(initial_distribution->clone());
  
  // Optimization loop
  for (int i_update=0; i_update<n_updates; i_update++)
  {
    // 0. Get cost of current distribution mean
    sample_eval = distribution.mean().transpose();
    task_solver->performRollout(sample_eval,cost_vars_eval);
    task->evaluateRollout(cost_vars_eval,sample_eval,cost_eval);
    Rollout* rollout_eval = new Rollout(sample_eval,cost_vars_eval,cost_eval);
    
    // 1. Sample from distribution
    distribution.generateSamples(n_samples_per_update, samples);

    vector<Rollout*> rollouts(n_samples_per_update);
    for (int i_sample=0; i_sample<n_samples_per_update; i_sample++)
    {
      // 2A. Perform the rollout
      task_solver->performRollout(samples.row(i_sample),cost_vars);

      // 2B. Evaluate the rollout
      task->evaluateRollout(cost_vars,samples.row(i_sample),cur_cost);
      costs.row(i_sample) = cur_cost;

      rollouts[i_sample] = new Rollout(samples.row(i_sample),cost_vars,cur_cost);
      
    }
  
    // 3. Update parameters (first column of costs contains sum of cost components)
    total_costs = costs.col(0);
    updater->updateDistribution(distribution, samples, total_costs, weights, distribution_new);
    
    
    // Bookkeeping
    // Some output and/or saving to file (if "directory" is set)
    if (save_directory.empty()) 
    {
      cout << "\t cost_eval=" << cost_eval << endl << i_update+1 << "  " << distribution;
    }
    else
    {
      // Update learning curve
      // How many samples?
      int i_samples = i_update*n_samples_per_update;
      learning_curve(i_update,0) = i_samples;
      // Cost of evaluation
      learning_curve.block(i_update,1,1,1+n_cost_components) = cost_eval.transpose();
      
      // Exploration magnitude
      exploration_curve(i_update,0) = i_samples;
      exploration_curve(i_update,1) = sqrt(distribution.maxEigenValue()); 
      
      // Save more than just learning curve.
      if (!only_learning_curve)
      {
          saveToDirectory(save_directory,i_update,distribution,rollout_eval,rollouts,weights,distribution_new);
          if (i_update==0)
            task->savePlotRolloutScript(save_directory);
      }
    }
    
    // Distribution is new distribution
    distribution = distribution_new;
    
  }
  
  // Save learning curve to file, if necessary
  if (!save_directory.empty())
  {
    // Todo: save cost labels also
    saveMatrix(save_directory, "exploration_curve.txt",exploration_curve,overwrite);
    saveMatrix(save_directory, "learning_curve.txt",learning_curve,overwrite);
  }
}
Esempio n. 10
0
/*
 * Program entry point: content-based image similarity search.
 *
 * Flow: parse command line ("-c <config>" or a default config file), read
 * settings from the config, extract SIFT-like features for all images (or
 * load them from files), build a KD-tree, then loop answering user queries
 * until EXIT_SIGN is entered. Cleanup is centralized via goto labels:
 * err_inside_loop (frees per-query data) falls through into err (frees
 * everything else). Returns 0 on success, -1 on any error.
 */
int main(int argc, char *argv[]) {
	SPConfig config = NULL;						    // hold configuration parameters
	char config_filename[CONFIG_FILE_PATH_SIZE];    // the configuration file name
	int knn;										// the number of similar features in each image to find (spKNN from configuration file)
	int num_of_similar_images_to_find;              // the number of similar images (to the query) to find (from configuration file)
	int split_method;                               // holds an int representing the split method: 0=RANDOM, 1= MAX_SPREAD,  2=INCREMENTAL
	bool minGui = false;                            // value of the system variable MinimalGui
	bool extraction_mode;							// indicates if extraction mode on or off
	int num_of_images = 0;   					    // number of images in the directory given by the user in the configuration file
	char** all_images_paths = NULL;					// array with the paths to all the images
	
	int last_extracted_feature = 0;  				// helper - holds the last feature extracted in order to free all extracted features on error
	SPPoint** features_per_image = NULL;   			// helper - holds the features for each images
	int* num_of_features_per_image = NULL;			// holds number of features extracted for each image
	
	char query_image[CONFIG_FILE_PATH_SIZE];        // the query image 
	SPPoint* query_features = NULL;				    // all query features
	int query_num_of_features;					    // number of features in query image
	
	KDTreeNode kd_tree = NULL;						// array holds a KDTree for the images
	int* closest_images = NULL;  				    // array holds the spNumOfSimilarImages indexes of the closest images to the query image
	int print_result;   							// holds the result of the call to PrintMinGuiFalse
	
	int retval = 0;									 // return value - default 0 on success
	char string_holder[CONFIG_FILE_PATH_SIZE];       // helper to hold strings
	sp::ImageProc *improc = NULL;
	SP_CONFIG_MSG msg;
	int i;
	int j;
	int n;

	// validate command line arguments:
	// cmd line arguments are ok if there was no arguments specified (argc == 1) or two arguments specified ( -c and filname)
	if (argc != 3 && argc != 1) {
		printf(INVALID_CMD_LINE_MSG);
		return -1;
	}

	if (argc == 1) {
		strcpy(config_filename, DEFAULT_CONFIG_FILENAME);
		config = spConfigCreate(config_filename, &msg);
		if (msg == SP_CONFIG_CANNOT_OPEN_FILE) {
			printf(ERROR_OPENING_DEFAULT_CONFIG_FILE_MSG, DEFAULT_CONFIG_FILENAME);
		}

		if (msg != SP_CONFIG_SUCCESS) {
			retval = -1;
			goto err; // error is printed inside spConfigCreate
		}
	}
	else { // argc == 3

		// check that second argument is the -c flag
		if (strcmp(argv[1], CMD_LINE_CONFIG_FILENAME_FLAG) != 0) {
			printf(INVALID_CMD_LINE_MSG);
			retval = -1;
			goto err;
		}
	
		strcpy(config_filename, argv[2]);
		config = spConfigCreate(config_filename, &msg);
		if (msg == SP_CONFIG_CANNOT_OPEN_FILE) {
			printf(ERROR_OPENING_CONFIG_FILE_MSG, config_filename);
		}

		if (msg != SP_CONFIG_SUCCESS) {
			retval = -1;
			goto err; // error is printed inside spConfigCreate
		}
	}
	
	// initiate from config
	if (initFromConfig(config, &num_of_images, &num_of_similar_images_to_find, &knn, &split_method, &extraction_mode, &minGui, &all_images_paths) == -1 ) {
		retval = -1; 
		goto err; // error is printed inside initFromConfig 
	}


	// initiate image proc
	improc = new sp::ImageProc(config);

	// extract images features
	if ((num_of_features_per_image = (int*)malloc(sizeof(*num_of_features_per_image) * num_of_images)) == NULL) {
		spLoggerPrintError(ALLOCATION_FAILURE_MSG, __FILE__, __func__, __LINE__);
		retval = -1;
		goto err;
	}

	spLoggerPrintInfo(CHECK_EXTRACTION_MODE_INFO_LOG);
	if (extraction_mode) {	// extraction mode is chosen
		spLoggerPrintMsg(USE_EXTRACTION_MODE_LOG);
		spLoggerPrintInfo(EXTRACT_IMAGES_FEATURES_INFO_LOG);

		if ((features_per_image = (SPPoint**)malloc(sizeof(*features_per_image) * num_of_images)) == NULL) {
			spLoggerPrintError(ALLOCATION_FAILURE_MSG, __FILE__, __func__, __LINE__);
			retval = -1;
			goto err;
		}

		// extract each image features and write them to file
		for (i=0; i < num_of_images; i++) {	
			// extract image features
			if ((features_per_image[i] = improc->getImageFeatures(all_images_paths[i], i, &(num_of_features_per_image[i]))) == NULL) {
				last_extracted_feature = i;
				retval = -1;
				goto err; // error is printed inside  getImageFeatures
			}
		}

		if (saveToDirectory(config, features_per_image, num_of_features_per_image, num_of_images) == -1) {
			retval = -1;
			goto err; // error is printed inside  saveToDirectory
		}
	}

	else { // not extraction mode
		spLoggerPrintMsg(USE_NOT_EXTRACTION_MODE_LOG);
		spLoggerPrintInfo(READ_FEATURES_FROM_FILE_LOG);

		if ((features_per_image = extractFromFiles(config, num_of_features_per_image, num_of_images)) == NULL) {
			retval = -1;
			goto err; // error is printed inside  extractFromFiles
		}
	}
	
	if ((kd_tree = initiateDataStructures(features_per_image, num_of_features_per_image, num_of_images, split_method)) == NULL) {
		retval = -1;
		goto err; // error is printed inside initiateDataStructures
	}

	// NOTE(review): the "query" label sits before the while(1), so the
	// "goto query" below restarts the loop from the top — confirm intended.
	query:
	while(1) {
		// get a query image from the user
		printf(ENTER_AN_IMAGE_MSG);
		fflush(NULL);
		scanf("%s",query_image);

		// exit if user asked
		if (strcmp (query_image,EXIT_SIGN) == 0) {
			printf(EXIT_MSG);
			fflush(NULL);
			goto err; // free memory and quit 
		}

		// re-prompt if the query file does not exist
		if( access( query_image, F_OK ) == -1 ) {
		    printf(FILE_DOESNT_EXIST, query_image);
			goto query;
		}

		// extract query image features
		spLoggerPrintMsg(EXTRACT_QUERY_IMAGE_FEATURES_LOG);
		if ((query_features = improc->getImageFeatures(query_image, num_of_images, &query_num_of_features)) == NULL) {
			retval = -1;
			goto err_inside_loop; // error log is printed inside getImageFeatures	
		}
		
		// print debug log
		if ((n = sprintf(string_holder, NUM_OF_EXTRACTED_FEATURES_DEBUG_LOG, query_num_of_features)) < 0) {
			spLoggerPrintError(GENERAL_ERROR_MSG, __FILE__, __func__, __LINE__);
			retval = -1;
			goto err_inside_loop;
		}

		spLoggerPrintDebug(string_holder, __FILE__, __func__, __LINE__);
		
		//  print log message
		if ((n = sprintf(string_holder, SEARCING_SIMILAR_IMAGES_MSG, num_of_similar_images_to_find)) < 0) {
			spLoggerPrintError(GENERAL_ERROR_MSG, __FILE__, __func__, __LINE__);
			retval = -1;
			goto err_inside_loop;
		}

		spLoggerPrintMsg(string_holder);

		// find similar images to the query image
		closest_images = getKClosestImages(num_of_similar_images_to_find, knn, query_features,
										   kd_tree, query_num_of_features, num_of_images);

		if (closest_images == NULL) { 
			retval = -1;
			goto err_inside_loop; // error is printed to inside getKClosestImages
		}

		// show (display) closest_images images

		//need to show images
		if (minGui==true){ 
			for (i=0; i<num_of_similar_images_to_find; i++){
				//get file path of the images by the indexes in closest_images
				improc->showImage(all_images_paths[closest_images[i]]);
			}
		}

		// i.e. minGui==false,  just need to print images path
		else{
			print_result = PrintMinGuiFalse(query_image, num_of_similar_images_to_find, all_images_paths, closest_images);
			if (print_result == 0) {
				retval = -1;
				goto err_inside_loop; // error is printed inside 
			}

		}
		// free memory before entering the loop again
		
		free(closest_images);
		if (query_features != NULL) {
			for (i=0; i<query_num_of_features; i++) {
				spPointDestroy(query_features[i]);
			}
			free(query_features);
		}
	}

	// per-query cleanup on error; deliberately falls through into "err" below
	err_inside_loop:	
		free(closest_images);
		// free query_features
		if (query_features != NULL) {
			for (i=0; i<query_num_of_features; i++) {
				spPointDestroy(query_features[i]);
			}
			free(query_features);
		}

	// done - destroy logger and free everything 
	err:
		spLoggerDestroy();

		// free the kd tree
		DestroyKDTreeNode(kd_tree);
		spConfigDestroy(config);

		// free all images paths
		if (all_images_paths != NULL) {
			for (i = 0; i < num_of_images; i ++) {
				free(all_images_paths[i]);
			}
			free(all_images_paths);
		}

		// NOTE(review): last_extracted_feature is only set on an extraction
		// error; on the success path (and in non-extraction mode) it stays 0,
		// so the per-image SPPoints are not freed here — confirm who owns them.
		if (features_per_image != NULL) {
			// free features_per_image
			for (i = 0; i < last_extracted_feature; i ++) {
				if (features_per_image[i] != NULL) {
					for (j = 0; j < num_of_features_per_image[i]; j++) {
						spPointDestroy(features_per_image[i][j]);
					}
				}
			}
			free(features_per_image);
		}
		free(num_of_features_per_image); // must be freed after features_per_image
		if (improc != NULL) {
			delete improc;
		}

	return retval;
}