Example #1
void FaceDetection::start_work(const cv::Mat &image)
{
	in_image = image.clone();
	out_image = image.clone();
	cv::cvtColor(in_image, in_image, cv::COLOR_BGR2GRAY);	// convert to grayscale
	cv::equalizeHist(in_image, in_image);	// histogram equalization
	faces.clear();
	cascade_classifier.detectMultiScale(in_image, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30));
	if (!faces.empty())
	{
		for (std::size_t i = 0; i != faces.size(); ++i)
			cv::rectangle(out_image, faces[i], cv::Scalar(0, 0, 255), 1, 8, 0);	// draw a rectangle around each detected face
		if (save)
		{
			faces[0].y -= static_cast<int>(0.1 * faces[0].height);
			faces[0].height += static_cast<int>(0.1 * faces[0].height);
			cv::resize(in_image(faces[0]), in_image, cv::Size(92, 112));	// the output size can be customized
			save_faces(in_image);
			system("cls");
			std::cout << "OK, you have saved " << photo_numbers << " images" << std::endl;
			save = false;
		}
		if (recognize)
		{
			faces[0].y -= static_cast<int>(0.1 * faces[0].height);
			faces[0].height += static_cast<int>(0.1 * faces[0].height);
			cv::resize(in_image(faces[0]), in_image, cv::Size(92, 112));
			cv::imwrite(".\\data\\me.pgm", in_image);
			set_have_image(true);
		}
	}
	cv::imshow("Current frame", out_image);
}
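Not part of the original project: a minimal sketch of how start_work could be driven from a camera loop. FaceDetection is the class from the snippet above; everything else (the free function, the exit key) is illustrative.

// Hypothetical driver for the snippet above; only start_work() comes from the example.
#include <opencv2/opencv.hpp>

int run_face_detection(FaceDetection &detector)
{
    cv::VideoCapture cap(0);              // default camera
    if (!cap.isOpened())
        return -1;
    cv::Mat frame;
    while (cap.read(frame)) {
        detector.start_work(frame);       // detect, draw, optionally save/recognize
        if (cv::waitKey(30) == 27)        // ESC quits
            break;
    }
    return 0;
}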
Example #2
int main(int argc, char **argv) {
    const int B = 5;    // box filter radius

    Arguments args(argc, argv);

    bool nosched   = args.noschedule;
    int iter       = args.iterations;
    int tile_width = args.block;
    int width      = args.width;
    int height     = args.width;

    Image<float> in_image = generate_random_image<float>(width,height);

    // pad the image with zeros
    int pad = 3*(B+1)+1;
    for (int i=0; i<in_image.width(); i++) {
        for (int j=0; j<in_image.height(); j++) {
            if (i<pad || i>width-pad || j<pad || j>height-pad) {
                in_image(i,j) = 0.0f;
            }
        }
    }

    RecFilter::set_max_threads_per_cuda_warp(128);

    RecFilter b1 = box_filter_order_1(in_image,     width, height, B, tile_width, !nosched);
    RecFilter b2 = box_filter_order_2(b1.as_func(), width, height, B, tile_width, !nosched);

    b2.profile(iter);

    return 0;
}
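For reference only (not from the RecFilter sources): a naive box filter over the same zero-padded image, assuming the radius-B filter computed above is a sum over a (2B+1)x(2B+1) window; whether the library normalizes by the window area is not shown here, so this sketch computes the unnormalized sum. It can serve as a CPU baseline when checking the tiled recursive version.

// Naive reference box filter: out(x,y) = sum of in(x+dx, y+dy) for |dx|,|dy| <= B.
// Purely illustrative; the RecFilter pipelines above compute the same quantity
// with tiled recursive scans on the GPU.
#include <vector>

std::vector<float> box_filter_reference(const std::vector<float> &in,
                                        int width, int height, int B)
{
    std::vector<float> out(static_cast<size_t>(width) * height, 0.0f);
    for (int y = 0; y < height; ++y) {
        for (int x = 0; x < width; ++x) {
            float sum = 0.0f;
            for (int dy = -B; dy <= B; ++dy) {
                for (int dx = -B; dx <= B; ++dx) {
                    int xx = x + dx, yy = y + dy;
                    if (xx >= 0 && xx < width && yy >= 0 && yy < height)
                        sum += in[static_cast<size_t>(yy) * width + xx];
                }
            }
            out[static_cast<size_t>(y) * width + x] = sum;
        }
    }
    return out;
}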
Example #3
 // INVERSE(s_healpix_inverse)  sphere
 // Project coordinates from cartesian (x, y) to geographic (lon, lat)
 inline void inv(T const& xy_x, T const& xy_y, T& lp_lon, T& lp_lat) const
 {
     /* Check whether (x, y) lies in the HEALPix image */
     if (in_image(xy_x, xy_y, 0, 0, 0) == 0) {
         lp_lon = HUGE_VAL;
         lp_lat = HUGE_VAL;
         BOOST_THROW_EXCEPTION( projection_exception(error_invalid_x_or_y) );
     }
     return healpix_sphere_inverse(xy_x, xy_y, lp_lon, lp_lat);
 }
Example #4
 // INVERSE(s_rhealpix_inverse)  sphere
 // Project coordinates from cartesian (x, y) to geographic (lon, lat)
 inline void inv(T xy_x, T xy_y, T& lp_lon, T& lp_lat) const
 {
     /* Check whether (x, y) lies in the rHEALPix image. */
     if (in_image(xy_x, xy_y, 1, this->m_proj_parm.north_square, this->m_proj_parm.south_square) == 0) {
         lp_lon = HUGE_VAL;
         lp_lat = HUGE_VAL;
         BOOST_THROW_EXCEPTION( projection_exception(error_invalid_x_or_y) );
     }
     combine_caps(xy_x, xy_y, this->m_proj_parm.north_square, this->m_proj_parm.south_square, 1);
     return healpix_sphere_inverse(xy_x, xy_y, lp_lon, lp_lat);
 }
Example #5
 // INVERSE(e_healpix_inverse)  ellipsoid
 // Project coordinates from cartesian (x, y) to geographic (lon, lat)
 inline void inv(T const& xy_x, T const& xy_y, T& lp_lon, T& lp_lat) const
 {
     /* Check whether (x, y) lies in the HEALPix image. */
     if (in_image(xy_x, xy_y, 0, 0, 0) == 0) {
         lp_lon = HUGE_VAL;
         lp_lat = HUGE_VAL;
         BOOST_THROW_EXCEPTION( projection_exception(error_invalid_x_or_y) );
     }
     healpix_sphere_inverse(xy_x, xy_y, lp_lon, lp_lat);
     lp_lat = auth_lat(this->params(), m_proj_parm, lp_lat, 1);
 }
Example #6
static LP s_healpix_inverse(XY xy, PJ *P) { /* sphere */
    LP lp = {0.0,0.0};

    /* Check whether (x, y) lies in the HEALPix image */
    if (in_image(xy.x, xy.y, 0, 0, 0) == 0) {
        lp.lam = HUGE_VAL;
        lp.phi = HUGE_VAL;
        pj_ctx_set_errno(P->ctx, -15);
        return lp;
    }
    return healpix_sphere_inverse(xy);
}
Example #7
bool CGEOM::generate_scene( const SceneGeneratorOptions& sc_opts,
                            Eigen::MatrixXd& mP3D,
                            Eigen::MatrixXd& mMeasT,
                            Eigen::MatrixXd& mMeasN
                          ) {
    const int nNumPoints = sc_opts.nNumPoints;
    Eigen::Matrix3d mK = sc_opts.MakeCameraMatrix();
    //PRINT_MATRIX( mK );

    mP3D.resize( 3, nNumPoints );
    mMeasT.resize( 2, nNumPoints );
    mMeasN.resize( 2, nNumPoints );

    // Generate random image points
    int nNumGenPoints = 0;
    Eigen::Vector3d mP;
    Eigen::Vector2d mMT, mMN;
    const int nMaxNumIter = 100*nNumPoints;
    int nNumIter = 0;
    while( nNumGenPoints < nNumPoints && ++nNumIter <= nMaxNumIter ) {
        mP << rand_range_d( sc_opts.dMinX, sc_opts.dMaxX ),
        rand_range_d( sc_opts.dMinY, sc_opts.dMaxY ),
        rand_range_d( sc_opts.dMinZ, sc_opts.dMaxZ );
        // Project to image
        mMT = CEIGEN::metric( mK * mP );
        // Add noise
        rand_gaussd( 0., sc_opts.dNoise, mMN(0), mMN(1) );
        mMN += mMT;
        if( in_image( mMT, sc_opts.nImageWidth, sc_opts.nImageHeight ) &&
                in_image( mMN, sc_opts.nImageWidth, sc_opts.nImageHeight ) ) {
            mP3D.col( nNumGenPoints ) = mP;
            mMeasT.col( nNumGenPoints ) = mMT;
            mMeasN.col( nNumGenPoints ) = mMN;
            nNumGenPoints++;
        }
    }
    return nNumGenPoints == nNumPoints;   // success iff all requested points were generated
}
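The in_image helper called above is not shown in this snippet; a minimal sketch, assuming it only checks that a projected point falls inside the image rectangle (the real CGEOM helper may add a border margin or other conditions):

// Assumed semantics: true if the point lies within [0, width) x [0, height).
#include <Eigen/Core>

inline bool in_image(const Eigen::Vector2d &m, int image_width, int image_height)
{
    return m(0) >= 0.0 && m(0) < image_width &&
           m(1) >= 0.0 && m(1) < image_height;
}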
Example #8
static LP s_rhealpix_inverse(XY xy, PJ *P) { /* sphere */
    struct pj_opaque *Q = P->opaque;
    LP lp = {0.0,0.0};

    /* Check whether (x, y) lies in the rHEALPix image. */
    if (in_image(xy.x, xy.y, 1, Q->north_square, Q->south_square) == 0) {
        lp.lam = HUGE_VAL;
        lp.phi = HUGE_VAL;
        pj_ctx_set_errno(P->ctx, -15);
        return lp;
    }
    xy = combine_caps(xy.x, xy.y, Q->north_square, Q->south_square, 1);
    return healpix_sphere_inverse(xy);
}
Example #9
static LP e_healpix_inverse(XY xy, PJ *P) { /* ellipsoid */
    LP lp = {0.0,0.0};

    /* Check whether (x, y) lies in the HEALPix image. */
    if (in_image(xy.x, xy.y, 0, 0, 0) == 0) {
        lp.lam = HUGE_VAL;
        lp.phi = HUGE_VAL;
        pj_ctx_set_errno(P->ctx, -15);
        return lp;
    }
    lp = healpix_sphere_inverse(xy);
    lp.phi = auth_lat(P, lp.phi, 1);
    return lp;
}
Example #10
void PointCloudImageCreator::imageCallback(
    const sensor_msgs::Image::ConstPtr &image_msg) {
    cv::Mat in_image = cv_bridge::toCvShare(
       image_msg, sensor_msgs::image_encodings::BGR8)->image;
    boost::mutex::scoped_lock lock(mutex_);
    if (is_mask_image_ && !this->foreground_mask_.empty()) {
       in_image = in_image(this->rect_);
    }
    cv_bridge::CvImagePtr out_msg(new cv_bridge::CvImage);
    out_msg->header = this->header_;
    out_msg->encoding = sensor_msgs::image_encodings::BGR8;
    out_msg->image = in_image.clone();
    pub_image_.publish(out_msg->toImageMsg());
}
Example #11
scm::gl::texture_image_data_ptr load_image_2d(std::string const& filename,
                                              bool create_mips) {
  scm::scoped_ptr<fipImage> in_image(new fipImage);

  if (!in_image->load(filename.c_str())) {
    scm::gl::glerr() << scm::log::error << "texture_loader::load_image_2d(): "
                     << "unable to open file: " << filename << scm::log::end;
    return scm::gl::texture_image_data_ptr();
  }

  FREE_IMAGE_TYPE image_type = in_image->getImageType();
  math::vec2ui image_size(in_image->getWidth(), in_image->getHeight());
  scm::gl::data_format format = scm::gl::FORMAT_NULL;
  unsigned image_bit_count = in_image->getInfoHeader()->biBitCount;

  switch (image_type) {
    case FIT_BITMAP: {
      unsigned num_components = in_image->getBitsPerPixel() / 8;
      switch (num_components) {
        case 1:
          format = scm::gl::FORMAT_R_8;
          break;
        case 2:
          format = scm::gl::FORMAT_RG_8;
          break;
        case 3:
          format = scm::gl::FORMAT_BGR_8;
          break;
        case 4:
          format = scm::gl::FORMAT_BGRA_8;
          break;
      }
    } break;
    case FIT_INT16:
      format = scm::gl::FORMAT_R_16S;
      break;
    case FIT_UINT16:
      format = scm::gl::FORMAT_R_16;
      break;
    case FIT_RGB16:
      format = scm::gl::FORMAT_RGB_16;
      break;
    case FIT_RGBA16:
      format = scm::gl::FORMAT_RGBA_16;
      break;
    case FIT_INT32:
      break;
    case FIT_UINT32:
      break;
    case FIT_FLOAT:
      format = scm::gl::FORMAT_R_32F;
      break;
    case FIT_RGBF:
      format = scm::gl::FORMAT_RGB_32F;
      break;
    case FIT_RGBAF:
      format = scm::gl::FORMAT_RGBA_32F;
      break;
  }

  if (format == scm::gl::FORMAT_NULL) {
    scm::gl::glerr() << scm::log::error << "texture_loader::load_image_2d(): "
                     << "unsupported color format: " << std::hex
                     << in_image->getImageType() << scm::log::end;
    return scm::gl::texture_image_data_ptr();
  }

  scm::gl::texture_image_data::level_vector mip_vec;

  unsigned num_mip_levels = 1;
  if (create_mips) {
    num_mip_levels = scm::gl::util::max_mip_levels(image_size);
  }

  for (unsigned i = 0; i < num_mip_levels; ++i) {
    scm::size_t cur_data_size = 0;
    math::vec2ui lev_size = scm::gl::util::mip_level_dimensions(image_size, i);

    if (i == 0) {
      lev_size = image_size;
      cur_data_size = image_size.x * image_size.y;
      cur_data_size *= channel_count(format);
      cur_data_size *= size_of_channel(format);
    } else {
      cur_data_size = lev_size.x * lev_size.y;
      cur_data_size *= channel_count(format);
      cur_data_size *= size_of_channel(format);

      if (FALSE == in_image->rescale(lev_size.x, lev_size.y, FILTER_LANCZOS3)) {
        scm::gl::glerr() << scm::log::error
                         << "texture_loader::load_image_2d(): "
                         << "unable to scale image (level: " << i
                         << ", dim: " << lev_size << ")" << scm::log::end;
        return scm::gl::texture_image_data_ptr();
      }

      if (in_image->getWidth() != lev_size.x ||
          in_image->getHeight() != lev_size.y) {
        scm::gl::glerr() << scm::log::error
                         << "texture_loader::load_image_2d(): "
                         << "image dimensions changed after resamling (level: "
                         << i << ", dim: " << lev_size << ", type: " << std::hex
                         << in_image->getImageType() << ")" << scm::log::end;
        return scm::gl::texture_image_data_ptr();
      }
      if (in_image->getInfoHeader()->biBitCount != image_bit_count) {
        scm::gl::glerr() << scm::log::error
                         << "texture_loader::load_image_2d(): "
                         << "image bitcount changed after resamling (level: "
                         << i << ", bit_count: " << image_bit_count
                         << ", img_bit_count: "
                         << in_image->getInfoHeader()->biBitCount << ")"
                         << scm::log::end;
        return scm::gl::texture_image_data_ptr();
      }
      if (image_type != in_image->getImageType()) {
        scm::gl::glerr() << scm::log::error
                         << "texture_loader::load_image_2d(): "
                         << "image type changed after resamling (level: " << i
                         << ", dim: " << lev_size << ", type: " << std::hex
                         << in_image->getImageType() << ")" << scm::log::end;
        return scm::gl::texture_image_data_ptr();
      }
    }

    scm::shared_array<unsigned char> cur_data(new unsigned char[cur_data_size]);

    size_t line_pitch = in_image->getScanWidth();
    for (unsigned l = 0; l < lev_size.y; ++l) {
      size_t ls = static_cast<size_t>(lev_size.x) * size_of_format(format);
      uint8_t* s =
          reinterpret_cast<uint8_t*>(in_image->accessPixels()) + line_pitch * l;
      uint8_t* d = reinterpret_cast<uint8_t*>(cur_data.get()) + ls * l;
      std::memcpy(d, s, ls);
    }

    mip_vec.push_back({lev_size, cur_data});
  }

  return boost::make_shared<scm::gl::texture_image_data>(
      scm::gl::texture_image_data::ORIGIN_LOWER_LEFT, format, mip_vec);
}
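A short, hypothetical call site for the loader above; the path is a placeholder, and the null-pointer check mirrors the error convention the function itself uses.

// Illustrative usage; "assets/diffuse.png" is a made-up path.
bool load_texture_example()
{
    scm::gl::texture_image_data_ptr tex =
        load_image_2d("assets/diffuse.png", /*create_mips=*/true);
    if (!tex) {
        return false;   // load_image_2d has already logged the reason
    }
    // tex holds one data level per mip, origin at the lower left,
    // with the scm::gl::data_format chosen from the FreeImage type.
    return true;
}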
Example #12
int
geocode_dem (projection_type_t projection_type,	// What we are projecting to.
	     project_parameters_t *pp,    // Parameters we project to.
	     datum_type_t datum,                // Datum we project to.
	     // Pixel size of output image, in output projection units
	     // (meters or possibly degrees, if we decide to support
	     // projecting to pseudoprojected form).
	     double pixel_size,
	     resample_method_t resample_method,	// How to resample pixels.
	     const char *input_image, // Base name of input image.
	     const meta_parameters *imd, // Input DEM image metadata.
	     const char *output_image  // Base name of output image.
	     )
{
  int return_code;		// Holds return codes from functions.

  // Function to use to project or unproject between latlon and input
  // or output coordinates.
  projector_t project_input; 	// latlon => input image map projection
  projector_t unproject_input;	// input image_map_projection => latlon
  projector_t project_output;	// latlon => output image map projection
  projector_t unproject_output;	// output image map projection => latlon
  // Like the above, but act on arrays.
  array_projector_t array_project_input, array_unproject_input;
  array_projector_t array_project_output, array_unproject_output;

  // We only deal with reprojecting map projected DEMs.
  g_assert (imd->projection != NULL);

  // FIXME: what to do with background value is something that still
  // needs to be determined (probably in consultation with the guys
  // working on terrain correction).
  const float background_value = 0.0;

  // Geocoding to pseudoprojected form presents issues, for example
  // with the meaning of the pixel_size argument, which is taken as a
  // distance in map projection coordinates for all other projections
  // (deciding how to interpret it when projecting to pseudoprojected
  // form is tough), and since there probably isn't much need, we
  // don't allow it.
  g_assert (projection_type != LAT_LONG_PSEUDO_PROJECTION);

  // Get the functions we want to use for projecting and unprojecting.
  set_projection_functions (imd->projection->type, &project_input,
			    &unproject_input, &array_project_input,
			    &array_unproject_input);
  set_projection_functions (projection_type, &project_output,
			    &unproject_output, &array_project_output,
			    &array_unproject_output);

  // Input image dimensions in pixels in x and y directions.
  size_t ii_size_x = imd->general->sample_count;
  size_t ii_size_y = imd->general->line_count;

  // Convenience aliases.
  meta_projection *ipb = imd->projection;
  project_parameters_t *ipp = &imd->projection->param;

  // First we march around the entire outside of the image and compute
  // projection coordinates for every pixel, keeping track of the
  // minimum and maximum projection coordinates in each dimension.
  // This lets us determine the exact extent of the DEM in
  // output projection coordinates.
  asfPrintStatus ("Determining input image extent in projection coordinate "
		  "space... ");

  double min_x = DBL_MAX;
  double max_x = -DBL_MAX;
  double min_y = DBL_MAX;
  double max_y = -DBL_MAX;

  // In going around the edge, we are just trying to determine the
  // extent of the image in the horizontal, so we don't care about
  // height yet.
  { // Scoping block.
    // Number of pixels in the edge of the image.
    size_t edge_point_count = 2 * ii_size_x + 2 * ii_size_y - 4;
    double *lats = g_new0 (double, edge_point_count);
    double *lons = g_new0 (double, edge_point_count);
    size_t current_edge_point = 0;
    size_t ii = 0, jj = 0;
    for ( ; ii < ii_size_x - 1 ; ii++ ) {
      return_code = get_pixel_lat_long (imd, unproject_input, ii, jj,
					&(lats[current_edge_point]),
					&(lons[current_edge_point]));
      g_assert (return_code);
      current_edge_point++;
    }
    for ( ; jj < ii_size_y - 1 ; jj++ ) {
      return_code = get_pixel_lat_long (imd, unproject_input, ii, jj,
					&(lats[current_edge_point]),
					&(lons[current_edge_point]));
      g_assert (return_code);
      current_edge_point++;
    }
    for ( ; ii > 0 ; ii-- ) {
      return_code = get_pixel_lat_long (imd, unproject_input, ii, jj,
					&(lats[current_edge_point]),
					&(lons[current_edge_point]));
      g_assert (return_code);
      current_edge_point++;
    }
    for ( ; jj > 0 ; jj-- ) {
      return_code = get_pixel_lat_long (imd, unproject_input, ii, jj,
					&(lats[current_edge_point]),
					&(lons[current_edge_point]));
      g_assert (return_code);
      current_edge_point++;
    }
    g_assert (current_edge_point == edge_point_count);
    // Pointers to arrays of projected coordinates to be filled in.
    // The projection function will allocate this memory itself.
    double *x = NULL, *y = NULL;
    // Project all the edge pixels.
    return_code = array_project_output (pp, lats, lons, NULL, &x, &y, NULL,
					edge_point_count, datum);
    g_assert (return_code == TRUE);
    // Find the extents of the image in projection coordinates.
    for ( ii = 0 ; ii < edge_point_count ; ii++ ) {
      if ( x[ii] < min_x ) { min_x = x[ii]; }
      if ( x[ii] > max_x ) { max_x = x[ii]; }
      if ( y[ii] < min_y ) { min_y = y[ii]; }
      if ( y[ii] > max_y ) { max_y = y[ii]; }
    }

    free (y);
    free (x);
    g_free (lons);
    g_free (lats);
  }

  asfPrintStatus ("done.\n\n");

  // Issue a warning when the chosen pixel size is smaller than the
  // input pixel size.  FIXME: this condition will really never fire
  // for a pseudoprojected image, since the pixel size of the input is
  // tiny (degrees per pixel) and the pixel_size has already been
  // computed in the asf_geocode function itself as an arc length on the
  // ground.
  if ( GSL_MIN(imd->general->x_pixel_size,
	       imd->general->y_pixel_size) > pixel_size ) {
    asfPrintWarning
      ("Requested pixel size %f is smaller then the input image resolution "
       "(%le meters).\n", pixel_size,
       GSL_MIN (imd->general->x_pixel_size, imd->general->y_pixel_size));
  }

  // The pixel size requested by the user better not oversample by the
  // factor of 2.  Specifying --force will skip this check.  FIXME:
  // same essential problem as the above condition, but in this case
  // it always goes off.
  //  if (!force_flag && GSL_MIN(imd->general->x_pixel_size,
  //	       imd->general->y_pixel_size) > (2*pixel_size) ) {
  //    report_func
  //      ("Requested pixel size %f is smaller then the minimum implied by half \n"
  //       "the input image resolution (%le meters), this is not supported.\n",
  //       pixel_size, GSL_MIN (imd->general->x_pixel_size,
  //			    imd->general->y_pixel_size));
  //  }

  asfPrintStatus ("Opening input DEM image... ");
  char *input_data_file = (char *) MALLOC(sizeof(char)*(strlen(input_image)+5));
  sprintf(input_data_file, "%s.img", input_image);
  FloatImage *iim
    = float_image_new_from_file (ii_size_x, ii_size_y, input_data_file, 0,
				 FLOAT_IMAGE_BYTE_ORDER_BIG_ENDIAN);
  FREE(input_data_file);
  asfPrintStatus ("done.\n\n");

  // Maximum pixel indices in the output image.
  size_t oix_max = ceil ((max_x - min_x) / pixel_size);
  size_t oiy_max = ceil ((max_y - min_y) / pixel_size);

  // Output image dimensions.
  size_t oi_size_x = oix_max + 1;
  size_t oi_size_y = oiy_max + 1;

  // Output image.
  FloatImage *oim = float_image_new (oi_size_x, oi_size_y);

  // Translate the command line notion of the resampling method into
  // the lingo known by the float_image class.  The compiler is
  // reassured with a default.
  float_image_sample_method_t float_image_sample_method
    = FLOAT_IMAGE_SAMPLE_METHOD_BILINEAR;
  switch ( resample_method ) {
  case RESAMPLE_NEAREST_NEIGHBOR:
    float_image_sample_method = FLOAT_IMAGE_SAMPLE_METHOD_NEAREST_NEIGHBOR;
    break;
  case RESAMPLE_BILINEAR:
    float_image_sample_method = FLOAT_IMAGE_SAMPLE_METHOD_BILINEAR;
    break;
  case RESAMPLE_BICUBIC:
    float_image_sample_method = FLOAT_IMAGE_SAMPLE_METHOD_BICUBIC;
    break;
  default:
    g_assert_not_reached ();
  }

  // We need to find the z coordinates in the output projection of all
  // the pixels in the input DEM.  We store these values in their own
  // FloatImage instance.

  //FloatImage *x_coords = float_image_new (ii_size_x, ii_size_y);
  //FloatImage *y_coords = float_image_new (ii_size_x, ii_size_y);
  FloatImage *z_coords = float_image_new (ii_size_x, ii_size_y);

  // We transform the points using the array transformation function
  // for efficiency, but we don't want to do them all at once, since
  // that would require huge gobs of memory.
  const size_t max_transform_chunk_pixels = 5000000;
  size_t rows_per_chunk = max_transform_chunk_pixels / ii_size_x;
  size_t chunk_pixels = rows_per_chunk * ii_size_x;
  double *chunk_x = g_new0 (double, chunk_pixels);
  double *chunk_y = g_new0 (double, chunk_pixels);
  double *chunk_z = g_new0 (double, chunk_pixels);
  double *lat = g_new0 (double, chunk_pixels);
  double *lon = g_new0 (double, chunk_pixels);
  double *height = g_new0 (double, chunk_pixels);

  asfPrintStatus ("Determining Z coordinates of input pixels in output "
		  "projection space... ");

  // Transform all the chunks, storing results in the z coordinate image.
  size_t ii, jj, kk;		// Index variables.
  for ( ii = 0 ; ii < ii_size_y ; ) {
    size_t rows_remaining = ii_size_y - ii;
    size_t rows_to_load
      = rows_per_chunk < rows_remaining ? rows_per_chunk : rows_remaining;
    for ( jj = 0 ; jj < rows_to_load ; jj++ ) {
      size_t current_image_row = ii + jj;
      for ( kk = 0 ; kk < ii_size_x ; kk++ ) {
	size_t current_chunk_pixel = jj * ii_size_x + kk;
	chunk_x[current_chunk_pixel] = ipb->startX + kk * ipb->perX;
	chunk_y[current_chunk_pixel]
	  = ipb->startY + current_image_row * ipb->perY;
	if ( imd->projection->type == LAT_LONG_PSEUDO_PROJECTION ) {
	  chunk_x[current_chunk_pixel] *= D2R;
	  chunk_y[current_chunk_pixel] *= D2R;
	}
	chunk_z[current_chunk_pixel]
	  = float_image_get_pixel (iim, kk, current_image_row);
      }
    }
    long current_chunk_pixels = rows_to_load * ii_size_x;
    array_unproject_input (ipp, chunk_x, chunk_y, chunk_z, &lat, &lon,
			   &height, current_chunk_pixels, ipb->datum);
    array_project_output (pp, lat, lon, height, &chunk_x, &chunk_y, &chunk_z,
			  current_chunk_pixels, datum);
    for ( jj = 0 ; jj < rows_to_load ; jj++ ) {
      size_t current_image_row = ii + jj;
      for ( kk = 0 ; kk < ii_size_x ; kk++ ) {
	size_t current_chunk_pixel = jj * ii_size_x + kk;
	// Current pixel x, y, z coordinates.
	//float cp_x = (float) chunk_x[current_chunk_pixel];
	//float cp_y = (float) chunk_y[current_chunk_pixel];
	float cp_z = (float) chunk_z[current_chunk_pixel];
	//float_image_set_pixel (x_coords, kk, current_image_row, cp_x);
	//float_image_set_pixel (y_coords, kk, current_image_row, cp_y);
	float_image_set_pixel (z_coords, kk, current_image_row, cp_z);
      }
    }

    ii += rows_to_load;
  }

  asfPrintStatus ("done.\n\n");

#ifdef DEBUG_GEOCODE_DEM_Z_COORDS_IMAGE_AS_JPEG
  // Take a look at the z_coordinate image (for debugging).
  float_image_export_as_jpeg_with_mask_interval (z_coords, "z_coords.jpg",
						 GSL_MAX (z_coords->size_x,
							  z_coords->size_y),
						 -FLT_MAX, -100);
#endif

  g_free (chunk_x);
  g_free (chunk_y);
  g_free (chunk_z);
  g_free (lat);
  g_free (lon);
  g_free (height);

  // Now we want to determine the pixel coordinates in the input which
  // correspond to each of the output pixels.  We can then sample the
  // new height value already computed for that input pixel to
  // determine the pixel value to use as output.

  // We want to proceed in chunks as we did when going in the other
  // direction.
  rows_per_chunk = max_transform_chunk_pixels / oi_size_x;
  chunk_pixels = rows_per_chunk * oi_size_x;
  chunk_x = g_new0 (double, chunk_pixels);
  chunk_y = g_new0 (double, chunk_pixels);
  // We don't have height information in this direction, nor do we care.
  chunk_z = NULL;
  lat = g_new0 (double, chunk_pixels);
  lon = g_new0 (double, chunk_pixels);
  // We don't have height information in this direction, nor do we care.
  height = NULL;

  asfPrintStatus ("Sampling Z coordinates to form pixels in output projection "
		  "space... ");

  // Transform all the chunks, using the results to form the output image.
  for ( ii = 0 ; ii < oi_size_y ; ) {
    size_t rows_remaining = oi_size_y - ii;
    size_t rows_to_load
      = rows_per_chunk < rows_remaining ? rows_per_chunk : rows_remaining;
    for ( jj = 0 ; jj < rows_to_load ; jj++ ) {
      size_t current_image_row = ii + jj;
      for ( kk = 0 ; kk < oi_size_x ; kk++ ) {
	size_t current_chunk_pixel = jj * oi_size_x + kk;
	chunk_x[current_chunk_pixel] = min_x + kk * pixel_size;
	chunk_y[current_chunk_pixel] = max_y - current_image_row * pixel_size;
      }
    }
    long current_chunk_pixels = rows_to_load * oi_size_x;
    array_unproject_output (pp, chunk_x, chunk_y, NULL, &lat, &lon, NULL,
			    current_chunk_pixels, datum);
    array_project_input (ipp, lat, lon, NULL, &chunk_x, &chunk_y, NULL,
			 current_chunk_pixels, ipb->datum);
    if ( imd->projection->type == LAT_LONG_PSEUDO_PROJECTION ) {
      ssize_t ll;     // For (semi)clarity we don't reuse index variable :)
      for ( ll = 0 ; ll < current_chunk_pixels ; ll++ ) {
	chunk_x[ll] *= R2D;
	chunk_y[ll] *= R2D;
      }
    }

    for ( jj = 0 ; jj < rows_to_load ; jj++ ) {
      size_t current_image_row = ii + jj;
      for ( kk = 0 ; kk < oi_size_x ; kk++ ) {
	size_t current_chunk_pixel = jj * oi_size_x + kk;

	// Compute pixel coordinates in input image.
	ssize_t in_x
	  = (chunk_x[current_chunk_pixel] - ipb->startX) / ipb->perX;
	ssize_t in_y
	  = (chunk_y[current_chunk_pixel] - ipb->startY) / ipb->perY;

	if ( in_image (z_coords, in_x, in_y) ) {
	  // FIXME: something needs to be done somewhere about
	// propagating no data values.
	  float_image_set_pixel (oim, kk, current_image_row,
				 float_image_sample (z_coords, in_x, in_y,
						     resample_method));
	}
	else {
	  float_image_set_pixel (oim, kk, current_image_row, background_value);
	}
      }
    }

    ii += rows_to_load;
  }

  asfPrintStatus ("done.\n\n");

  g_free (chunk_x);
  g_free (chunk_y);
  g_free (lat);
  g_free (lon);

#ifdef DEBUG_GEOCODE_DEM_OUTPUT_IMAGE_AS_JPEG
  // Take a look at the output image (for debugging).
  float_image_export_as_jpeg_with_mask_interval (oim, "oim.jpg",
						 GSL_MAX (oim->size_x,
							  oim->size_y),
						 -FLT_MAX, -100);
#endif

  // Store the output image.
  asfPrintStatus ("Storing output image... ");
  char *output_data_file = 
    (char *) MALLOC(sizeof(char)*(strlen(output_image)+5));
  sprintf(output_data_file, "%s.img", output_image);
  return_code = float_image_store (oim, output_data_file,
				   FLOAT_IMAGE_BYTE_ORDER_BIG_ENDIAN);
  g_assert (return_code == 0);
  asfPrintStatus ("done.\n\n");

  // Now we need some metadata for the output image.  We will just
  // start with the metadata from the input image and add the
  // geocoding parameters.

  char *input_meta_file = (char *) MALLOC(sizeof(char)*(strlen(input_image)+6));
  sprintf(input_meta_file, "%s.meta", input_image);

  char *output_meta_file = 
    (char *) MALLOC(sizeof(char)*(strlen(output_image)+6));
  sprintf(output_meta_file, "%s.meta", output_image);

  meta_parameters *omd = meta_read (input_meta_file);

  // Adjust the metadata to correspond to the output image instead of
  // the input image.

  omd->general->x_pixel_size = pixel_size;
  omd->general->y_pixel_size = pixel_size;
  omd->general->line_count = oi_size_y;
  omd->general->sample_count = oi_size_x;

  // The SAR block is not really appropriate for map projected images,
  // but since its image_type field is what signals that an image is map
  // projected, we fill it in for safety.
  omd->sar->image_type = 'P';

  // Note that we have already verified that the input image is
  // projected, and since we initialize the output metadata from there
  // we know we will have a projection block.
  omd->projection->type = projection_type;
  omd->projection->startX = min_x;
  omd->projection->startY = max_y;
  omd->projection->perX = pixel_size;
  omd->projection->perY = -pixel_size;
  strcpy (omd->projection->units, "meters");

  // Set the spheroid axes lengths as appropriate for the output datum.
  spheroid_axes_lengths (datum_spheroid (datum), &(omd->projection->re_major),
			 &(omd->projection->re_minor));

  // What the heck, might as well set the ones in the general block as
  // well.
  spheroid_axes_lengths (datum_spheroid (datum), &(omd->general->re_major),
			 &(omd->general->re_minor));

  // Latitude and longitude at center of the output image.  We will
  // set these relative to the spheroid underlying the datum in use
  // for the projected image.  Yeah, that seems appropriate.
  double lat_0, lon_0;
  double center_x = omd->projection->startX + (omd->projection->perX
					       * omd->general->sample_count / 2);
  double center_y = (omd->projection->startY
		     + (omd->projection->perY
			* omd->general->line_count / 2));
  unproject_output (pp, center_x, center_y, ASF_PROJ_NO_HEIGHT, &lat_0, &lon_0,
		    NULL, datum);
  omd->general->center_latitude = R2D * lat_0;
  omd->general->center_longitude = R2D * lon_0;

  // FIXME: We are ignoring the meta_location fields for now since I'm
  // not sure whether they are supposed to refer to the corner pixels
  // or the corners of the data itself.

  if ( lat_0 > 0.0 ) {
    omd->projection->hem = 'N';
  }
  else {
    omd->projection->hem = 'S';
  }

  // Convert the projection parameter values back into degrees.
  to_degrees (projection_type, pp);
  omd->projection->param = *pp;
  meta_write (omd, output_meta_file);

  float_image_free (oim);
  FREE(output_data_file);
  meta_free (omd);
  FREE(input_meta_file);
  FREE(output_meta_file);

  return 0;
}
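The resampling core of geocode_dem follows the classic inverse-mapping pattern: walk the output grid, map each output pixel back into input pixel coordinates, and either sample the input or write the background value. Below is a compact, self-contained sketch of that pattern with the projection machinery abstracted into callbacks; none of these names come from the ASF code.

// Illustrative only: generic inverse-mapping resampler in the spirit of the loop above.
#include <cstddef>
#include <functional>
#include <vector>

void inverse_map_resample(
    std::size_t out_w, std::size_t out_h,
    double min_x, double max_y, double pixel_size,
    // Maps output projection coords to input pixel coords; returns false if the
    // point falls outside the input image (the in_image check above).
    const std::function<bool(double px, double py, double &in_x, double &in_y)> &to_input_pixel,
    const std::function<float(double in_x, double in_y)> &sample_input,
    float background_value,
    std::vector<float> &out)   // row-major, out_w * out_h
{
    out.assign(out_w * out_h, background_value);
    for (std::size_t row = 0; row < out_h; ++row) {
        for (std::size_t col = 0; col < out_w; ++col) {
            // Output pixel -> output projection coordinates.
            double px = min_x + col * pixel_size;
            double py = max_y - row * pixel_size;
            // Unproject/reproject into input pixel coordinates and sample if inside.
            double in_x, in_y;
            if (to_input_pixel(px, py, in_x, in_y))
                out[row * out_w + col] = sample_input(in_x, in_y);
        }
    }
}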