Example #1
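Tests p4est_partition_given with a prescribed quadrant distribution and p4est_partition with several weight functions, checking after every repartition that the forest checksum, the per-tree counts, and the per-quadrant user data survive intact.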
int
main (int argc, char **argv)
{
  int                 rank;
  int                 num_procs;
  int                 mpiret;
  sc_MPI_Comm         mpicomm;
  p4est_t            *p4est, *copy;
  p4est_connectivity_t *connectivity;
  int                 i;
  p4est_topidx_t      t;
  size_t              qz;
  p4est_locidx_t      num_quadrants_on_last;
  p4est_locidx_t     *num_quadrants_in_proc;
  p4est_gloidx_t     *pertree1, *pertree2;
  p4est_quadrant_t   *quad;
  p4est_tree_t       *tree;
  user_data_t        *user_data;
  int64_t             sum;
  unsigned            crc;

  mpiret = sc_MPI_Init (&argc, &argv);
  SC_CHECK_MPI (mpiret);
  mpicomm = sc_MPI_COMM_WORLD;
  mpiret = sc_MPI_Comm_rank (mpicomm, &rank);
  SC_CHECK_MPI (mpiret);

  sc_init (mpicomm, 1, 1, NULL, SC_LP_DEFAULT);

  /* create connectivity and forest structures */
#ifdef P4_TO_P8
  connectivity = p8est_connectivity_new_twocubes ();
#else
  connectivity = p4est_connectivity_new_corner ();
#endif
  p4est = p4est_new_ext (mpicomm, connectivity, 15, 0, 0,
                         sizeof (user_data_t), init_fn, NULL);

  /* allocate cumulative per-tree quadrant counts (num_trees + 1 entries) */
  pertree1 = P4EST_ALLOC (p4est_gloidx_t, p4est->connectivity->num_trees + 1);
  pertree2 = P4EST_ALLOC (p4est_gloidx_t, p4est->connectivity->num_trees + 1);
  num_procs = p4est->mpisize;
  num_quadrants_in_proc = P4EST_ALLOC (p4est_locidx_t, num_procs);

  /* refine to make the number of elements interesting */
  test_pertree (p4est, NULL, pertree1);
  p4est_refine (p4est, 1, refine_fn, init_fn);
  test_pertree (p4est, NULL, pertree1);

  /* Set an arbitrary partition.
   *
   * Since this is just a test we assume the global number of
   * quadrants will fit in an int32_t
   */
  num_quadrants_on_last = (p4est_locidx_t) p4est->global_num_quadrants;
  for (i = 0; i < num_procs - 1; ++i) {
    num_quadrants_in_proc[i] = (p4est_locidx_t) i + 1;  /* type ok */
    num_quadrants_on_last -= (p4est_locidx_t) i + 1;    /* type ok */
  }
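  /* the last process receives all remaining quadrants */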
  num_quadrants_in_proc[num_procs - 1] = num_quadrants_on_last;
  SC_CHECK_ABORT (num_quadrants_on_last > 0,
                  "Non-positive number of quadrants on the last processor");

  /* Save a checksum of the original forest */
  crc = p4est_checksum (p4est);

  /* partition the forest */
  (void) p4est_partition_given (p4est, num_quadrants_in_proc);
  test_pertree (p4est, pertree1, pertree2);

  /* double-check that we didn't lose any quadrants */
  SC_CHECK_ABORT (crc == p4est_checksum (p4est),
                  "bad checksum, missing a quad");

  /* verify the actual number of quadrants on this process */
  SC_CHECK_ABORT (num_quadrants_in_proc[rank]
                  == p4est->local_num_quadrants,
                  "partition failed, wrong number of quadrants");

  /* check user data content */
  for (t = p4est->first_local_tree; t <= p4est->last_local_tree; ++t) {
    tree = p4est_tree_array_index (p4est->trees, t);
    for (qz = 0; qz < tree->quadrants.elem_count; ++qz) {
      quad = p4est_quadrant_array_index (&tree->quadrants, qz);
      user_data = (user_data_t *) quad->p.user_data;
      sum = quad->x + quad->y + quad->level;

      SC_CHECK_ABORT (user_data->a == t, "bad user_data, a");
      SC_CHECK_ABORT (user_data->sum == sum, "bad user_data, sum");
    }
  }

  /* do a weighted partition with uniform weights */
  p4est_partition (p4est, 0, weight_one);
  test_pertree (p4est, pertree1, pertree2);
  SC_CHECK_ABORT (crc == p4est_checksum (p4est),
                  "bad checksum after uniformly weighted partition");

  /* copy the p4est */
  copy = p4est_copy (p4est, 1);
  SC_CHECK_ABORT (crc == p4est_checksum (copy), "bad checksum after copy");

  /* do a weighted partition with many zero weights
   * (weight_index 1342 on rank 1, 0 elsewhere) */
  weight_counter = 0;
  weight_index = (rank == 1) ? 1342 : 0;
  p4est_partition (copy, 0, weight_once);
  test_pertree (copy, pertree1, pertree2);
  SC_CHECK_ABORT (crc == p4est_checksum (copy),
                  "bad checksum after unevenly weighted partition 1");

  /* do a weighted partition with many zero weights
   * (weight_index 0 on every rank) */
  weight_counter = 0;
  weight_index = 0;
  p4est_partition (copy, 0, weight_once);
  test_pertree (copy, pertree1, pertree2);
  SC_CHECK_ABORT (crc == p4est_checksum (copy),
                  "bad checksum after unevenly weighted partition 2");

  /* do a weighted partition with many zero weights, the nonzero
   * index pointing at the last local quadrant of the last rank
   *
   * Since this is just a test we assume the local number of
   * quadrants will fit in an int
   */
  weight_counter = 0;
  weight_index =
    (rank == num_procs - 1) ? ((int) copy->local_num_quadrants - 1) : 0;
  p4est_partition (copy, 0, weight_once);
  test_pertree (copy, pertree1, pertree2);
  SC_CHECK_ABORT (crc == p4est_checksum (copy),
                  "bad checksum after unevenly weighted partition 3");

  /* check user data content */
  for (t = copy->first_local_tree; t <= copy->last_local_tree; ++t) {
    tree = p4est_tree_array_index (copy->trees, t);
    for (qz = 0; qz < tree->quadrants.elem_count; ++qz) {
      quad = p4est_quadrant_array_index (&tree->quadrants, qz);
      user_data = (user_data_t *) quad->p.user_data;
      sum = quad->x + quad->y + quad->level;

      SC_CHECK_ABORT (user_data->a == t, "bad user_data, a");
      SC_CHECK_ABORT (user_data->sum == sum, "bad user_data, sum");
    }
  }

  /* Add another test.  Overwrites pertree1, pertree2 */
  test_partition_circle (mpicomm, connectivity, pertree1, pertree2);

  /* clean up and exit */
  P4EST_FREE (pertree1);
  P4EST_FREE (pertree2);
  P4EST_FREE (num_quadrants_in_proc);
  p4est_destroy (p4est);
  p4est_destroy (copy);
  p4est_connectivity_destroy (connectivity);
  sc_finalize ();

  mpiret = sc_MPI_Finalize ();
  SC_CHECK_MPI (mpiret);

  return 0;
}
Example #2
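Partitions a forest so that every other rank is empty, then exercises p4est_comm_parallel_env_reduce and its _ext variant to shrink the MPI communicator to the non-empty ranks, optionally reserving specific ranks.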
/**
 * Runs all tests.
 */
int
main (int argc, char **argv)
{
  const char         *this_fn_name = P4EST_STRING "_test_subcomm";
  /* options */
  const p4est_locidx_t min_quadrants = 15;
  const int           min_level = 4;
  const int           fill_uniform = 0;
  /* parallel environment */
  sc_MPI_Comm         mpicomm = sc_MPI_COMM_WORLD;
  int                 mpisize, submpisize;
  int                 mpiret;
#ifdef P4EST_ENABLE_DEBUG
  int                 rank;
#endif
  /* p4est */
  p4est_connectivity_t *connectivity;
  p4est_t            *p4est;
  p4est_locidx_t     *partition;

  /* initialize MPI */
  mpiret = sc_MPI_Init (&argc, &argv);
  SC_CHECK_MPI (mpiret);

  /* exit if MPI communicator cannot be reduced */
  mpiret = sc_MPI_Comm_size (mpicomm, &mpisize);
  SC_CHECK_MPI (mpiret);
  if (mpisize == 1) {
    mpiret = sc_MPI_Finalize ();
    SC_CHECK_MPI (mpiret);
    return 0;
  }

  /* initialize p4est */
  sc_init (mpicomm, 1, 1, NULL, SC_LP_DEFAULT);
  p4est_init (NULL, SC_LP_DEFAULT);

  /* create connectivity */
#ifdef P4_TO_P8
  connectivity = p8est_connectivity_new_unitcube ();
#else
  connectivity = p4est_connectivity_new_unitsquare ();
#endif

  /* create p4est object */
  p4est = p4est_new_ext (mpicomm, connectivity,
                         min_quadrants, min_level, fill_uniform,
                         0, NULL, NULL);

  /* write vtk: new */
  p4est_vtk_write_file (p4est, NULL, P4EST_STRING "_subcomm_new");

  /* set variables pertaining to the parallel environment */
#ifdef P4EST_ENABLE_DEBUG
  rank = p4est->mpirank;
#endif
  submpisize = mpisize / 2;
  P4EST_ASSERT (submpisize <= p4est->global_num_quadrants);

  /* construct partitioning with empty ranks */
  {
    p4est_locidx_t      n_quads_per_proc, n_quads_leftover;
    int                 p;

    partition = P4EST_ALLOC (p4est_locidx_t, mpisize);
    n_quads_per_proc = p4est->global_num_quadrants / submpisize;
    n_quads_leftover = p4est->global_num_quadrants -
      (n_quads_per_proc * submpisize);
    for (p = 0; p < mpisize; p++) {
      if (p % 2) {              /* if this rank will get quadrants */
        partition[p] = n_quads_per_proc;
      }
      else {                    /* if this rank will be empty */
        partition[p] = 0;
      }
    }
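    /* rank 1 is the first non-empty rank and absorbs the leftover quadrants */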
    partition[1] += n_quads_leftover;

    /* check partitioning */
#ifdef P4EST_ENABLE_DEBUG
    {
      p4est_gloidx_t      sum = 0;

      for (p = 0; p < mpisize; p++) {
        sum += (p4est_gloidx_t) partition[p];
      }
      P4EST_ASSERT (sum == p4est->global_num_quadrants);
    }
#endif
  }

  /*
   * Test 1: Reduce MPI communicator to non-empty ranks
   */

  P4EST_GLOBAL_INFOF ("%s: Into test 1\n", this_fn_name);
  {
    p4est_t            *p4est_subcomm;
    int                 is_nonempty;

    /* create p4est copy and re-partition */
    p4est_subcomm = p4est_copy_ext (p4est, 1, 1);
    (void) p4est_partition_given (p4est_subcomm, partition);

    /* write vtk: partitioned */
    p4est_vtk_write_file (p4est_subcomm, NULL, P4EST_STRING "_subcomm_part");

    /* reduce MPI communicator to non-empty ranks */
    is_nonempty = p4est_comm_parallel_env_reduce (&p4est_subcomm);
    P4EST_ASSERT ((is_nonempty && 0 < partition[rank]) ||
                  (!is_nonempty && 0 == partition[rank]));

    if (is_nonempty) {
      /* write vtk: reduced communicator */
      p4est_vtk_write_file (p4est_subcomm, NULL,
                            P4EST_STRING "_subcomm_sub1");

      /* destroy the p4est that has a reduced MPI communicator */
      p4est_destroy (p4est_subcomm);
    }
  }
  mpiret = sc_MPI_Barrier (mpicomm);
  SC_CHECK_MPI (mpiret);
  P4EST_GLOBAL_INFOF ("%s: Done test 1\n", this_fn_name);

  /*
   * Test 2: Reduce MPI communicator to non-empty ranks, but now the MPI
   * communicator is not owned
   */

  P4EST_GLOBAL_INFOF ("%s: Into test 2\n", this_fn_name);
  {
    p4est_t            *p4est_subcomm;
    int                 is_nonempty;

    /* create p4est copy and re-partition */
    p4est_subcomm = p4est_copy_ext (p4est, 1, 0 /* don't dup. comm. */ );
    (void) p4est_partition_given (p4est_subcomm, partition);

    /* reduce MPI communicator to non-empty ranks */
    is_nonempty = p4est_comm_parallel_env_reduce (&p4est_subcomm);
    P4EST_ASSERT ((is_nonempty && 0 < partition[rank]) ||
                  (!is_nonempty && 0 == partition[rank]));

    if (is_nonempty) {
      /* destroy the p4est that has a reduced MPI communicator */
      p4est_destroy (p4est_subcomm);
    }
  }
  mpiret = sc_MPI_Barrier (mpicomm);
  SC_CHECK_MPI (mpiret);
  P4EST_GLOBAL_INFOF ("%s: Done test 2\n", this_fn_name);

  /*
   * Test 3: Reduce MPI communicator to non-empty ranks, but keep rank 0
   */

  P4EST_GLOBAL_INFOF ("%s: Into test 3\n", this_fn_name);
  {
    p4est_t            *p4est_subcomm;
    int                 sub_exists;
    sc_MPI_Group        group, group_reserve;
    int                 reserve_range[1][3];

    /* create group of full MPI communicator */
    mpiret = sc_MPI_Comm_group (mpicomm, &group);
    SC_CHECK_MPI (mpiret);

    /* create sub-group containing only rank 0 */
    reserve_range[0][0] = 0;    /* first rank in range */
    reserve_range[0][1] = 0;    /* last rank in range */
    reserve_range[0][2] = 1;    /* stride */
    mpiret =
      sc_MPI_Group_range_incl (group, 1, reserve_range, &group_reserve);
    SC_CHECK_MPI (mpiret);

    /* create p4est copy and re-partition */
    p4est_subcomm = p4est_copy_ext (p4est, 1, 1);
    (void) p4est_partition_given (p4est_subcomm, partition);

    /* reduce MPI communicator to non-empty ranks, but keep rank 0 */
    sub_exists = p4est_comm_parallel_env_reduce_ext (&p4est_subcomm,
                                                     group_reserve, 1, NULL);
    P4EST_ASSERT ((sub_exists && (0 < partition[rank] || rank == 0)) ||
                  (!sub_exists && 0 == partition[rank]));

    if (sub_exists) {
      /* write vtk: reduced communicator */
      p4est_vtk_write_file (p4est_subcomm, NULL,
                            P4EST_STRING "_subcomm_sub3");

      /* destroy the p4est that has a reduced MPI communicator */
      p4est_destroy (p4est_subcomm);
    }
  }
  mpiret = sc_MPI_Barrier (mpicomm);
  SC_CHECK_MPI (mpiret);
  P4EST_GLOBAL_INFOF ("%s: Done test 3\n", this_fn_name);

  /*
   * Test 4: Reduce MPI communicator to non-empty ranks, but keep last 2 ranks
   */

  P4EST_GLOBAL_INFOF ("%s: Into test 4\n", this_fn_name);
  {
    p4est_t            *p4est_subcomm;
    int                 sub_exists;
    sc_MPI_Group        group, group_reserve;
    int                 reserve_range[1][3];

    /* create group of full MPI communicator */
    mpiret = sc_MPI_Comm_group (mpicomm, &group);
    SC_CHECK_MPI (mpiret);

    /* create sub-group containing only last 2 ranks */
    reserve_range[0][0] = SC_MAX (0, mpisize - 2);      /* first rank in range */
    reserve_range[0][1] = mpisize - 1;  /* last rank in range */
    reserve_range[0][2] = 1;    /* stride */
    mpiret =
      sc_MPI_Group_range_incl (group, 1, reserve_range, &group_reserve);
    SC_CHECK_MPI (mpiret);

    /* create p4est copy and re-partition */
    p4est_subcomm = p4est_copy_ext (p4est, 1, 1);
    (void) p4est_partition_given (p4est_subcomm, partition);

    /* reduce MPI communicator to non-empty ranks, but keep last 2 ranks */
    sub_exists = p4est_comm_parallel_env_reduce_ext (&p4est_subcomm,
                                                     group_reserve, 0, NULL);
    P4EST_ASSERT ((sub_exists && (0 < partition[rank] || mpisize - 2 <= rank))
                  || (!sub_exists && 0 == partition[rank]));

    if (sub_exists) {
      /* write vtk: reduced communicator */
      p4est_vtk_write_file (p4est_subcomm, NULL,
                            P4EST_STRING "_subcomm_sub4");

      /* destroy the p4est that has a reduced MPI communicator */
      p4est_destroy (p4est_subcomm);
    }
  }
  mpiret = sc_MPI_Barrier (mpicomm);
  SC_CHECK_MPI (mpiret);
  P4EST_GLOBAL_INFOF ("%s: Done test 4\n", this_fn_name);

  /* destroy */
  P4EST_FREE (partition);
  p4est_destroy (p4est);
  p4est_connectivity_destroy (connectivity);

  /* finalize */
  sc_finalize ();
  mpiret = sc_MPI_Finalize ();
  SC_CHECK_MPI (mpiret);

  return 0;
}
Example #3
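Helper called from Example #1: repartitions a small forest with one and then two deliberately empty processes, followed by a uniform partition, verifying the checksum against the original after each step.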
static void
test_partition_circle (sc_MPI_Comm mpicomm,
                       p4est_connectivity_t * connectivity,
                       p4est_gloidx_t * pertree1, p4est_gloidx_t * pertree2)
{
  int                 i, j;
  int                 num_procs;
  int                 empty_proc1, empty_proc2;
  unsigned            crc1, crc2;
  p4est_gloidx_t      global_num;
  p4est_locidx_t     *new_counts;
  p4est_t            *p4est, *copy;

  /* Create a forest and make a copy */

  circle_count = 0;
  p4est = p4est_new_ext (mpicomm, connectivity, 0, 3, 1,
                         sizeof (int), circle_init, NULL);
  num_procs = p4est->mpisize;
  test_pertree (p4est, NULL, pertree1);

  /* remember the checksum; the untouched copy is compared again at the end */
  global_num = p4est->global_num_quadrants;
  crc1 = p4est_checksum (p4est);
  copy = p4est_copy (p4est, 1);
  P4EST_ASSERT (p4est_checksum (copy) == crc1);

  new_counts = P4EST_ALLOC (p4est_locidx_t, num_procs);

  /* Partition with one empty processor */
  if (num_procs > 1) {
    P4EST_GLOBAL_INFO ("First circle partition\n");
    empty_proc1 = num_procs / 3;
    j = 0;
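    /* spread the quadrants uniformly over the remaining num_procs - 1 ranks;
     * p4est_partition_cut_gloidx (g, j, n) is the uniform cut floor (g * j / n) */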
    for (i = 0; i < num_procs; ++i) {
      if (i == empty_proc1) {
        new_counts[i] = 0;
      }
      else {
        new_counts[i] =
          p4est_partition_cut_gloidx (global_num, j + 1, num_procs - 1) -
          p4est_partition_cut_gloidx (global_num, j, num_procs - 1);
        P4EST_ASSERT (new_counts[i] >= 0);
        ++j;
      }
    }
    P4EST_ASSERT (j == num_procs - 1);
    p4est_partition_given (p4est, new_counts);
    test_pertree (p4est, pertree1, pertree2);
    crc2 = p4est_checksum (p4est);
    SC_CHECK_ABORT (crc1 == crc2, "First checksum mismatch");
  }

  /* Partition with two empty processors */
  if (num_procs > 2) {
    P4EST_GLOBAL_INFO ("Second circle partition\n");
    empty_proc1 = (2 * num_procs) / 3 - 2;
    empty_proc2 = (2 * num_procs) / 3;
    j = 0;
    for (i = 0; i < num_procs; ++i) {
      if (i == empty_proc1 || i == empty_proc2) {
        new_counts[i] = 0;
      }
      else {
        new_counts[i] =
          p4est_partition_cut_gloidx (global_num, j + 1, num_procs - 2) -
          p4est_partition_cut_gloidx (global_num, j, num_procs - 2);
        P4EST_ASSERT (new_counts[i] >= 0);
        ++j;
      }
    }
    P4EST_ASSERT (j == num_procs - 2);
    p4est_partition_given (p4est, new_counts);
    test_pertree (p4est, pertree1, pertree2);
    crc2 = p4est_checksum (p4est);
    SC_CHECK_ABORT (crc1 == crc2, "Second checksum mismatch");
  }

  /* Uniform partition */
  P4EST_GLOBAL_INFO ("Third circle partition\n");
  p4est_partition (p4est, 0, NULL);
  test_pertree (p4est, pertree1, pertree2);
  crc2 = p4est_checksum (p4est);
  SC_CHECK_ABORT (crc1 == crc2, "Third checksum mismatch");
  SC_CHECK_ABORT (p4est_is_equal (p4est, copy, 1), "Forest mismatch");

  P4EST_FREE (new_counts);
  p4est_destroy (copy);
  p4est_destroy (p4est);
}