Example #1
static void
cgraph_build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_ctors),
	     VEC_length (tree, static_ctors), 
	     sizeof (tree),
	     compare_ctor);
      build_cdtor (/*ctor_p=*/true,
		   VEC_address (tree, static_ctors),
		   VEC_length (tree, static_ctors)); 
      VEC_truncate (tree, static_ctors, 0);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_dtors),
	     VEC_length (tree, static_dtors), 
	     sizeof (tree),
	     compare_dtor);
      build_cdtor (/*ctor_p=*/false,
		   VEC_address (tree, static_dtors),
		   VEC_length (tree, static_dtors)); 
      VEC_truncate (tree, static_dtors, 0);
    }
}
static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
	{
	  SET_BIT (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    VEC_safe_push (edge, heap, stack, e);
	}
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}
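Both functions in this example lean on GCC's old C-style VEC macros from vec.h: cgraph_build_cdtor_fns drains a heap-allocated vector through qsort/VEC_address/VEC_truncate, and reachable_at_most_once uses one as an explicit DFS worklist. The sketch below condenses that idiom; it assumes vec.h and the usual tree vector instantiations are in scope, and some_cmp is a hypothetical comparator, so treat it as an illustration rather than code from the GCC tree.

static void
vec_idiom_sketch (tree t1, tree t2)
{
  VEC (tree, heap) *work = NULL;	/* A NULL VEC counts as empty.  */
  tree t;

  gcc_assert (VEC_empty (tree, work));

  /* VEC_safe_push grows the vector on demand.  */
  VEC_safe_push (tree, heap, work, t1);
  VEC_safe_push (tree, heap, work, t2);

  /* Raw storage view, the qsort pattern from cgraph_build_cdtor_fns.  */
  qsort (VEC_address (tree, work), VEC_length (tree, work),
	 sizeof (tree), some_cmp);

  /* Stack-style drain, the worklist pattern from reachable_at_most_once.  */
  while (!VEC_empty (tree, work))
    {
      t = VEC_pop (tree, work);
      (void) t;				/* ... process T here ...  */
    }

  VEC_truncate (tree, work, 0);		/* Reset the length, keep the storage.  */
  VEC_free (tree, heap, work);		/* Release the storage.  */
}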
Example #3
int
btrace_data_empty (struct btrace_data *data)
{
  switch (data->format)
    {
    case BTRACE_FORMAT_NONE:
      return 1;

    case BTRACE_FORMAT_BTS:
      return VEC_empty (btrace_block_s, data->variant.bts.blocks);
    }

  internal_error (__FILE__, __LINE__, _("Unkown branch trace format."));
}
Example #4
tree
make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
{
  tree t;
  use_operand_p imm;

  gcc_assert (DECL_P (var));

  /* If our free list has an element, then use it.  */
  if (!VEC_empty (tree, FREE_SSANAMES (fn)))
    {
      t = VEC_pop (tree, FREE_SSANAMES (fn));
#ifdef GATHER_STATISTICS
      ssa_name_nodes_reused++;
#endif

      /* The node was cleared out when we put it on the free list, so
	 there is no need to do so again here.  */
      gcc_assert (ssa_name (SSA_NAME_VERSION (t)) == NULL);
      VEC_replace (tree, SSANAMES (fn), SSA_NAME_VERSION (t), t);
    }
  else
    {
      t = make_node (SSA_NAME);
      SSA_NAME_VERSION (t) = VEC_length (tree, SSANAMES (fn));
      VEC_safe_push (tree, gc, SSANAMES (fn), t);
#ifdef GATHER_STATISTICS
      ssa_name_nodes_created++;
#endif
    }

  TREE_TYPE (t) = TREE_TYPE (var);
  SSA_NAME_VAR (t) = var;
  SSA_NAME_DEF_STMT (t) = stmt;
  SSA_NAME_PTR_INFO (t) = NULL;
  SSA_NAME_IN_FREE_LIST (t) = 0;
  SSA_NAME_IS_DEFAULT_DEF (t) = 0;
  imm = &(SSA_NAME_IMM_USE_NODE (t));
  imm->use = NULL;
  imm->prev = imm;
  imm->next = imm;
  imm->loc.ssa_name = t;

  return t;
}
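make_ssa_name_fn's fast path above pops a recycled node from FREE_SSANAMES and asserts that its SSANAMES slot is NULL; allocate_phi_node in Example #8 follows the same free-list pattern for PHI nodes. The release side is not shown on this page, so the following is only a rough reconstruction of what it has to do for that assertion to hold (the name and the exact clearing steps are assumptions, not the verbatim GCC release function).

static void
release_ssa_name_sketch (struct function *fn, tree name)
{
  if (name == NULL_TREE || SSA_NAME_IN_FREE_LIST (name))
    return;

  /* Forget the name in the per-function version table; this is what
     makes the gcc_assert in make_ssa_name_fn succeed later.  */
  VEC_replace (tree, SSANAMES (fn), SSA_NAME_VERSION (name), NULL_TREE);

  /* The real code also scrubs the node's fields at this point -- "the
     node was cleared out when we put it on the free list".  */

  SSA_NAME_IN_FREE_LIST (name) = 1;

  /* Make it available to the next make_ssa_name_fn call.  */
  VEC_safe_push (tree, gc, FREE_SSANAMES (fn), name);
}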
Example #5
static void
lto_symtab_merge_decls_2 (void **slot, bool diagnosed_p)
{
  lto_symtab_entry_t prevailing, e;
  VEC(tree, heap) *mismatches = NULL;
  unsigned i;
  tree decl;

  /* Nothing to do for a single entry.  */
  prevailing = (lto_symtab_entry_t) *slot;
  if (!prevailing->next)
    return;

  /* Try to merge each entry with the prevailing one.  */
  for (e = prevailing->next; e; e = e->next)
    {
      if (!lto_symtab_merge (prevailing, e)
	  && !diagnosed_p)
	VEC_safe_push (tree, heap, mismatches, e->decl);
    }
  if (VEC_empty (tree, mismatches))
    return;

  /* Diagnose all mismatched re-declarations.  */
  FOR_EACH_VEC_ELT (tree, mismatches, i, decl)
    {
      if (!types_compatible_p (TREE_TYPE (prevailing->decl), TREE_TYPE (decl)))
	diagnosed_p |= warning_at (DECL_SOURCE_LOCATION (decl), 0,
				   "type of %qD does not match original "
				   "declaration", decl);

      else if ((DECL_USER_ALIGN (prevailing->decl) && DECL_USER_ALIGN (decl))
	       && DECL_ALIGN (prevailing->decl) < DECL_ALIGN (decl))
	{
	  diagnosed_p |= warning_at (DECL_SOURCE_LOCATION (decl), 0,
				     "alignment of %qD is bigger than "
				     "original declaration", decl);
	}
    }
  if (diagnosed_p)
    inform (DECL_SOURCE_LOCATION (prevailing->decl),
	    "previously declared here");

  VEC_free (tree, heap, mismatches);
}
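The mismatch loop above uses FOR_EACH_VEC_ELT, while lookup_mem_region in Example #13 writes the equivalent loop out with VEC_iterate. In the vec.h of this era the former is essentially shorthand for the latter; a small side-by-side sketch (handle () is a hypothetical callback):

  unsigned i;
  tree decl;

  /* Shorthand form, as used in lto_symtab_merge_decls_2 ...  */
  FOR_EACH_VEC_ELT (tree, mismatches, i, decl)
    handle (decl);

  /* ... and roughly what it expands to, the explicit form of Example #13.  */
  for (i = 0; VEC_iterate (tree, mismatches, i, decl); ++i)
    handle (decl);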
Example #6
static struct btrace_function *
ftrace_new_gap (struct btrace_function *prev, int errcode)
{
  struct btrace_function *bfun;

  /* We hijack prev if it was empty.  */
  if (prev != NULL && prev->errcode == 0
      && VEC_empty (btrace_insn_s, prev->insn))
    bfun = prev;
  else
    bfun = ftrace_new_function (prev, NULL, NULL);

  bfun->errcode = errcode;

  ftrace_debug (bfun, "new gap");

  return bfun;
}
Example #7
static struct btrace_function *
ftrace_find_call (struct gdbarch *gdbarch, struct btrace_function *bfun)
{
  for (; bfun != NULL; bfun = bfun->up)
    {
      struct btrace_insn *last;
      CORE_ADDR pc;

      /* We do not allow empty function segments.  */
      gdb_assert (!VEC_empty (btrace_insn_s, bfun->insn));

      last = VEC_last (btrace_insn_s, bfun->insn);
      pc = last->pc;

      if (gdbarch_insn_is_call (gdbarch, pc))
	break;
    }

  return bfun;
}
Example #8
static inline gimple
allocate_phi_node (size_t len)
{
  gimple phi;
  size_t bucket = NUM_BUCKETS - 2;
  size_t size = sizeof (struct gimple_statement_phi)
	        + (len - 1) * sizeof (struct phi_arg_d);

  if (free_phinode_count)
    for (bucket = len - 2; bucket < NUM_BUCKETS - 2; bucket++)
      if (free_phinodes[bucket])
	break;

  /* If our free list has an element, then use it.  */
  if (bucket < NUM_BUCKETS - 2
      && gimple_phi_capacity (VEC_index (gimple, free_phinodes[bucket], 0))
	 >= len)
    {
      free_phinode_count--;
      phi = VEC_pop (gimple, free_phinodes[bucket]);
      if (VEC_empty (gimple, free_phinodes[bucket]))
	VEC_free (gimple, gc, free_phinodes[bucket]);
#ifdef GATHER_STATISTICS
      phi_nodes_reused++;
#endif
    }
  else
    {
      phi = (gimple) ggc_alloc (size);
#ifdef GATHER_STATISTICS
      phi_nodes_created++;
	{
	  enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);
          gimple_alloc_counts[(int) kind]++;
          gimple_alloc_sizes[(int) kind] += size;
	}
#endif
    }

  return phi;
}
Example #9
static struct btrace_thread_info *
require_btrace (void)
{
  struct thread_info *tp;
  struct btrace_thread_info *btinfo;

  DEBUG ("require");

  tp = find_thread_ptid (inferior_ptid);
  if (tp == NULL)
    error (_("No thread."));

  btrace_fetch (tp);

  btinfo = &tp->btrace;

  if (VEC_empty (btrace_inst_s, btinfo->itrace))
    error (_("No trace."));

  return btinfo;
}
Example #10
static void
create_common (gfc_common_head *com, segment_info *head, bool saw_equiv)
{
  segment_info *s, *next_s;
  tree union_type;
  tree *field_link;
  tree field;
  tree field_init = NULL_TREE;
  record_layout_info rli;
  tree decl;
  bool is_init = false;
  bool is_saved = false;

  /* Declare the variables inside the common block.
     If the current common block contains any equivalence object, then
     make a UNION_TYPE node, otherwise RECORD_TYPE. This will let the
     alias analyzer work well when there is no address overlapping for
     common variables in the current common block.  */
  if (saw_equiv)
    union_type = make_node (UNION_TYPE);
  else
    union_type = make_node (RECORD_TYPE);

  rli = start_record_layout (union_type);
  field_link = &TYPE_FIELDS (union_type);

  /* Check for overlapping initializers and replace them with a single,
     artificial field that contains all the data.  */
  if (saw_equiv)
    field = get_init_field (head, union_type, &field_init, rli);
  else
    field = NULL_TREE;

  if (field != NULL_TREE)
    {
      is_init = true;
      *field_link = field;
      field_link = &DECL_CHAIN (field);
    }

  for (s = head; s; s = s->next)
    {
      build_field (s, union_type, rli);

      /* Link the field into the type.  */
      *field_link = s->field;
      field_link = &DECL_CHAIN (s->field);

      /* Has initial value.  */
      if (s->sym->value)
        is_init = true;

      /* Has SAVE attribute.  */
      if (s->sym->attr.save)
        is_saved = true;
    }

  finish_record_layout (rli, true);

  if (com)
    decl = build_common_decl (com, union_type, is_init);
  else
    decl = build_equiv_decl (union_type, is_init, is_saved);

  if (is_init)
    {
      tree ctor, tmp;
      VEC(constructor_elt,gc) *v = NULL;

      if (field != NULL_TREE && field_init != NULL_TREE)
	CONSTRUCTOR_APPEND_ELT (v, field, field_init);
      else
	for (s = head; s; s = s->next)
	  {
	    if (s->sym->value)
	      {
		/* Add the initializer for this field.  */
		tmp = gfc_conv_initializer (s->sym->value, &s->sym->ts,
					    TREE_TYPE (s->field),
					    s->sym->attr.dimension,
					    s->sym->attr.pointer
					    || s->sym->attr.allocatable, false);

		CONSTRUCTOR_APPEND_ELT (v, s->field, tmp);
	      }
	  }

      gcc_assert (!VEC_empty (constructor_elt, v));
      ctor = build_constructor (union_type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (decl) = ctor;

#ifdef ENABLE_CHECKING
      {
	tree field, value;
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, field, value)
	  gcc_assert (TREE_CODE (field) == FIELD_DECL);
      }
#endif
    }

  /* Build component reference for each variable.  */
  for (s = head; s; s = next_s)
    {
      tree var_decl;

      var_decl = build_decl (s->sym->declared_at.lb->location,
			     VAR_DECL, DECL_NAME (s->field),
			     TREE_TYPE (s->field));
      TREE_STATIC (var_decl) = TREE_STATIC (decl);
      /* Mark the variable as used in order to avoid warnings about
	 unused variables.  */
      TREE_USED (var_decl) = 1;
      if (s->sym->attr.use_assoc)
	DECL_IGNORED_P (var_decl) = 1;
      if (s->sym->attr.target)
	TREE_ADDRESSABLE (var_decl) = 1;
      /* Fake variables are not visible from other translation units. */
      TREE_PUBLIC (var_decl) = 0;

      /* To preserve identifier names in COMMON, chain to procedure
         scope unless at top level in a module definition.  */
      if (com
          && s->sym->ns->proc_name
          && s->sym->ns->proc_name->attr.flavor == FL_MODULE)
	var_decl = pushdecl_top_level (var_decl);
      else
	gfc_add_decl_to_function (var_decl);

      SET_DECL_VALUE_EXPR (var_decl,
			   fold_build3_loc (input_location, COMPONENT_REF,
					    TREE_TYPE (s->field),
					    decl, s->field, NULL_TREE));
      DECL_HAS_VALUE_EXPR_P (var_decl) = 1;
      GFC_DECL_COMMON_OR_EQUIV (var_decl) = 1;

      if (s->sym->attr.assign)
	{
	  gfc_allocate_lang_decl (var_decl);
	  GFC_DECL_ASSIGN (var_decl) = 1;
	  GFC_DECL_STRING_LEN (var_decl) = GFC_DECL_STRING_LEN (s->field);
	  GFC_DECL_ASSIGN_ADDR (var_decl) = GFC_DECL_ASSIGN_ADDR (s->field);
	}

      s->sym->backend_decl = var_decl;

      next_s = s->next;
      free (s);
    }
}
Example #11
static int
btrace_stitch_bts (struct btrace_data_bts *btrace, struct thread_info *tp)
{
  struct btrace_thread_info *btinfo;
  struct btrace_function *last_bfun;
  struct btrace_insn *last_insn;
  btrace_block_s *first_new_block;

  btinfo = &tp->btrace;
  last_bfun = btinfo->end;
  gdb_assert (last_bfun != NULL);
  gdb_assert (!VEC_empty (btrace_block_s, btrace->blocks));

  /* If the existing trace ends with a gap, we just glue the traces
     together.  We need to drop the last (i.e. chronologically first) block
     of the new trace, though, since we can't fill in the start address.  */
  if (VEC_empty (btrace_insn_s, last_bfun->insn))
    {
      VEC_pop (btrace_block_s, btrace->blocks);
      return 0;
    }

  /* Beware that block trace starts with the most recent block, so the
     chronologically first block in the new trace is the last block in
     the new trace's block vector.  */
  first_new_block = VEC_last (btrace_block_s, btrace->blocks);
  last_insn = VEC_last (btrace_insn_s, last_bfun->insn);

  /* If the current PC at the end of the block is the same as in our current
     trace, there are two explanations:
       1. we executed the instruction and some branch brought us back.
       2. we have not made any progress.
     In the first case, the delta trace vector should contain at least two
     entries.
     In the second case, the delta trace vector should contain exactly one
     entry for the partial block containing the current PC.  Remove it.  */
  if (first_new_block->end == last_insn->pc
      && VEC_length (btrace_block_s, btrace->blocks) == 1)
    {
      VEC_pop (btrace_block_s, btrace->blocks);
      return 0;
    }

  DEBUG ("stitching %s to %s", ftrace_print_insn_addr (last_insn),
	 core_addr_to_string_nz (first_new_block->end));

  /* Do a simple sanity check to make sure we don't accidentally end up
     with a bad block.  This should not occur in practice.  */
  if (first_new_block->end < last_insn->pc)
    {
      warning (_("Error while trying to read delta trace.  Falling back to "
		 "a full read."));
      return -1;
    }

  /* We adjust the last block to start at the end of our current trace.  */
  gdb_assert (first_new_block->begin == 0);
  first_new_block->begin = last_insn->pc;

  /* We simply pop the last insn so we can insert it again as part of
     the normal branch trace computation.
     Since instruction iterators are based on indices in the instructions
     vector, we don't leave any pointers dangling.  */
  DEBUG ("pruning insn at %s for stitching",
	 ftrace_print_insn_addr (last_insn));

  VEC_pop (btrace_insn_s, last_bfun->insn);

  /* The instructions vector may become empty temporarily if this has
     been the only instruction in this function segment.
     This violates the invariant but will be remedied shortly by
     btrace_compute_ftrace when we add the new trace.  */

  /* The only case where this would hurt is if the entire trace consisted
     of just that one instruction.  If we remove it, we might turn the now
     empty btrace function segment into a gap.  But we don't want gaps at
     the beginning.  To avoid this, we remove the entire old trace.  */
  if (last_bfun == btinfo->begin && VEC_empty (btrace_insn_s, last_bfun->insn))
    btrace_clear (tp);

  return 0;
}
Example #12
static struct btrace_function *
ftrace_update_function (struct btrace_function *bfun, CORE_ADDR pc)
{
  struct bound_minimal_symbol bmfun;
  struct minimal_symbol *mfun;
  struct symbol *fun;
  struct btrace_insn *last;

  /* Try to determine the function we're in.  We use both types of symbols
     to avoid surprises when we sometimes get a full symbol and sometimes
     only a minimal symbol.  */
  fun = find_pc_function (pc);
  bmfun = lookup_minimal_symbol_by_pc (pc);
  mfun = bmfun.minsym;

  if (fun == NULL && mfun == NULL)
    DEBUG_FTRACE ("no symbol at %s", core_addr_to_string_nz (pc));

  /* If we didn't have a function or if we had a gap before, we create one.  */
  if (bfun == NULL || bfun->errcode != 0)
    return ftrace_new_function (bfun, mfun, fun);

  /* Check the last instruction, if we have one.
     We do this check first, since it allows us to fill in the call stack
     links in addition to the normal flow links.  */
  last = NULL;
  if (!VEC_empty (btrace_insn_s, bfun->insn))
    last = VEC_last (btrace_insn_s, bfun->insn);

  if (last != NULL)
    {
      switch (last->iclass)
	{
	case BTRACE_INSN_RETURN:
	  {
	    const char *fname;

	    /* On some systems, _dl_runtime_resolve returns to the resolved
	       function instead of jumping to it.  From our perspective,
	       however, this is a tailcall.
	       If we treated it as return, we wouldn't be able to find the
	       resolved function in our stack back trace.  Hence, we would
	       lose the current stack back trace and start anew with an empty
	       back trace.  When the resolved function returns, we would then
	       create a stack back trace with the same function names but
	       different frame id's.  This will confuse stepping.  */
	    fname = ftrace_print_function_name (bfun);
	    if (strcmp (fname, "_dl_runtime_resolve") == 0)
	      return ftrace_new_tailcall (bfun, mfun, fun);

	    return ftrace_new_return (bfun, mfun, fun);
	  }

	case BTRACE_INSN_CALL:
	  /* Ignore calls to the next instruction.  They are used for PIC.  */
	  if (last->pc + last->size == pc)
	    break;

	  return ftrace_new_call (bfun, mfun, fun);

	case BTRACE_INSN_JUMP:
	  {
	    CORE_ADDR start;

	    start = get_pc_function_start (pc);

	    /* If we can't determine the function for PC, we treat a jump at
	       the end of the block as tail call.  */
	    if (start == 0 || start == pc)
	      return ftrace_new_tailcall (bfun, mfun, fun);
	  }
	}
    }

  /* Check if we're switching functions for some other reason.  */
  if (ftrace_function_switched (bfun, mfun, fun))
    {
      DEBUG_FTRACE ("switching from %s in %s at %s",
		    ftrace_print_insn_addr (last),
		    ftrace_print_function_name (bfun),
		    ftrace_print_filename (bfun));

      return ftrace_new_switch (bfun, mfun, fun);
    }

  return bfun;
}
Example #13
/*
 * Look up the memory region corresponding to ADDR.
 */
struct mem_region *
lookup_mem_region (CORE_ADDR addr)
{
  static struct mem_region region;
  struct mem_region *m;
  CORE_ADDR lo;
  CORE_ADDR hi;
  int ix;

  require_target_regions ();

  /* First we initialize LO and HI so that they describe the entire
     memory space.  As we process the memory region chain, they are
     redefined to describe the minimal region containing ADDR.  LO
     and HI are used in the case where no memory region is defined
     that contains ADDR.  If a memory region is disabled, it is
     treated as if it does not exist.  The initial values for LO
     and HI represent the bottom and top of memory.  */

  lo = 0;
  hi = 0;

  /* Either find memory range containing ADDRESS, or set LO and HI
     to the nearest boundaries of an existing memory range.
     
     If we ever want to support a huge list of memory regions, this
     check should be replaced with a binary search (probably using
     VEC_lower_bound).  */
  for (ix = 0; VEC_iterate (mem_region_s, mem_region_list, ix, m); ix++)
    {
      if (m->enabled_p == 1)
	{
	  /* If the address is in the memory region, return that
	     memory range.  */
	  if (addr >= m->lo && (addr < m->hi || m->hi == 0))
	    return m;

	  /* This (correctly) won't match if m->hi == 0, representing
	     the top of the address space, because CORE_ADDR is unsigned;
	     no value of LO is less than zero.  */
	  if (addr >= m->hi && lo < m->hi)
	    lo = m->hi;

	  /* This will never set HI to zero; if we're here and ADDR
	     is at or below M, and the region starts at zero, then ADDR
	     would have been in the region.  */
	  if (addr <= m->lo && (hi == 0 || hi > m->lo))
	    hi = m->lo;
	}
    }

  /* Because no region was found, we must cons up one based on what
     was learned above.  */
  region.lo = lo;
  region.hi = hi;

  /* When no memory map is defined at all, we always return 
     'default_mem_attrib', so that we do not make all memory 
     inaccessible for targets that don't provide a memory map.  */
  if (inaccessible_by_default && !VEC_empty (mem_region_s, mem_region_list))
    region.attrib = unknown_mem_attrib;
  else
    region.attrib = default_mem_attrib;

  return &region;
}
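The comment in lookup_mem_region suggests swapping the linear scan for a binary search with VEC_lower_bound once the region list gets large. Below is a hedged sketch of that variant; it assumes mem_region_list stays sorted by lo, uses a hypothetical mem_region_lessthan ordering predicate, treats regions as non-overlapping, and skips the LO/HI nearest-boundary bookkeeping, so it is an illustration of the idea rather than code from GDB.

static int
mem_region_lessthan (const struct mem_region *lhs,
		     const struct mem_region *rhs)
{
  return lhs->lo < rhs->lo;
}

static struct mem_region *
lookup_mem_region_binary (CORE_ADDR addr)
{
  struct mem_region key;
  struct mem_region *m;
  unsigned ix;

  /* Only LO matters to the comparator.  */
  key.lo = addr;

  /* Index of the first region whose LO is not below ADDR.  */
  ix = VEC_lower_bound (mem_region_s, mem_region_list, &key,
			mem_region_lessthan);

  /* The containing region, if any, either starts exactly at ADDR ...  */
  if (ix < VEC_length (mem_region_s, mem_region_list))
    {
      m = VEC_index (mem_region_s, mem_region_list, ix);
      if (m->enabled_p && m->lo == addr && (addr < m->hi || m->hi == 0))
	return m;
    }

  /* ... or is the last region that starts below ADDR.  */
  if (ix > 0)
    {
      m = VEC_index (mem_region_s, mem_region_list, ix - 1);
      if (m->enabled_p && addr >= m->lo && (addr < m->hi || m->hi == 0))
	return m;
    }

  /* No enabled region contains ADDR; the caller would fall back to the
     LO/HI cons-up done in lookup_mem_region above.  */
  return NULL;
}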
Example #14
static struct btrace_function *
ftrace_update_function (struct gdbarch *gdbarch,
			struct btrace_function *bfun, CORE_ADDR pc)
{
  struct bound_minimal_symbol bmfun;
  struct minimal_symbol *mfun;
  struct symbol *fun;
  struct btrace_insn *last;

  /* Try to determine the function we're in.  We use both types of symbols
     to avoid surprises when we sometimes get a full symbol and sometimes
     only a minimal symbol.  */
  fun = find_pc_function (pc);
  bmfun = lookup_minimal_symbol_by_pc (pc);
  mfun = bmfun.minsym;

  if (fun == NULL && mfun == NULL)
    DEBUG_FTRACE ("no symbol at %s", core_addr_to_string_nz (pc));

  /* If we didn't have a function before, we create one.  */
  if (bfun == NULL)
    return ftrace_new_function (bfun, mfun, fun);

  /* Check the last instruction, if we have one.
     We do this check first, since it allows us to fill in the call stack
     links in addition to the normal flow links.  */
  last = NULL;
  if (!VEC_empty (btrace_insn_s, bfun->insn))
    last = VEC_last (btrace_insn_s, bfun->insn);

  if (last != NULL)
    {
      CORE_ADDR lpc;

      lpc = last->pc;

      /* Check for returns.  */
      if (gdbarch_insn_is_ret (gdbarch, lpc))
	return ftrace_new_return (gdbarch, bfun, mfun, fun);

      /* Check for calls.  */
      if (gdbarch_insn_is_call (gdbarch, lpc))
	{
	  int size;

	  size = gdb_insn_length (gdbarch, lpc);

	  /* Ignore calls to the next instruction.  They are used for PIC.  */
	  if (lpc + size != pc)
	    return ftrace_new_call (bfun, mfun, fun);
	}
    }

  /* Check if we're switching functions for some other reason.  */
  if (ftrace_function_switched (bfun, mfun, fun))
    {
      DEBUG_FTRACE ("switching from %s in %s at %s",
		    ftrace_print_insn_addr (last),
		    ftrace_print_function_name (bfun),
		    ftrace_print_filename (bfun));

      if (last != NULL)
	{
	  CORE_ADDR start, lpc;

	  start = get_pc_function_start (pc);

	  /* If we can't determine the function for PC, we treat a jump at
	     the end of the block as tail call.  */
	  if (start == 0)
	    start = pc;

	  lpc = last->pc;

	  /* Jumps indicate optimized tail calls.  */
	  if (start == pc && gdbarch_insn_is_jump (gdbarch, lpc))
	    return ftrace_new_tailcall (bfun, mfun, fun);
	}

      return ftrace_new_switch (bfun, mfun, fun);
    }

  return bfun;
}