Example #1
static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload, simply
     don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     avoid recursing at all.  */
  if (node == char_type_node)
    return;

  gcc_checking_assert (node != boolean_type_node
		       && node != boolean_true_node
		       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries, we simply stream
     some other tree instead.  A NULL tree will never be looked up, so
     it doesn't matter which tree we replace it with; just to be safe,
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME: devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as the hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    record_common_node (cache, TREE_TYPE (node));
  else if (TREE_CODE (node) == RECORD_TYPE)
    {
      /* The FIELD_DECLs of structures should be shared, so that every
	 COMPONENT_REF uses the same tree node when referencing a field.
	 Pointer equality between FIELD_DECLs is used by the alias
	 machinery to compute overlapping component references (see
	 nonoverlapping_component_refs_p and
	 nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
	record_common_node (cache, f);
    }
}
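The scheme only works because every producer appends the same nodes in the
same order: the position assigned at append time is the node's only "hash".
A minimal sketch of such a position-indexed cache (hypothetical; not the
real streamer_tree_cache_d):

#include <vector>

struct toy_cache
{
  std::vector<const void *> nodes;

  /* Mirrors streamer_tree_cache_append: a node's slot number is simply
     its position at the time it is appended.  */
  unsigned append (const void *node)
  {
    nodes.push_back (node);
    return nodes.size () - 1;
  }
};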
Example #2
void
init_exception_processing (void)
{
  tree tmp;

  /* void std::terminate (); */
  push_namespace (std_identifier);
  tmp = build_function_type_list (void_type_node, NULL_TREE);
  terminate_fn = build_cp_library_fn_ptr ("terminate", tmp,
					   ECF_NOTHROW | ECF_NORETURN
					   | ECF_COLD);
  gcc_checking_assert (TREE_THIS_VOLATILE (terminate_fn)
		       && TREE_NOTHROW (terminate_fn));
  pop_namespace ();

  /* void __cxa_call_unexpected(void *); */
  tmp = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  call_unexpected_fn
    = push_throw_library_fn (get_identifier ("__cxa_call_unexpected"), tmp);
}
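For reference, the two trees built here correspond roughly to the following
declarations (a hedged C++ rendering; ECF_NOTHROW maps to noexcept,
ECF_NORETURN to the noreturn attribute, and ECF_COLD to cold):

namespace std {
  [[noreturn]] void terminate () noexcept;	/* also marked cold */
}
extern "C" void __cxa_call_unexpected (void *);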
Example #3
static void
insert_to_assembler_name_hash (symtab_node node)
{
  gcc_checking_assert (!node->symbol.previous_sharing_asm_name
		       && !node->symbol.next_sharing_asm_name);
  if (assembler_name_hash)
    {
      void **aslot;
      tree name = DECL_ASSEMBLER_NAME (node->symbol.decl);

      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
					INSERT);
      gcc_assert (*aslot != node);
      node->symbol.next_sharing_asm_name = (symtab_node)*aslot;
      if (*aslot != NULL)
	((symtab_node)*aslot)->symbol.previous_sharing_asm_name = node;
      *aslot = node;
    }
}
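The slot acts as the head of a doubly-linked chain of symbols that share one
assembler name, and new nodes are pushed at the head.  The same pattern in
isolation (hypothetical types, not the real symtab_node):

struct sym
{
  sym *prev_sharing_name;
  sym *next_sharing_name;
};

static void
insert_at_head (sym **slot, sym *node)
{
  node->next_sharing_name = *slot;	 /* Old head becomes successor.    */
  if (*slot)
    (*slot)->prev_sharing_name = node;	 /* Back-link from the old head.   */
  *slot = node;				 /* NODE is the new head.	   */
}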
Example #4
tree
convert (tree type, tree expr)
{
  tree e = expr;
  enum tree_code code;

  if (type == TREE_TYPE (expr))
    return expr;

  if (TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (expr) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (expr)) == ERROR_MARK)
    return expr;

  gcc_checking_assert (TREE_CODE (TREE_TYPE (expr)) != VOID_TYPE);

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (expr)))
    return fold_build1_loc (input_location, NOP_EXPR, type, expr);

  code = TREE_CODE (type);
  if (code == VOID_TYPE)
    return fold_build1_loc (input_location, CONVERT_EXPR, type, e);
  if (code == BOOLEAN_TYPE)
    return fold_build1_loc (input_location, NOP_EXPR, type,
			    truthvalue_conversion (e));
  if (code == INTEGER_TYPE)
    return fold (convert_to_integer (type, e));
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    return fold (convert_to_pointer (type, e));
  if (code == REAL_TYPE)
    return fold (convert_to_real (type, e));
  if (code == COMPLEX_TYPE)
    return fold (convert_to_complex (type, e));
  if (code == VECTOR_TYPE)
    return fold (convert_to_vector (type, e));

  gcc_unreachable ();
}
Example #5
symtab_node
symtab_get_node (const_tree decl)
{
  symtab_node *slot;
  struct symtab_node_base key;

  gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
		       || (TREE_CODE (decl) == VAR_DECL
			   && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
			       || in_lto_p)));

  if (!symtab_hash)
    return NULL;

  key.decl = CONST_CAST2 (tree, const_tree, decl);

  slot = (symtab_node *) htab_find_slot (symtab_hash, &key,
					 NO_INSERT);

  if (slot)
    return *slot;
  return NULL;
}
Example #6
static void
gen_conditions_for_pow (gimple pow_call, vec<gimple> conds,
                        unsigned *nconds)
{
  tree base, expn;
  enum tree_code bc;

  gcc_checking_assert (check_pow (pow_call));

  *nconds = 0;

  base = gimple_call_arg (pow_call, 0);
  expn = gimple_call_arg (pow_call, 1);

  bc = TREE_CODE (base);

  if (bc == REAL_CST)
    gen_conditions_for_pow_cst_base (base, expn, conds, nconds);
  else if (bc == SSA_NAME)
    gen_conditions_for_pow_int_base (base, expn, conds, nconds);
  else
    gcc_unreachable ();
}
Example #7
static void
emit_case_bit_tests (gswitch *swtch, tree index_expr,
		     tree minval, tree range, tree maxval)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  unsigned int i, j, k;
  unsigned int count;

  basic_block switch_bb = gimple_bb (swtch);
  basic_block default_bb, new_default_bb, new_bb;
  edge default_edge;
  bool update_dom = dom_info_available_p (CDI_DOMINATORS);

  vec<basic_block> bbs_to_fix_dom = vNULL;

  tree index_type = TREE_TYPE (index_expr);
  tree unsigned_index_type = unsigned_type_for (index_type);
  unsigned int branch_num = gimple_switch_num_labels (swtch);

  gimple_stmt_iterator gsi;
  gassign *shift_stmt;

  tree idx, tmp, csui;
  tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
  tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
  tree word_mode_one = fold_convert (word_type_node, integer_one_node);
  int prec = TYPE_PRECISION (word_type_node);
  wide_int wone = wi::one (prec);

  memset (&test, 0, sizeof (test));

  /* Get the edge for the default case.  */
  tmp = gimple_switch_default_label (swtch);
  default_bb = label_to_block (CASE_LABEL (tmp));
  default_edge = find_edge (switch_bb, default_bb);

  /* Go through all case labels, and collect the case labels, profile
     counts, and other information we need to build the branch tests.  */
  count = 0;
  for (i = 1; i < branch_num; i++)
    {
      unsigned int lo, hi;
      tree cs = gimple_switch_label (swtch, i);
      tree label = CASE_LABEL (cs);
      edge e = find_edge (switch_bb, label_to_block (label));
      for (k = 0; k < count; k++)
	if (e == test[k].target_edge)
	  break;

      if (k == count)
	{
	  gcc_checking_assert (count < MAX_CASE_BIT_TESTS);
	  test[k].mask = wi::zero (prec);
	  test[k].target_edge = e;
	  test[k].label = label;
	  test[k].bits = 1;
	  count++;
	}
      else
        test[k].bits++;

      lo = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					  CASE_LOW (cs), minval));
      if (CASE_HIGH (cs) == NULL_TREE)
	hi = lo;
      else
	hi = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					    CASE_HIGH (cs), minval));

      for (j = lo; j <= hi; j++)
	test[k].mask |= wi::lshift (wone, j);
    }

  qsort (test, count, sizeof (*test), case_bit_test_cmp);

  /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
     the minval subtractions, but it might make the mask constants more
     expensive.  So, compare the costs.  */
  if (compare_tree_int (minval, 0) > 0
      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
    {
      int cost_diff;
      HOST_WIDE_INT m = tree_to_uhwi (minval);
      rtx reg = gen_raw_REG (word_mode, 10000);
      bool speed_p = optimize_bb_for_speed_p (gimple_bb (swtch));
      cost_diff = set_rtx_cost (gen_rtx_PLUS (word_mode, reg,
					      GEN_INT (-m)), speed_p);
      for (i = 0; i < count; i++)
	{
	  rtx r = immed_wide_int_const (test[i].mask, word_mode);
	  cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	  r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
	  cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	}
      if (cost_diff > 0)
	{
	  for (i = 0; i < count; i++)
	    test[i].mask = wi::lshift (test[i].mask, m);
	  minval = build_zero_cst (TREE_TYPE (minval));
	  range = maxval;
	}
    }

  /* We generate two jumps to the default case label.
     Split the default edge, so that we don't have to do any PHI node
     updating.  */
  new_default_bb = split_edge (default_edge);

  if (update_dom)
    {
      bbs_to_fix_dom.create (10);
      bbs_to_fix_dom.quick_push (switch_bb);
      bbs_to_fix_dom.quick_push (default_bb);
      bbs_to_fix_dom.quick_push (new_default_bb);
    }

  /* Now build the test-and-branch code.  */

  gsi = gsi_last_bb (switch_bb);

  /* idx = (unsigned)x - minval.  */
  idx = fold_convert (unsigned_index_type, index_expr);
  idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
		     fold_convert (unsigned_index_type, minval));
  idx = force_gimple_operand_gsi (&gsi, idx,
				  /*simple=*/true, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);

  /* if (idx > range) goto default */
  range = force_gimple_operand_gsi (&gsi,
				    fold_convert (unsigned_index_type, range),
				    /*simple=*/true, NULL_TREE,
				    /*before=*/true, GSI_SAME_STMT);
  tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
  new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
  if (update_dom)
    bbs_to_fix_dom.quick_push (new_bb);
  gcc_assert (gimple_bb (swtch) == new_bb);
  gsi = gsi_last_bb (new_bb);

  /* Any blocks dominated by the GIMPLE_SWITCH, but that are not successors
     of NEW_BB, are still immediately dominated by SWITCH_BB.  Make it so.  */
  if (update_dom)
    {
      vec<basic_block> dom_bbs;
      basic_block dom_son;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
      FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
	{
	  edge e = find_edge (new_bb, dom_son);
	  if (e && single_pred_p (e->dest))
	    continue;
	  set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
	  bbs_to_fix_dom.safe_push (dom_son);
	}
      dom_bbs.release ();
    }
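At the source level, the transformation performed here amounts to replacing
a switch over a small case range with bit-mask tests (a hypothetical
example; GCC of course operates on GIMPLE, not on C source):

/* Before:                        After (minval = 1, range = 8):

   switch (x)                       unsigned idx = (unsigned) x - 1;
     {                              if (idx > 8)
     case 1: case 3: case 5:          goto dflt;
       foo (); break;               csui = 1UL << idx;
     case 2: case 6: case 9:        if (csui & 0x15UL)     // idx 0, 2, 4
       bar (); break;                 goto lab_foo;
     default:                       if (csui & 0x122UL)    // idx 1, 5, 8
       baz ();                        goto lab_bar;
     }                              goto dflt;  */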
Example #8
static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload, simply
     don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     avoid recursing at all.  */
  if (node == char_type_node)
    return;

  gcc_checking_assert (node != boolean_type_node
		       && node != boolean_true_node
		       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries, we simply stream
     some other tree instead.  A NULL tree will never be looked up, so
     it doesn't matter which tree we replace it with; just to be safe,
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME: devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as the hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  switch (TREE_CODE (node))
    {
    case ERROR_MARK:
    case FIELD_DECL:
    case FIXED_POINT_TYPE:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case INTEGER_TYPE:
    case POINTER_BOUNDS_TYPE:
    case REAL_TYPE:
    case TREE_LIST:
    case VOID_CST:
    case VOID_TYPE:
      /* No recursive trees.  */
      break;
    case ARRAY_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      record_common_node (cache, TREE_TYPE (node));
      break;
    case COMPLEX_TYPE:
      /* Verify that a complex type's component type (node_type) has been
	 handled already (and we thus don't need to recurse here).  */
      verify_common_node_recorded (cache, TREE_TYPE (node));
      break;
    case RECORD_TYPE:
      /* The FIELD_DECLs of structures should be shared, so that every
	 COMPONENT_REF uses the same tree node when referencing a field.
	 Pointer equality between FIELD_DECLs is used by the alias
	 machinery to compute overlapping component references (see
	 nonoverlapping_component_refs_p and
	 nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
	record_common_node (cache, f);
      break;
    default:
      /* Unexpected tree code.  */
      gcc_unreachable ();
    }
}
Example #9
static void
emit_case_bit_tests (gimple swtch, tree index_expr,
		     tree minval, tree range)
{
  struct case_bit_test test[MAX_CASE_BIT_TESTS];
  unsigned int i, j, k;
  unsigned int count;

  basic_block switch_bb = gimple_bb (swtch);
  basic_block default_bb, new_default_bb, new_bb;
  edge default_edge;
  bool update_dom = dom_info_available_p (CDI_DOMINATORS);

  vec<basic_block> bbs_to_fix_dom = vNULL;

  tree index_type = TREE_TYPE (index_expr);
  tree unsigned_index_type = unsigned_type_for (index_type);
  unsigned int branch_num = gimple_switch_num_labels (swtch);

  gimple_stmt_iterator gsi;
  gimple shift_stmt;

  tree idx, tmp, csui;
  tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
  tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
  tree word_mode_one = fold_convert (word_type_node, integer_one_node);

  memset (&test, 0, sizeof (test));

  /* Get the edge for the default case.  */
  tmp = gimple_switch_default_label (swtch);
  default_bb = label_to_block (CASE_LABEL (tmp));
  default_edge = find_edge (switch_bb, default_bb);

  /* Go through all case labels, and collect the case labels, profile
     counts, and other information we need to build the branch tests.  */
  count = 0;
  for (i = 1; i < branch_num; i++)
    {
      unsigned int lo, hi;
      tree cs = gimple_switch_label (swtch, i);
      tree label = CASE_LABEL (cs);
      edge e = find_edge (switch_bb, label_to_block (label));
      for (k = 0; k < count; k++)
	if (e == test[k].target_edge)
	  break;

      if (k == count)
	{
	  gcc_checking_assert (count < MAX_CASE_BIT_TESTS);
	  test[k].hi = 0;
	  test[k].lo = 0;
	  test[k].target_edge = e;
	  test[k].label = label;
	  test[k].bits = 1;
	  count++;
	}
      else
        test[k].bits++;

      lo = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					  CASE_LOW (cs), minval));
      if (CASE_HIGH (cs) == NULL_TREE)
	hi = lo;
      else
	hi = tree_to_uhwi (int_const_binop (MINUS_EXPR,
					    CASE_HIGH (cs), minval));

      for (j = lo; j <= hi; j++)
        if (j >= HOST_BITS_PER_WIDE_INT)
	  test[k].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
	else
	  test[k].lo |= (HOST_WIDE_INT) 1 << j;
    }

  qsort (test, count, sizeof (*test), case_bit_test_cmp);

  /* We generate two jumps to the default case label.
     Split the default edge, so that we don't have to do any PHI node
     updating.  */
  new_default_bb = split_edge (default_edge);

  if (update_dom)
    {
      bbs_to_fix_dom.create (10);
      bbs_to_fix_dom.quick_push (switch_bb);
      bbs_to_fix_dom.quick_push (default_bb);
      bbs_to_fix_dom.quick_push (new_default_bb);
    }

  /* Now build the test-and-branch code.  */

  gsi = gsi_last_bb (switch_bb);

  /* idx = (unsigned)x - minval.  */
  idx = fold_convert (unsigned_index_type, index_expr);
  idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
		     fold_convert (unsigned_index_type, minval));
  idx = force_gimple_operand_gsi (&gsi, idx,
				  /*simple=*/true, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);

  /* if (idx > range) goto default */
  range = force_gimple_operand_gsi (&gsi,
				    fold_convert (unsigned_index_type, range),
				    /*simple=*/true, NULL_TREE,
				    /*before=*/true, GSI_SAME_STMT);
  tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
  new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
  if (update_dom)
    bbs_to_fix_dom.quick_push (new_bb);
  gcc_assert (gimple_bb (swtch) == new_bb);
  gsi = gsi_last_bb (new_bb);

  /* Any blocks dominated by the GIMPLE_SWITCH, but that are not successors
     of NEW_BB, are still immediately dominated by SWITCH_BB.  Make it so.  */
  if (update_dom)
    {
      vec<basic_block> dom_bbs;
      basic_block dom_son;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
      FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
	{
	  edge e = find_edge (new_bb, dom_son);
	  if (e && single_pred_p (e->dest))
	    continue;
	  set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
	  bbs_to_fix_dom.safe_push (dom_son);
	}
      dom_bbs.release ();
    }
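This older variant represents each case mask as a pair of HOST_WIDE_INTs
(hi/lo) rather than a wide_int, so setting bit J first has to pick the
right word.  The same idea in isolation, with fixed 64-bit words (a
sketch, not GCC code):

#include <stdint.h>

struct mask128
{
  uint64_t lo;	/* bits 0..63    */
  uint64_t hi;	/* bits 64..127  */
};

static void
set_bit (struct mask128 *m, unsigned j)
{
  if (j >= 64)
    m->hi |= (uint64_t) 1 << (j - 64);
  else
    m->lo |= (uint64_t) 1 << j;
}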
Example #10
tree
ctor_for_folding (tree decl)
{
  varpool_node *node, *real_node;
  tree real_decl;

  if (TREE_CODE (decl) != VAR_DECL
      && TREE_CODE (decl) != CONST_DECL)
    return error_mark_node;

  if (TREE_CODE (decl) == CONST_DECL
      || DECL_IN_CONSTANT_POOL (decl))
    return DECL_INITIAL (decl);

  if (TREE_THIS_VOLATILE (decl))
    return error_mark_node;

  /* Do not care about automatic variables.  Those are never initialized
     anyway, because the gimplifier expands the code.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    {
      gcc_assert (!TREE_PUBLIC (decl));
      return error_mark_node;
    }

  gcc_assert (TREE_CODE (decl) == VAR_DECL);

  node = varpool_get_node (decl);
  if (node)
    {
      real_node = varpool_variable_node (node);
      real_decl = real_node->decl;
    }
  else
    real_decl = decl;

  /* See if we are dealing with an alias.
     In most cases an alias is just an alternative symbol pointing to a
     given constructor.  This allows us to use the interposition rules of
     the DECL constructor of REAL_NODE.  However, weakrefs are special:
     they are just an alternative name for their target (if defined).  */
  if (decl != real_decl)
    {
      gcc_assert (!DECL_INITIAL (decl)
		  || DECL_INITIAL (decl) == error_mark_node);
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
	{
	  node = varpool_alias_target (node);
	  decl = node->decl;
	}
    }

  /* Vtables are defined by their types and must match regardless of
     interposition rules.  */
  if (DECL_VIRTUAL_P (real_decl))
    {
      gcc_checking_assert (TREE_READONLY (real_decl));
      return DECL_INITIAL (real_decl);
    }

  /* If there is no constructor, we have nothing to do.  */
  if (DECL_INITIAL (real_decl) == error_mark_node)
    return error_mark_node;

  /* A non-readonly alias of a readonly variable is de-facto readonly too,
     because the variable itself lives in a readonly section.
     We also honor the READONLY flag on the alias, assuming that the user
     knows what they are doing.  */
  if (!TREE_READONLY (decl) && !TREE_READONLY (real_decl))
    return error_mark_node;

  /* Variables declared 'const' without an initializer
     have zero as the initializer if they may not be
     overridden at link or run time.  */
  if (!DECL_INITIAL (real_decl)
      && (DECL_EXTERNAL (decl) || decl_replaceable_p (decl)))
    return error_mark_node;

  /* Variables declared `const' with an initializer are considered
     not to be overwritable with a different initializer by default.

     ??? Previously we behaved so for scalar variables but not for array
     accesses.  */
  return DECL_INITIAL (real_decl);
}
Example #11
static bool
lto_symtab_merge_p (tree prevailing, tree decl)
{
  if (TREE_CODE (prevailing) != TREE_CODE (decl))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Not merging decls; "
		 "TREE_CODE mismatch\n");
      return false;
    }
  gcc_checking_assert (TREE_CHAIN (prevailing) == TREE_CHAIN (decl));
  
  if (TREE_CODE (prevailing) == FUNCTION_DECL)
    {
      if (DECL_BUILT_IN (prevailing) != DECL_BUILT_IN (decl))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN mismatch\n");
	  return false;
	}
      if (DECL_BUILT_IN (prevailing)
	  && (DECL_BUILT_IN_CLASS (prevailing) != DECL_BUILT_IN_CLASS (decl)
	      || DECL_FUNCTION_CODE (prevailing) != DECL_FUNCTION_CODE (decl)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN_CLASS or CODE mismatch\n");
	  return false;
	}
    }

  /* FIXME: after MPX is removed, use flags_from_decl_or_type
     function instead.  PR lto/85248.  */
  if (DECL_ATTRIBUTES (prevailing) != DECL_ATTRIBUTES (decl))
    {
      tree prev_attr = lookup_attribute ("error", DECL_ATTRIBUTES (prevailing));
      tree attr = lookup_attribute ("error", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr && !attribute_value_equal (prev_attr, attr)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "error attribute mismatch\n");
	  return false;
	}

      prev_attr = lookup_attribute ("warning", DECL_ATTRIBUTES (prevailing));
      attr = lookup_attribute ("warning", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr && !attribute_value_equal (prev_attr, attr)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "warning attribute mismatch\n");
	  return false;
	}

      prev_attr = lookup_attribute ("noreturn", DECL_ATTRIBUTES (prevailing));
      attr = lookup_attribute ("noreturn", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "noreturn attribute mismatch\n");
	  return false;
	}
    }
  return true;
}
Example #12
static inline tree
chrec_fold_plus_poly_poly (enum tree_code code,
			   tree type,
			   tree poly0,
			   tree poly1)
{
  tree left, right;
  struct loop *loop0 = get_chrec_loop (poly0);
  struct loop *loop1 = get_chrec_loop (poly1);
  tree rtype = code == POINTER_PLUS_EXPR ? chrec_type (poly1) : type;

  gcc_assert (poly0);
  gcc_assert (poly1);
  gcc_assert (TREE_CODE (poly0) == POLYNOMIAL_CHREC);
  gcc_assert (TREE_CODE (poly1) == POLYNOMIAL_CHREC);
  if (POINTER_TYPE_P (chrec_type (poly0)))
    gcc_checking_assert (ptrofftype_p (chrec_type (poly1))
			 && useless_type_conversion_p (type, chrec_type (poly0)));
  else
    gcc_checking_assert (useless_type_conversion_p (type, chrec_type (poly0))
			 && useless_type_conversion_p (type, chrec_type (poly1)));

  /*
    {a, +, b}_1 + {c, +, d}_2  ->  {{a, +, b}_1 + c, +, d}_2,
    {a, +, b}_2 + {c, +, d}_1  ->  {{c, +, d}_1 + a, +, b}_2,
    {a, +, b}_x + {c, +, d}_x  ->  {a+c, +, b+d}_x.  */
  if (flow_loop_nested_p (loop0, loop1))
    {
      if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly1),
	   chrec_fold_plus (type, poly0, CHREC_LEFT (poly1)),
	   CHREC_RIGHT (poly1));
      else
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly1),
	   chrec_fold_minus (type, poly0, CHREC_LEFT (poly1)),
	   chrec_fold_multiply (type, CHREC_RIGHT (poly1),
				SCALAR_FLOAT_TYPE_P (type)
				? build_real (type, dconstm1)
				: build_int_cst_type (type, -1)));
    }

  if (flow_loop_nested_p (loop1, loop0))
    {
      if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly0),
	   chrec_fold_plus (type, CHREC_LEFT (poly0), poly1),
	   CHREC_RIGHT (poly0));
      else
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly0),
	   chrec_fold_minus (type, CHREC_LEFT (poly0), poly1),
	   CHREC_RIGHT (poly0));
    }

  /* This function should never be called for chrecs of loops that
     do not belong to the same loop nest.  */
  gcc_assert (loop0 == loop1);

  if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
    {
      left = chrec_fold_plus
	(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
      right = chrec_fold_plus
	(rtype, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
    }
  else
    {
      left = chrec_fold_minus
	(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
      right = chrec_fold_minus
	(type, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
    }

  if (chrec_zerop (right))
    return left;
  else
    return build_polynomial_chrec
      (CHREC_VARIABLE (poly0), left, right);
}
Example #13
static tree
chrec_fold_plus_1 (enum tree_code code, tree type,
		   tree op0, tree op1)
{
  if (automatically_generated_chrec_p (op0)
      || automatically_generated_chrec_p (op1))
    return chrec_fold_automatically_generated_operands (op0, op1);

  switch (TREE_CODE (op0))
    {
    case POLYNOMIAL_CHREC:
      gcc_checking_assert
	(!chrec_contains_symbols_defined_in_loop (op0, CHREC_VARIABLE (op0)));
      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  gcc_checking_assert
	    (!chrec_contains_symbols_defined_in_loop (op1,
						      CHREC_VARIABLE (op1)));
	  return chrec_fold_plus_poly_poly (code, type, op0, op1);

	CASE_CONVERT:
	  if (tree_contains_chrecs (op1, NULL))
	    return chrec_dont_know;

	default:
	  if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	    return build_polynomial_chrec
	      (CHREC_VARIABLE (op0),
	       chrec_fold_plus (type, CHREC_LEFT (op0), op1),
	       CHREC_RIGHT (op0));
	  else
	    return build_polynomial_chrec
	      (CHREC_VARIABLE (op0),
	       chrec_fold_minus (type, CHREC_LEFT (op0), op1),
	       CHREC_RIGHT (op0));
	}

    CASE_CONVERT:
      if (tree_contains_chrecs (op0, NULL))
	return chrec_dont_know;

    default:
      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  gcc_checking_assert
	    (!chrec_contains_symbols_defined_in_loop (op1,
						      CHREC_VARIABLE (op1)));
	  if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	    return build_polynomial_chrec
	      (CHREC_VARIABLE (op1),
	       chrec_fold_plus (type, op0, CHREC_LEFT (op1)),
	       CHREC_RIGHT (op1));
	  else
	    return build_polynomial_chrec
	      (CHREC_VARIABLE (op1),
	       chrec_fold_minus (type, op0, CHREC_LEFT (op1)),
	       chrec_fold_multiply (type, CHREC_RIGHT (op1),
				    SCALAR_FLOAT_TYPE_P (type)
				    ? build_real (type, dconstm1)
				    : build_int_cst_type (type, -1)));

	CASE_CONVERT:
	  if (tree_contains_chrecs (op1, NULL))
	    return chrec_dont_know;

	default:
	  {
	    int size = 0;
	    if ((tree_contains_chrecs (op0, &size)
		 || tree_contains_chrecs (op1, &size))
		&& size < PARAM_VALUE (PARAM_SCEV_MAX_EXPR_SIZE))
	      return build2 (code, type, op0, op1);
	    else if (size < PARAM_VALUE (PARAM_SCEV_MAX_EXPR_SIZE))
	      {
		if (code == POINTER_PLUS_EXPR)
		  return fold_build_pointer_plus (fold_convert (type, op0),
						  op1);
		else
		  return fold_build2 (code, type,
				      fold_convert (type, op0),
				      fold_convert (type, op1));
	      }
	    else
	      return chrec_dont_know;
	  }
	}
    }
}
Example #14
/* Create a clone of edge E in the callgraph node N; the clone is
   represented by CALL_STMT.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
		   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = apply_probability (e->count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
	{
	  struct cgraph_node *callee = cgraph_get_node (decl);
	  gcc_checking_assert (callee);
	  new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
	}
      else
	{
	  new_edge = cgraph_create_indirect_edge (n, call_stmt,
						  e->indirect_info->ecf_flags,
						  count, freq);
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
      if (e->indirect_info)
	{
	  new_edge->indirect_info
	    = ggc_alloc_cleared_cgraph_indirect_call_info ();
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  new_edge->speculative = e->speculative;
  if (update_original)
    {
      e->count -= new_edge->count;
      if (e->count < 0)
	e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
Example #15
static tree
fold_const_call_1 (built_in_function fn, tree type, tree arg0, tree arg1)
{
  machine_mode mode = TYPE_MODE (type);
  machine_mode arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
  machine_mode arg1_mode = TYPE_MODE (TREE_TYPE (arg1));

  if (arg0_mode == arg1_mode
      && real_cst_p (arg0)
      && real_cst_p (arg1))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg0_mode));
      if (mode == arg0_mode)
	{
	  /* real, real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sss (&result, fn, TREE_REAL_CST_PTR (arg0),
				   TREE_REAL_CST_PTR (arg1),
				   REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  if (real_cst_p (arg0)
      && integer_cst_p (arg1))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg0_mode));
      if (mode == arg0_mode)
	{
	  /* real, int -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sss (&result, fn, TREE_REAL_CST_PTR (arg0),
				   arg1, REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  if (integer_cst_p (arg0)
      && real_cst_p (arg1))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg1_mode));
      if (mode == arg1_mode)
	{
	  /* int, real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sss (&result, fn, arg0,
				   TREE_REAL_CST_PTR (arg1),
				   REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  if (arg0_mode == arg1_mode
      && complex_cst_p (arg0)
      && complex_cst_p (arg1))
    {
      gcc_checking_assert (COMPLEX_MODE_P (arg0_mode));
      machine_mode inner_mode = GET_MODE_INNER (arg0_mode);
      tree arg0r = TREE_REALPART (arg0);
      tree arg0i = TREE_IMAGPART (arg0);
      tree arg1r = TREE_REALPART (arg1);
      tree arg1i = TREE_IMAGPART (arg1);
      if (mode == arg0_mode
	  && real_cst_p (arg0r)
	  && real_cst_p (arg0i)
	  && real_cst_p (arg1r)
	  && real_cst_p (arg1i))
	{
	  /* complex real, complex real -> complex real.  */
	  REAL_VALUE_TYPE result_real, result_imag;
	  if (fold_const_call_ccc (&result_real, &result_imag, fn,
				   TREE_REAL_CST_PTR (arg0r),
				   TREE_REAL_CST_PTR (arg0i),
				   TREE_REAL_CST_PTR (arg1r),
				   TREE_REAL_CST_PTR (arg1i),
				   REAL_MODE_FORMAT (inner_mode)))
	    return build_complex (type,
				  build_real (TREE_TYPE (type), result_real),
				  build_real (TREE_TYPE (type), result_imag));
	}
      return NULL_TREE;
    }

  return NULL_TREE;
}
Example #16
static tree
fold_const_call_1 (built_in_function fn, tree type, tree arg)
{
  machine_mode mode = TYPE_MODE (type);
  machine_mode arg_mode = TYPE_MODE (TREE_TYPE (arg));

  if (integer_cst_p (arg))
    {
      if (SCALAR_INT_MODE_P (mode))
	{
	  wide_int result;
	  if (fold_const_call_ss (&result, fn, arg, TYPE_PRECISION (type),
				  TREE_TYPE (arg)))
	    return wide_int_to_tree (type, result);
	}
      return NULL_TREE;
    }

  if (real_cst_p (arg))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg_mode));
      if (mode == arg_mode)
	{
	  /* real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_ss (&result, fn, TREE_REAL_CST_PTR (arg),
				  REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE_INNER (mode) == arg_mode)
	{
	  /* real -> complex real.  */
	  REAL_VALUE_TYPE result_real, result_imag;
	  if (fold_const_call_cs (&result_real, &result_imag, fn,
				  TREE_REAL_CST_PTR (arg),
				  REAL_MODE_FORMAT (arg_mode)))
	    return build_complex (type,
				  build_real (TREE_TYPE (type), result_real),
				  build_real (TREE_TYPE (type), result_imag));
	}
      else if (INTEGRAL_TYPE_P (type))
	{
	  /* real -> int.  */
	  wide_int result;
	  if (fold_const_call_ss (&result, fn,
				  TREE_REAL_CST_PTR (arg),
				  TYPE_PRECISION (type),
				  REAL_MODE_FORMAT (arg_mode)))
	    return wide_int_to_tree (type, result);
	}
      return NULL_TREE;
    }

  if (complex_cst_p (arg))
    {
      gcc_checking_assert (COMPLEX_MODE_P (arg_mode));
      machine_mode inner_mode = GET_MODE_INNER (arg_mode);
      tree argr = TREE_REALPART (arg);
      tree argi = TREE_IMAGPART (arg);
      if (mode == arg_mode
	  && real_cst_p (argr)
	  && real_cst_p (argi))
	{
	  /* complex real -> complex real.  */
	  REAL_VALUE_TYPE result_real, result_imag;
	  if (fold_const_call_cc (&result_real, &result_imag, fn,
				  TREE_REAL_CST_PTR (argr),
				  TREE_REAL_CST_PTR (argi),
				  REAL_MODE_FORMAT (inner_mode)))
	    return build_complex (type,
				  build_real (TREE_TYPE (type), result_real),
				  build_real (TREE_TYPE (type), result_imag));
	}
      if (mode == inner_mode
	  && real_cst_p (argr)
	  && real_cst_p (argi))
	{
	  /* complex real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sc (&result, fn,
				  TREE_REAL_CST_PTR (argr),
				  TREE_REAL_CST_PTR (argi),
				  REAL_MODE_FORMAT (inner_mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  return NULL_TREE;
}
Example #17
tree
ubsan_create_data (const char *name, const location_t *ploc,
		   const struct ubsan_mismatch_data *mismatch, ...)
{
  va_list args;
  tree ret, t;
  tree fields[5];
  vec<tree, va_gc> *saved_args = NULL;
  size_t i = 0;
  location_t loc = UNKNOWN_LOCATION;

  /* Firstly, create a pointer to type descriptor type.  */
  tree td_type = ubsan_type_descriptor_type ();
  TYPE_READONLY (td_type) = 1;
  td_type = build_pointer_type (td_type);

  /* Create the structure type.  */
  ret = make_node (RECORD_TYPE);
  if (ploc != NULL)
    {
      loc = LOCATION_LOCUS (*ploc);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      ubsan_source_location_type ());
      DECL_CONTEXT (fields[i]) = ret;
      i++;
    }

  va_start (args, mismatch);
  for (t = va_arg (args, tree); t != NULL_TREE;
       i++, t = va_arg (args, tree))
    {
      gcc_checking_assert (i < 3);
      /* Save the tree arguments for later use.  */
      vec_safe_push (saved_args, t);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      td_type);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  va_end (args);

  if (mismatch != NULL)
    {
      /* We have to add two more decls.  */
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      DECL_CHAIN (fields[i - 1]) = fields[i];
      i++;

      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      unsigned_char_type_node);
      DECL_CONTEXT (fields[i]) = ret;
      DECL_CHAIN (fields[i - 1]) = fields[i];
      i++;
    }

  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier (name);
  layout_type (ret);

  /* Now, fill in the type.  */
  char tmp_name[32];
  static unsigned int ubsan_var_id_num;
  ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_var_id_num++);
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
			 ret);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  DECL_EXTERNAL (var) = 0;

  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, i);
  tree ctor = build_constructor (ret, v);

  /* If desirable, set the __ubsan_source_location element.  */
  if (ploc != NULL)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));

  size_t nelts = vec_safe_length (saved_args);
  for (i = 0; i < nelts; i++)
    {
      t = (*saved_args)[i];
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
    }

  if (mismatch != NULL)
    {
      /* Append the pointer data.  */
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, mismatch->align);
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, mismatch->ckind);
    }

  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  varpool_finalize_decl (var);

  return var;
}
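The record built here roughly mirrors the handler data layout that the
ubsan runtime expects; as an illustration only (field names modeled on
libubsan, not taken from GCC):

#include <stdint.h>

struct SourceLocation { const char *file; uint32_t line, column; };
struct TypeDescriptor;			   /* opaque in this sketch	  */

struct ubsan_data_example
{
  struct SourceLocation loc;	   /* emitted when PLOC is non-null	  */
  const struct TypeDescriptor *td; /* one such field per vararg (max 3)	  */
  uintptr_t align;		   /* from MISMATCH->align		  */
  unsigned char ckind;		   /* from MISMATCH->ckind		  */
};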
Example #18
static bool
lto_symtab_merge_p (tree prevailing, tree decl)
{
  if (TREE_CODE (prevailing) != TREE_CODE (decl))
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Not merging decls; "
		 "TREE_CODE mismatch\n");
      return false;
    }
  gcc_checking_assert (TREE_CHAIN (prevailing) == TREE_CHAIN (decl));
  
  if (TREE_CODE (prevailing) == FUNCTION_DECL)
    {
      if (DECL_BUILT_IN (prevailing) != DECL_BUILT_IN (decl))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN mismatch\n");
	  return false;
	}
      if (DECL_BUILT_IN (prevailing)
	  && (DECL_BUILT_IN_CLASS (prevailing) != DECL_BUILT_IN_CLASS (decl)
	      || DECL_FUNCTION_CODE (prevailing) != DECL_FUNCTION_CODE (decl)))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "DECL_BUILT_IN_CLASS or CODE mismatch\n");
	  return false;
	}
    }
  if (DECL_ATTRIBUTES (prevailing) != DECL_ATTRIBUTES (decl))
    {
      tree prev_attr = lookup_attribute ("error", DECL_ATTRIBUTES (prevailing));
      tree attr = lookup_attribute ("error", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr
	      && TREE_VALUE (TREE_VALUE (prev_attr))
		 != TREE_VALUE (TREE_VALUE (attr))))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "error attribute mismatch\n");
	  return false;
	}

      prev_attr = lookup_attribute ("warning", DECL_ATTRIBUTES (prevailing));
      attr = lookup_attribute ("warning", DECL_ATTRIBUTES (decl));
      if ((prev_attr == NULL) != (attr == NULL)
	  || (prev_attr
	      && TREE_VALUE (TREE_VALUE (prev_attr))
		 != TREE_VALUE (TREE_VALUE (attr))))
	{
          if (symtab->dump_file)
	    fprintf (symtab->dump_file, "Not merging decls; "
		     "warning attribute mismatch\n");
	  return false;
	}
    }
  return true;
}
Example #19
alloc_pool
create_alloc_pool (const char *name, size_t size, size_t num)
{
  alloc_pool pool;
  size_t header_size;

  gcc_checking_assert (name);

  /* Make size large enough to store the list header.  */
  if (size < sizeof (alloc_pool_list))
    size = sizeof (alloc_pool_list);

  /* Now align the size to a multiple of 8.  */
  size = align_eight (size);

#ifdef ENABLE_CHECKING
  /* Add the aligned size of ID.  */
  size += offsetof (allocation_object, u.data);
#endif

  /* We can't allocate 0 elements per block.  */
  gcc_checking_assert (num);

  /* Allocate memory for the pool structure.  */
  pool = XNEW (struct alloc_pool_def);

  /* Now init the various pieces of our pool structure.  */
  pool->name = /*xstrdup (name)*/name;
  pool->elt_size = size;
  pool->elts_per_block = num;

  if (GATHER_STATISTICS)
    {
      struct alloc_pool_descriptor *desc = allocate_pool_descriptor (name);
      desc->elt_size = size;
      desc->created++;
    }

  /* List header size should be a multiple of 8.  */
  header_size = align_eight (sizeof (struct alloc_pool_list_def));

  pool->block_size = (size * num) + header_size;
  pool->returned_free_list = NULL;
  pool->virgin_free_list = NULL;
  pool->virgin_elts_remaining = 0;
  pool->elts_allocated = 0;
  pool->elts_free = 0;
  pool->blocks_allocated = 0;
  pool->block_list = NULL;

#ifdef ENABLE_CHECKING
  /* Increase the last used ID and use it for this pool.
     ID == 0 is used for free elements of pool so skip it.  */
  last_id++;
  if (last_id == 0)
    last_id++;

  pool->id = last_id;
#endif

  return (pool);
}
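align_eight rounds the element size up to a multiple of 8 so that the
embedded list headers stay aligned.  One common way to write such a
rounding (a sketch; see alloc-pool.c for the real definition):

#include <stddef.h>

/* Round X up to the next multiple of 8.  */
static inline size_t
round_up_8 (size_t x)
{
  return (x + 7) & ~(size_t) 7;
}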
Example #20
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering; we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
Example #21
tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
        {
          if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
					        /*id=*/this_identifier,
					        init);
          else
	    this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast ([expr.cast] 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
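The scope walk in the middle of the function corresponds to this kind of
source (a hypothetical C++ example): the member use in the innermost lambda
forces 'this' to be captured by every enclosing default-capturing lambda,
out to the non-lambda member function.

struct S
{
  int m;
  void f ()
  {
    auto outer = [=] ()
    {
      /* 'm' here needs 'this'; the walk moves out through OUTER (which
	 has a default capture) to F, whose first parameter is 'this'.  */
      auto inner = [=] () { return m; };
      return inner ();
    };
    outer ();
  }
};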
Example #22
tree
ubsan_create_data (const char *name, location_t loc, ...)
{
    va_list args;
    tree ret, t;
    tree fields[3];
    VEC(tree, gc) *saved_args = NULL;
    size_t i = 0;

    /* Firstly, create a pointer to type descriptor type.  */
    tree td_type = ubsan_type_descriptor_type ();
    TYPE_READONLY (td_type) = 1;
    td_type = build_pointer_type (td_type);

    /* Create the structure type.  */
    ret = make_node (RECORD_TYPE);
    if (loc != UNKNOWN_LOCATION)
    {
        fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
                                ubsan_source_location_type ());
        DECL_CONTEXT (fields[i]) = ret;
        i++;
    }

    va_start (args, loc);
    for (t = va_arg (args, tree); t != NULL_TREE;
            i++, t = va_arg (args, tree))
    {
        gcc_checking_assert (i < 3);
        /* Save the tree argument for later use.  */
        VEC_safe_push (tree, gc, saved_args, t);
        fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
                                td_type);
        DECL_CONTEXT (fields[i]) = ret;
        if (i)
            DECL_CHAIN (fields[i - 1]) = fields[i];
    }
    TYPE_FIELDS (ret) = fields[0];
    TYPE_NAME (ret) = get_identifier (name);
    layout_type (ret);
    va_end (args);

    /* Now, fill in the type.  */
    char tmp_name[32];
    static unsigned int ubsan_var_id_num;
    ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_var_id_num++);
    tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
                           ret);
    TREE_STATIC (var) = 1;
    TREE_PUBLIC (var) = 0;
    DECL_ARTIFICIAL (var) = 1;
    DECL_IGNORED_P (var) = 1;
    DECL_EXTERNAL (var) = 0;

    VEC(constructor_elt, gc) *v;
    v = VEC_alloc (constructor_elt, gc, i);

    tree ctor = build_constructor (ret, v);

    /* If desirable, set the __ubsan_source_location element.  */
    if (loc != UNKNOWN_LOCATION)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));

    size_t nelts = VEC_length (tree, saved_args);
    for (i = 0; i < nelts; i++)
    {
        t = VEC_index (tree, saved_args, i);
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
    }

    TREE_CONSTANT (ctor) = 1;
    TREE_STATIC (ctor) = 1;
    DECL_INITIAL (var) = ctor;
    rest_of_decl_compilation (var, 1, 0);

    return var;
}
Example #23
tree
chrec_fold_multiply (tree type,
		     tree op0,
		     tree op1)
{
  if (automatically_generated_chrec_p (op0)
      || automatically_generated_chrec_p (op1))
    return chrec_fold_automatically_generated_operands (op0, op1);

  switch (TREE_CODE (op0))
    {
    case POLYNOMIAL_CHREC:
      gcc_checking_assert
	(!chrec_contains_symbols_defined_in_loop (op0, CHREC_VARIABLE (op0)));
      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  gcc_checking_assert
	    (!chrec_contains_symbols_defined_in_loop (op1,
						      CHREC_VARIABLE (op1)));
	  return chrec_fold_multiply_poly_poly (type, op0, op1);

	CASE_CONVERT:
	  if (tree_contains_chrecs (op1, NULL))
	    return chrec_dont_know;

	default:
	  if (integer_onep (op1))
	    return op0;
	  if (integer_zerop (op1))
	    return build_int_cst (type, 0);

	  return build_polynomial_chrec
	    (CHREC_VARIABLE (op0),
	     chrec_fold_multiply (type, CHREC_LEFT (op0), op1),
	     chrec_fold_multiply (type, CHREC_RIGHT (op0), op1));
	}

    CASE_CONVERT:
      if (tree_contains_chrecs (op0, NULL))
	return chrec_dont_know;

    default:
      if (integer_onep (op0))
	return op1;

      if (integer_zerop (op0))
	return build_int_cst (type, 0);

      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  gcc_checking_assert
	    (!chrec_contains_symbols_defined_in_loop (op1,
						      CHREC_VARIABLE (op1)));
	  return build_polynomial_chrec
	    (CHREC_VARIABLE (op1),
	     chrec_fold_multiply (type, CHREC_LEFT (op1), op0),
	     chrec_fold_multiply (type, CHREC_RIGHT (op1), op0));

	CASE_CONVERT:
	  if (tree_contains_chrecs (op1, NULL))
	    return chrec_dont_know;

	default:
	  if (integer_onep (op1))
	    return op0;
	  if (integer_zerop (op1))
	    return build_int_cst (type, 0);
	  return fold_build2 (MULT_EXPR, type, op0, op1);
	}
    }
}
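A worked instance of the scalar case above: multiplying the chrec
{1, +, 2}_1 by the invariant 3 distributes over both components, giving
{3, +, 6}_1, since 3 * (1 + 2i) = 3 + 6i at iteration i.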
Example #24
HOST_WIDE_INT
abs_hwi (HOST_WIDE_INT x)
{
  gcc_checking_assert (x != HOST_WIDE_INT_MIN);
  return x >= 0 ? x : -x;
}
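The assert exists because, in two's complement, HOST_WIDE_INT_MIN has no
positive counterpart: negating it overflows, which is undefined behavior
for signed integers.  The same contract with fixed-width types (a sketch):

#include <stdint.h>

/* Caller must guarantee x != INT64_MIN, exactly as abs_hwi requires
   x != HOST_WIDE_INT_MIN.  */
static int64_t
abs_i64 (int64_t x)
{
  return x >= 0 ? x : -x;
}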
Example #25
static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
Example #26
/* INSN is being moved to its place in the schedule, after LAST.  */
static void
begin_move_insn (rtx insn, rtx last)
{
  if (BLOCK_FOR_INSN (insn) == last_bb
      /* INSN is a jump in the last block, ...  */
      && control_flow_insn_p (insn)
      /* that is going to be moved over some instructions.  */
      && last != PREV_INSN (insn))
    {
      edge e;
      basic_block bb;

      /* An obscure special case: a partially-dead instruction is
	 scheduled after the last control flow instruction.  In this
	 case we can create a new basic block.  It is always exactly
	 one basic block, last in the sequence.  */

      e = find_fallthru_edge (last_bb->succs);

      gcc_checking_assert (!e || !(e->flags & EDGE_COMPLEX));

      gcc_checking_assert (BLOCK_FOR_INSN (insn) == last_bb
			   && !IS_SPECULATION_CHECK_P (insn)
			   && BB_HEAD (last_bb) != insn
			   && BB_END (last_bb) == insn);

      {
	rtx x;

	x = NEXT_INSN (insn);
	if (e)
	  gcc_checking_assert (NOTE_P (x) || LABEL_P (x));
	else
	  gcc_checking_assert (BARRIER_P (x));
      }

      if (e)
	{
	  bb = split_edge (e);
	  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (BB_END (bb)));
	}
      else
	{
	  /* Create an empty unreachable block after the INSN.  */
	  rtx next = NEXT_INSN (insn);
	  if (next && BARRIER_P (next))
	    next = NEXT_INSN (next);
	  bb = create_basic_block (next, NULL_RTX, last_bb);
	}

      /* split_edge () creates BB before E->DEST.  Keep in mind that this
	 operation extends the scheduling region to the end of BB.  Hence,
	 we need to shift NEXT_TAIL so that haifa-sched.c won't go past the
	 end of the scheduling region.  */
      current_sched_info->next_tail = NEXT_INSN (BB_END (bb));
      gcc_assert (current_sched_info->next_tail);

      /* Append new basic block to the end of the ebb.  */
      sched_init_only_bb (bb, last_bb);
      gcc_assert (last_bb == bb);
    }
}