Example #1
tree
gimple_simplify (enum built_in_function fn, tree type,
		 tree arg0, tree arg1, tree arg2,
		 gimple_seq *seq, tree (*valueize)(tree))
{
  if (constant_for_folding (arg0)
      && constant_for_folding (arg1)
      && constant_for_folding (arg2))
    {
      tree decl = builtin_decl_implicit (fn);
      if (decl)
	{
	  tree args[3];
	  args[0] = arg0;
	  args[1] = arg1;
	  args[2] = arg2;
	  tree res = fold_builtin_n (UNKNOWN_LOCATION, decl, args, 3, false);
	  if (res)
	    {
	      /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (res);
	      res = fold_convert (type, res);
	      if (CONSTANT_CLASS_P (res))
		return res;
	    }
	}
    }

  code_helper rcode;
  tree ops[3] = {};
  if (!gimple_simplify (&rcode, ops, seq, valueize,
			fn, type, arg0, arg1, arg2))
    return NULL_TREE;
  return maybe_push_res_to_seq (rcode, type, ops, seq);
}
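A pattern shared by several of these examples: when every argument is a compile-time constant, the builtin call is folded directly, but fold_builtin_n returns its result wrapped in a NOP_EXPR, so the caller runs STRIP_NOPS and accepts the folding only if a constant remains after unwrapping.  The stand-alone sketch below mirrors just that wrap/strip/check sequence; node, strip_nops and fold_call are hypothetical stand-ins, not GCC types.

#include <memory>
#include <iostream>

// Hypothetical toy expression node: either an integer constant or a
// no-op conversion wrapper around another node.
struct node {
  enum kind { CONSTANT, NOP, OTHER } k;
  long value = 0;                       // valid when k == CONSTANT
  std::shared_ptr<node> inner;          // valid when k == NOP
};

// Analogue of STRIP_NOPS: peel no-op conversion wrappers off in place.
static void strip_nops (std::shared_ptr<node> &n)
{
  while (n && n->k == node::NOP)
    n = n->inner;
}

// Stand-in for fold_builtin_n: folds abs(x) for a constant argument and,
// like the real folder, hands the result back inside a NOP wrapper.
static std::shared_ptr<node> fold_call (long arg)
{
  auto cst = std::make_shared<node> ();
  cst->k = node::CONSTANT;
  cst->value = arg < 0 ? -arg : arg;
  auto wrap = std::make_shared<node> ();
  wrap->k = node::NOP;
  wrap->inner = cst;
  return wrap;
}

int main ()
{
  std::shared_ptr<node> res = fold_call (-42);
  strip_nops (res);                     // unwrap the NOP_EXPR analogue
  if (res->k == node::CONSTANT)         // accept only if a constant remains
    std::cout << "folded to " << res->value << "\n";
  return 0;
}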
Example #2
int
expand_catch_class (treetreehash_entry **entry, int)
{
  struct treetreehash_entry *ite = *entry;
  tree addr = TREE_VALUE ((tree)ite->value);
  tree decl;
  STRIP_NOPS (addr);
  decl = TREE_OPERAND (addr, 0);
  rest_of_decl_compilation (decl, global_bindings_p (), 0);
  return true;
}
Example #3
static bool
gimple_resimplify1 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = const_unop (*res_code, type, res_ops[0]);
      else
	{
	  tree decl = builtin_decl_implicit (*res_code);
	  if (decl)
	    {
	      tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 1, false);
	      if (tem)
		{
		  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
		  STRIP_NOPS (tem);
		  tem = fold_convert (type, tem);
		}
	    }
	}
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}
Example #4
File: symtab.c Project: lv88h/gcc
ipa_ref *
symtab_node::maybe_create_reference (tree val, enum ipa_ref_use use_type,
				     gimple stmt)
{
  STRIP_NOPS (val);
  if (TREE_CODE (val) != ADDR_EXPR)
    return NULL;
  val = get_base_var (val);
  if (val && (TREE_CODE (val) == FUNCTION_DECL
	       || TREE_CODE (val) == VAR_DECL))
    {
      symtab_node *referred = symtab_node::get (val);
      gcc_checking_assert (referred);
      return create_reference (referred, use_type, stmt);
    }
  return NULL;
}
Example #5
struct ipa_ref *
ipa_maybe_record_reference (symtab_node *referring_node, tree val,
			    enum ipa_ref_use use_type, gimple stmt)
{
  STRIP_NOPS (val);
  if (TREE_CODE (val) != ADDR_EXPR)
    return NULL;
  val = get_base_var (val);
  if (val && (TREE_CODE (val) == FUNCTION_DECL
	       || TREE_CODE (val) == VAR_DECL))
    {
      symtab_node *referred = symtab_get_node (val);
      gcc_checking_assert (referred);
      return ipa_record_reference (referring_node, referred,
				   use_type, stmt);
    }
  return NULL;
}
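Examples #4 and #5 are two generations of the same helper: strip no-op conversions off VAL, require that what remains takes the address of something, reduce that operand to its base declaration, and only then record an IPA reference to the matching symbol table node.  Below is a minimal sketch of that filtering pipeline with hypothetical types (expr, symbol_table); it shows the shape of the check, not the GCC API.

#include <string>
#include <vector>
#include <memory>
#include <iostream>

struct expr {
  enum kind { CAST, ADDR_OF, DECL, OTHER } k;
  std::string name;                      // valid when k == DECL
  std::shared_ptr<expr> op;              // operand for CAST / ADDR_OF
};

struct symbol_table {
  std::vector<std::string> references;   // symbols referenced so far

  // Analogue of maybe_create_reference: returns true iff a reference
  // was actually recorded.
  bool maybe_record_reference (std::shared_ptr<expr> val)
  {
    while (val && val->k == expr::CAST)    // STRIP_NOPS analogue
      val = val->op;
    if (!val || val->k != expr::ADDR_OF)   // must be &something
      return false;
    std::shared_ptr<expr> base = val->op;  // get_base_var analogue
    if (!base || base->k != expr::DECL)    // only decls are interesting
      return false;
    references.push_back (base->name);
    return true;
  }
};

int main ()
{
  auto decl = std::make_shared<expr> (expr{expr::DECL, "foo", nullptr});
  auto addr = std::make_shared<expr> (expr{expr::ADDR_OF, "", decl});
  auto cast = std::make_shared<expr> (expr{expr::CAST, "", addr});

  symbol_table symtab;
  std::cout << symtab.maybe_record_reference (cast) << "\n";  // prints 1
  return 0;
}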
Example #6
static tree
check_noexcept_r (tree *tp, int * /*walk_subtrees*/, void * /*data*/)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);
  if ((code == CALL_EXPR && CALL_EXPR_FN (t))
      || code == AGGR_INIT_EXPR)
    {
      /* We can only use the exception specification of the called function
	 for determining the value of a noexcept expression; we can't use
	 TREE_NOTHROW, as it might have a different value in another
	 translation unit, creating ODR problems.

         We could use TREE_NOTHROW (t) for !TREE_PUBLIC fns, though... */
      tree fn = (code == AGGR_INIT_EXPR
		 ? AGGR_INIT_EXPR_FN (t) : CALL_EXPR_FN (t));
      tree type = TREE_TYPE (fn);
      gcc_assert (POINTER_TYPE_P (type));
      type = TREE_TYPE (type);

      STRIP_NOPS (fn);
      if (TREE_CODE (fn) == ADDR_EXPR)
	fn = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fn) == FUNCTION_DECL)
	{
	  /* We do use TREE_NOTHROW for ABI internals like __dynamic_cast,
	     and for C library functions known not to throw.  */
	  if (DECL_EXTERN_C_P (fn)
	      && (DECL_ARTIFICIAL (fn)
		  || nothrow_libfn_p (fn)))
	    return TREE_NOTHROW (fn) ? NULL_TREE : fn;
	  /* A call to a constexpr function is noexcept if the call
	     is a constant expression.  */
	  if (DECL_DECLARED_CONSTEXPR_P (fn)
	      && is_sub_constant_expr (t))
	    return NULL_TREE;
	}
      if (!TYPE_NOTHROW_P (type))
	return fn;
    }

  return NULL_TREE;
}
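check_noexcept_r is a walk_tree callback: returning a non-null tree aborts the walk and reports the offending subexpression, while returning NULL_TREE lets the walk continue.  The sketch below reproduces only that early-exit traversal contract with a hypothetical node type and callback; it does not model the C++ front end's noexcept rules.

#include <vector>
#include <string>
#include <iostream>

struct node {
  std::string label;
  bool may_throw;
  std::vector<node *> kids;
};

// Preorder walk: a non-null result from the callback stops the traversal,
// just as a non-NULL_TREE return stops walk_tree.
template <typename Callback>
static const node *walk (const node &n, Callback cb)
{
  if (const node *hit = cb (n))
    return hit;
  for (const node *k : n.kids)
    if (const node *hit = walk (*k, cb))
      return hit;
  return nullptr;
}

int main ()
{
  node safe{"safe_call", false, {}};
  node risky{"throwing_call", true, {}};
  node root{"root", false, {&safe, &risky}};

  const node *hit = walk (root, [] (const node &n) -> const node * {
    return n.may_throw ? &n : nullptr;   // analogue of "return fn;"
  });

  std::cout << (hit ? hit->label : std::string ("noexcept")) << "\n";
  return 0;
}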
Example #7
static bool
wrapper_parm_cb (const void *key0, void **val0, void *data)
{
  struct wrapper_data *wd = (struct wrapper_data *) data;
  tree arg = * (tree *)&key0;
  tree val = (tree)*val0;
  tree parm;

  if (val == error_mark_node || val == arg)
    return true;

  if (TREE_CODE (val) == PAREN_EXPR)
    {
      /* We should not reach here with a register receiver.
	 We may see a register variable modified in the
	 argument list.  Because register variables are
	 worker-local we don't need to work hard to support
	 them in code that spawns.  */
      if ((TREE_CODE (arg) == VAR_DECL) && DECL_HARD_REGISTER (arg))
	{
	  error_at (EXPR_LOCATION (arg),
		    "explicit register variable %qD may not be modified in "
		    "spawn", arg);
	  arg = null_pointer_node;
	}
      else
	arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (arg)), arg);
	
      val = TREE_OPERAND (val, 0);
      *val0 = val;
      gcc_assert (TREE_CODE (val) == INDIRECT_REF);
      parm = TREE_OPERAND (val, 0);
      STRIP_NOPS (parm);
    }
  else
    parm = val;
  TREE_CHAIN (parm) = wd->parms;
  wd->parms = parm;
  wd->argtypes = tree_cons (NULL_TREE, TREE_TYPE (parm), wd->argtypes); 
  wd->arglist = tree_cons (NULL_TREE, arg, wd->arglist); 
  return true;
}
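wrapper_parm_cb runs once per captured value and grows three parallel lists: the wrapper's parameter chain (via TREE_CHAIN), the parameter-type list, and the argument list for the eventual call, passing a receiver that is modified in the spawned expression by address.  A stripped-down sketch of that one-callback-per-capture bookkeeping, with std::vector standing in for the chained tree lists and every name hypothetical:

#include <string>
#include <vector>
#include <iostream>

struct wrapper_data_sketch {
  std::vector<std::string> parms;     // wrapper parameter names
  std::vector<std::string> argtypes;  // wrapper parameter types
  std::vector<std::string> arglist;   // arguments for the call
};

// One invocation per captured value, as the hash-table traversal does.
static bool wrapper_parm_cb_sketch (const std::string &arg,
                                    const std::string &type,
                                    bool modified, wrapper_data_sketch *wd)
{
  wd->parms.push_back (arg);
  wd->argtypes.push_back (type);
  // A receiver modified in the spawned call (the PAREN_EXPR case above)
  // is passed by address; everything else is passed as-is.
  wd->arglist.push_back (modified ? "&" + arg : arg);
  return true;                        // nonzero keeps the traversal going
}

int main ()
{
  wrapper_data_sketch wd;
  wrapper_parm_cb_sketch ("x", "int", false, &wd);
  wrapper_parm_cb_sketch ("buf", "char *", true, &wd);
  for (size_t i = 0; i < wd.parms.size (); ++i)
    std::cout << wd.argtypes[i] << " " << wd.parms[i]
              << " <- " << wd.arglist[i] << "\n";
  return 0;
}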
Example #8
static void
record_type_list (cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);
      
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      varpool_node *vnode = varpool_node::get_create (type);
	      node->create_reference (vnode, IPA_REF_ADDR);
	    }
	}
    }
}
Example #9
static void
record_type_list (struct cgraph_node *node, tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree type = TREE_VALUE (list);
      
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      varpool_node *vnode = varpool_node_for_decl (type);
	      ipa_record_reference (node,
				    vnode,
				    IPA_REF_ADDR, NULL);
	    }
	}
    }
}
Example #10
static bool
gimple_resimplify3 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]) && constant_for_folding (res_ops[1])
      && constant_for_folding (res_ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = fold_ternary/*_to_constant*/ (*res_code, type, res_ops[0],
					    res_ops[1], res_ops[2]);
      else
	{
	  tree decl = builtin_decl_implicit (*res_code);
	  if (decl)
	    {
	      tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 3, false);
	      if (tem)
		{
		  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
		  STRIP_NOPS (tem);
		  tem = fold_convert (type, tem);
		}
	    }
	}
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_code->is_tree_code ()
      && commutative_ternary_tree_code (*res_code)
      && tree_swap_operands_p (res_ops[0], res_ops[1], false))
    {
      tree tem = res_ops[0];
      res_ops[0] = res_ops[1];
      res_ops[1] = tem;
      canonicalized = true;
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type,
		       res_ops[0], res_ops[1], res_ops[2]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return canonicalized;
}
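Besides repeating the constant-folding path of gimple_resimplify1, gimple_resimplify3 canonicalizes the operand order of commutative ternary codes before retrying the pattern match, so the generated patterns only ever have to handle one ordering.  A tiny stand-alone sketch of that idea follows; the rank rule used here (constants sort after non-constants) is illustrative, not GCC's tree_swap_operands_p.

#include <string>
#include <utility>
#include <iostream>

struct operand {
  bool is_constant;
  std::string text;
};

// Put the non-constant operand first so e.g. "x + 3" and "3 + x" end up
// in the same form.  Returns true when a swap happened, mirroring the
// `canonicalized' flag above.
static bool canonicalize_commutative (operand &op0, operand &op1)
{
  if (op0.is_constant && !op1.is_constant)
    {
      std::swap (op0, op1);
      return true;
    }
  return false;
}

int main ()
{
  operand a{true, "3"}, b{false, "x"};
  bool changed = canonicalize_commutative (a, b);
  std::cout << a.text << " op " << b.text
            << (changed ? "  (swapped)" : "") << "\n";
  return 0;
}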
Example #11
void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
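tree_to_aff_combination decomposes an address or integer expression into an affine combination: a constant offset plus a sum of (coefficient, element) pairs, which the aff_combination_* helpers then scale and add.  The sketch below models only that arithmetic core with a hypothetical aff struct over string-named elements; it is a conceptual illustration, not the double_int/widest_int machinery used here.

#include <map>
#include <string>
#include <iostream>

// Hypothetical affine combination: offset + sum of coeff * element.
struct aff {
  long offset = 0;
  std::map<std::string, long> elts;   // element name -> coefficient
};

static void aff_scale (aff &a, long scale)
{
  a.offset *= scale;
  for (auto &e : a.elts)
    e.second *= scale;
}

static void aff_add (aff &a, const aff &b)
{
  a.offset += b.offset;
  for (const auto &e : b.elts)
    a.elts[e.first] += e.second;
}

int main ()
{
  // Decompose "2 * (x + 3) - y" by hand the way the switch above would:
  aff inner;                 // x + 3
  inner.elts["x"] = 1;
  inner.offset = 3;
  aff_scale (inner, 2);      // MULT_EXPR by the constant 2

  aff y;                     // y, scaled by -1 for the MINUS_EXPR
  y.elts["y"] = 1;
  aff_scale (y, -1);

  aff_add (inner, y);        // 2*x - y + 6
  std::cout << "offset " << inner.offset
            << "  x:" << inner.elts["x"]
            << "  y:" << inner.elts["y"] << "\n";
  return 0;
}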
Example #12
void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  aff_combination_elt (comb, type, expr);
}
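The CASE_CONVERT arm just above is worth spelling out: widening (T1)(X - CST) to (T1)X - (T1)CST is only valid for a wrapping unsigned inner type when range information proves X >= CST, because otherwise X - CST wraps before the widening.  A tiny stand-alone check of that condition, with the range assumed rather than taken from get_range_info:

#include <cstdint>
#include <iostream>

int main ()
{
  // Assume range analysis proved x is in [10, 20]; CST is 3, so x >= CST
  // and the narrow subtraction cannot wrap.
  for (uint8_t x = 10; x <= 20; ++x)
    {
      int widened_after  = (int) (uint8_t) (x - 3); // (T1)(X - CST)
      int widened_before = (int) x - 3;             // (T1)X - (T1)CST
      if (widened_after != widened_before)
        std::cout << "mismatch at " << (int) x << "\n";   // never printed
    }

  // Without the range guarantee the rewrite breaks: for x == 1,
  // (int)(uint8_t)(x - 3) is 254 while (int)x - 3 is -2.
  uint8_t x = 1;
  std::cout << (int) (uint8_t) (x - 3) << " vs " << (int) x - 3 << "\n";
  return 0;
}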
Example #13
static tree
expand_sec_reduce_builtin (tree an_builtin_fn, tree *new_var)
{
  tree new_var_type = NULL_TREE, func_parm, new_yes_expr, new_no_expr;
  tree array_ind_value = NULL_TREE, new_no_ind, new_yes_ind, new_no_list;
  tree new_yes_list, new_cond_expr, new_expr = NULL_TREE; 
  vec<tree, va_gc> *array_list = NULL, *array_operand = NULL;
  size_t list_size = 0, rank = 0, ii = 0;
  tree  body, an_init, loop_with_init = alloc_stmt_list ();
  tree array_op0, comp_node = NULL_TREE;
  tree call_fn = NULL_TREE, identity_value = NULL_TREE;
  tree init = NULL_TREE, cond_init = NULL_TREE;
  enum tree_code code = NOP_EXPR;
  location_t location = UNKNOWN_LOCATION;
  vec<vec<an_parts> > an_info = vNULL;
  auto_vec<an_loop_parts> an_loop_info;
  enum built_in_function an_type =
    is_cilkplus_reduce_builtin (CALL_EXPR_FN (an_builtin_fn));
  vec <tree, va_gc> *func_args;
  
  if (an_type == BUILT_IN_NONE)
    return NULL_TREE;

  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE
      && an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    func_parm = CALL_EXPR_ARG (an_builtin_fn, 0);
  else
    {
      call_fn = CALL_EXPR_ARG (an_builtin_fn, 2);

      /* We need to do this because we are "faking" the builtin function types,
	 so the compiler does a bunch of typecasts and this will get rid of
	 all that!  */
      STRIP_NOPS (call_fn);
      if (TREE_CODE (call_fn) != OVERLOAD
	  && TREE_CODE (call_fn) != FUNCTION_DECL)
	call_fn = TREE_OPERAND (call_fn, 0);
      identity_value = CALL_EXPR_ARG (an_builtin_fn, 0);
      func_parm = CALL_EXPR_ARG (an_builtin_fn, 1);
      STRIP_NOPS (identity_value);
    }
  STRIP_NOPS (func_parm);
  
  location = EXPR_LOCATION (an_builtin_fn);
  
  /* Note about using find_rank (): If find_rank returns false, then it must
     have already reported an error, thus we just return an error_mark_node
     without doing any error emission.  */
  if (!find_rank (location, an_builtin_fn, an_builtin_fn, true, &rank))
      return error_mark_node;
  if (rank == 0)
    {
      error_at (location, "Invalid builtin arguments");
      return error_mark_node;
    }
  else if (rank > 1 
	   && (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	       || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND))
    { 
      error_at (location, "__sec_reduce_min_ind or __sec_reduce_max_ind cannot "
		"have arrays with dimension greater than 1");
      return error_mark_node;
    }
  
  extract_array_notation_exprs (func_parm, true, &array_list);
  list_size = vec_safe_length (array_list);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      new_var_type = boolean_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_var_type = size_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      if (call_fn && identity_value)
	new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      new_var_type = NULL_TREE;
      break;
    default:
      gcc_unreachable ();
    }
    
  if (new_var_type && TREE_CODE (new_var_type) == ARRAY_TYPE)
    new_var_type = TREE_TYPE (new_var_type);
  an_loop_info.safe_grow_cleared (rank);

  an_init = push_stmt_list ();

  /* Assign the array notation components to variable so that they can satisfy
     the exec-once rule.  */
  for (ii = 0; ii < list_size; ii++)
    if (TREE_CODE ((*array_list)[ii]) == ARRAY_NOTATION_REF)
      {
	tree anode = (*array_list)[ii];
	make_triplet_val_inv (&ARRAY_NOTATION_START (anode));
	make_triplet_val_inv (&ARRAY_NOTATION_LENGTH (anode));
	make_triplet_val_inv (&ARRAY_NOTATION_STRIDE (anode));
      }
  cilkplus_extract_an_triplets (array_list, list_size, rank, &an_info);
  for (ii = 0; ii < rank; ii++)
    {
      tree typ = ptrdiff_type_node;

      /* In this place, we are using get_temp_regvar instead of 
	 create_temporary_var if an_type is SEC_REDUCE_MAX/MIN_IND because
	 the array_ind_value depends on this value being initialized to 0.  */
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	  || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND) 
	an_loop_info[ii].var = get_temp_regvar (typ, build_zero_cst (typ));
      else
	{
	  an_loop_info[ii].var = create_temporary_var (typ);
	  add_decl_expr (an_loop_info[ii].var);
	}
      an_loop_info[ii].ind_init = 
	build_x_modify_expr (location, an_loop_info[ii].var, INIT_EXPR,
			     build_zero_cst (typ), tf_warning_or_error);
    }
  array_operand = create_array_refs (location, an_info, an_loop_info,
				      list_size, rank);
  replace_array_notations (&func_parm, true, array_list, array_operand);
  
  if (!TREE_TYPE (func_parm))      
    TREE_TYPE (func_parm) = TREE_TYPE ((*array_list)[0]);
  
  create_cmp_incr (location, &an_loop_info, rank, an_info, tf_warning_or_error);
  if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
      || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND) 
    array_ind_value = get_temp_regvar (TREE_TYPE (func_parm), func_parm);

  array_op0 = (*array_operand)[0];
  if (INDIRECT_REF_P (array_op0))
    array_op0 = TREE_OPERAND (array_op0, 0);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
      code = PLUS_EXPR;
      init = build_zero_cst (new_var_type);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
      code = MULT_EXPR;
      init = build_one_cst (new_var_type);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      code = ((an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO) ? EQ_EXPR
	: NE_EXPR);
      init = build_zero_cst (new_var_type);
      cond_init = build_one_cst (new_var_type);
      comp_node = build_zero_cst (TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      code = ((an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO) ? NE_EXPR
	: EQ_EXPR);
      init = build_one_cst (new_var_type);
      cond_init = build_zero_cst (new_var_type);
      comp_node = build_zero_cst (TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
      code = MAX_EXPR;
      init = (TYPE_MIN_VALUE (new_var_type) ? TYPE_MIN_VALUE (new_var_type)
	: func_parm);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      code = MIN_EXPR;
      init = (TYPE_MAX_VALUE (new_var_type) ? TYPE_MAX_VALUE (new_var_type)
	: func_parm);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
      code = (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND ? LE_EXPR
	: GE_EXPR);
      init = an_loop_info[0].var;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      init = identity_value;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      init = NULL_TREE;
      break;
    default:
      gcc_unreachable ();
    }

  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    *new_var = get_temp_regvar (new_var_type, init);
  else
    *new_var = NULL_TREE;

  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:      
      new_expr = build_x_modify_expr (location, *new_var, code, func_parm,
				      tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      /* In all these cases, assume the false case is true and as soon as
	 we find a true case,  set the true flag on and latch it in.  */
      new_yes_expr = build_x_modify_expr (location, *new_var, NOP_EXPR,
					  cond_init, tf_warning_or_error);
      new_no_expr = build_x_modify_expr (location, *new_var, NOP_EXPR,
					 *new_var, tf_warning_or_error);
      new_cond_expr = build_x_binary_op
	(location, code, func_parm, TREE_CODE (func_parm), comp_node,
	 TREE_CODE (comp_node), NULL, tf_warning_or_error);
      new_expr = build_x_conditional_expr (location, new_cond_expr,
					   new_yes_expr, new_no_expr,
					   tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_cond_expr = build_x_binary_op
	(location, code, *new_var, TREE_CODE (*new_var), func_parm,
	 TREE_CODE (func_parm), NULL, tf_warning_or_error);
      new_expr = build_x_modify_expr (location, *new_var, NOP_EXPR, func_parm,
				      tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_yes_expr = build_x_modify_expr (location, array_ind_value, NOP_EXPR,
					  func_parm, tf_warning_or_error);
      new_no_expr = build_x_modify_expr (location, array_ind_value, NOP_EXPR,
					 array_ind_value, tf_warning_or_error);
      if (list_size > 1)
	new_yes_ind = build_x_modify_expr (location, *new_var, NOP_EXPR,
					   an_loop_info[0].var,
					   tf_warning_or_error);
      else
	new_yes_ind = build_x_modify_expr (location, *new_var, NOP_EXPR,
					   TREE_OPERAND (array_op0, 1),
					   tf_warning_or_error);
      new_no_ind = build_x_modify_expr (location, *new_var, NOP_EXPR, *new_var,
					tf_warning_or_error);
      new_yes_list = alloc_stmt_list ();
      append_to_statement_list (new_yes_ind, &new_yes_list);
      append_to_statement_list (new_yes_expr, &new_yes_list);

      new_no_list = alloc_stmt_list ();
      append_to_statement_list (new_no_ind, &new_no_list);
      append_to_statement_list (new_no_expr, &new_no_list);

      new_cond_expr = build_x_binary_op (location, code, array_ind_value,
					 TREE_CODE (array_ind_value), func_parm,
					 TREE_CODE (func_parm), NULL,
					 tf_warning_or_error);
      new_expr = build_x_conditional_expr (location, new_cond_expr,
					   new_yes_list, new_no_list,
					   tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      func_args = make_tree_vector ();
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE)
	vec_safe_push (func_args, *new_var);
      else
	vec_safe_push (func_args, identity_value);
      vec_safe_push (func_args, func_parm);

      new_expr = finish_call_expr (call_fn, &func_args, false, true,
				   tf_warning_or_error);
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE)
	new_expr = build_x_modify_expr (location, *new_var, NOP_EXPR, new_expr,
					tf_warning_or_error);
      release_tree_vector (func_args);
      break;
    default:
      gcc_unreachable ();
    }
  an_init = pop_stmt_list (an_init);
  append_to_statement_list (an_init, &loop_with_init);
  body = new_expr;

  for (ii = 0; ii < rank; ii++)
    {
      tree new_loop = push_stmt_list ();
      create_an_loop (an_loop_info[ii].ind_init, an_loop_info[ii].cmp,
		      an_loop_info[ii].incr, body);
      body = pop_stmt_list (new_loop);
    }
  append_to_statement_list (body, &loop_with_init);

  release_vec_vec (an_info);

  return loop_with_init;
}
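Stripped of the array-notation bookkeeping, the loop that expand_sec_reduce_builtin builds computes an ordinary fold: an accumulator is seeded with the operation's identity (or the user-supplied identity for __sec_reduce) and combined with each element inside the generated loop nest.  A plain C++ sketch of the run-time computation for the __sec_reduce case, with the combiner and identity as caller-supplied stand-ins rather than anything taken from the front end:

#include <iostream>

// Rough shape of what the expansion of
//   __sec_reduce (identity, a[0:n], combine)
// executes: accumulator seeded with the identity value, then updated
// once per iteration of the generated loop.
template <typename T, typename Combine>
static T sec_reduce (T identity, const T *a, int n, Combine combine)
{
  T acc = identity;              // *new_var = get_temp_regvar (type, init)
  for (int i = 0; i < n; ++i)    // the rank-1 loop built by create_an_loop
    acc = combine (acc, a[i]);   // new_expr assigned back into *new_var
  return acc;
}

int main ()
{
  int a[] = {3, 1, 4, 1, 5};
  // __sec_reduce_add corresponds to identity 0 and operator+.
  int sum = sec_reduce (0, a, 5, [] (int x, int y) { return x + y; });
  std::cout << sum << "\n";      // prints 14
  return 0;
}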
Example #14
/* User-deallocate; we emit the code directly from the front-end, and the
   logic is the same as the previous library function:

    void
    deallocate (void *pointer, GFC_INTEGER_4 * stat)
    {
      if (!pointer)
	{
	  if (stat)
	    *stat = 1;
	  else
	    runtime_error ("Attempt to DEALLOCATE unallocated memory.");
	}
      else
	{
	  free (pointer);
	  if (stat)
	    *stat = 0;
	}
    }

   In this front-end version, status doesn't have to be GFC_INTEGER_4.
   Moreover, if CAN_FAIL is true, then we will not emit a runtime error,
   even when no status variable is passed to us (this is used for
   unconditional deallocation generated by the front-end at end of
   each procedure).
   
   If a runtime-message is possible, `expr' must point to the original
   expression being deallocated for its locus and variable name.

   For coarrays, "pointer" must be the array descriptor and not its
   "data" component.  */
tree
gfc_deallocate_with_status (tree pointer, tree status, tree errmsg,
			    tree errlen, tree label_finish,
			    bool can_fail, gfc_expr* expr, bool coarray)
{
  stmtblock_t null, non_null;
  tree cond, tmp, error;
  tree status_type = NULL_TREE;
  tree caf_decl = NULL_TREE;

  if (coarray)
    {
      gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (pointer)));
      caf_decl = pointer;
      pointer = gfc_conv_descriptor_data_get (caf_decl);
      STRIP_NOPS (pointer);
    }

  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, pointer,
			  build_int_cst (TREE_TYPE (pointer), 0));

  /* When POINTER is NULL, we set STATUS to 1 if it's present, otherwise
     we emit a runtime error.  */
  gfc_start_block (&null);
  if (!can_fail)
    {
      tree varname;

      gcc_assert (expr && expr->expr_type == EXPR_VARIABLE && expr->symtree);

      varname = gfc_build_cstring_const (expr->symtree->name);
      varname = gfc_build_addr_expr (pchar_type_node, varname);

      error = gfc_trans_runtime_error (true, &expr->where,
				       "Attempt to DEALLOCATE unallocated '%s'",
				       varname);
    }
  else
    error = build_empty_stmt (input_location);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      tree cond2;

      status_type = TREE_TYPE (TREE_TYPE (status));
      cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			       status, build_int_cst (TREE_TYPE (status), 0));
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
			     fold_build1_loc (input_location, INDIRECT_REF,
					      status_type, status),
			     build_int_cst (status_type, 1));
      error = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			       cond2, tmp, error);
    }

  gfc_add_expr_to_block (&null, error);

  /* When POINTER is not NULL, we free it.  */
  gfc_start_block (&non_null);
  if (!coarray || gfc_option.coarray != GFC_FCOARRAY_LIB)
    {
      tmp = build_call_expr_loc (input_location,
				 builtin_decl_explicit (BUILT_IN_FREE), 1,
				 fold_convert (pvoid_type_node, pointer));
      gfc_add_expr_to_block (&non_null, tmp);

      if (status != NULL_TREE && !integer_zerop (status))
	{
	  /* We set STATUS to zero if it is present.  */
	  tree status_type = TREE_TYPE (TREE_TYPE (status));
	  tree cond2;

	  cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   status,
				   build_int_cst (TREE_TYPE (status), 0));
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
				 fold_build1_loc (input_location, INDIRECT_REF,
						  status_type, status),
				 build_int_cst (status_type, 0));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 gfc_unlikely (cond2), tmp,
				 build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&non_null, tmp);
	}
    }
  else
    {
      tree caf_type, token, cond2;
      tree pstat = null_pointer_node;

      if (errmsg == NULL_TREE)
	{
	  gcc_assert (errlen == NULL_TREE);
	  errmsg = null_pointer_node;
	  errlen = build_zero_cst (integer_type_node);
	}
      else
	{
	  gcc_assert (errlen != NULL_TREE);
	  if (!POINTER_TYPE_P (TREE_TYPE (errmsg)))
	    errmsg = gfc_build_addr_expr (NULL_TREE, errmsg);
	}

      caf_type = TREE_TYPE (caf_decl);

      if (status != NULL_TREE && !integer_zerop (status))
	{
	  gcc_assert (status_type == integer_type_node);
	  pstat = status;
	}

      if (GFC_DESCRIPTOR_TYPE_P (caf_type)
	  && GFC_TYPE_ARRAY_AKIND (caf_type) == GFC_ARRAY_ALLOCATABLE)
	token = gfc_conv_descriptor_token (caf_decl);
      else if (DECL_LANG_SPECIFIC (caf_decl)
	       && GFC_DECL_TOKEN (caf_decl) != NULL_TREE)
	token = GFC_DECL_TOKEN (caf_decl);
      else
	{
	  gcc_assert (GFC_ARRAY_TYPE_P (caf_type)
		      && GFC_TYPE_ARRAY_CAF_TOKEN (caf_type) != NULL_TREE);
	  token = GFC_TYPE_ARRAY_CAF_TOKEN (caf_type);
	}

      token = gfc_build_addr_expr  (NULL_TREE, token);
      tmp = build_call_expr_loc (input_location,
	     gfor_fndecl_caf_deregister, 4,
	     token, pstat, errmsg, errlen);
      gfc_add_expr_to_block (&non_null, tmp);

      if (status != NULL_TREE)
	{
	  tree stat = build_fold_indirect_ref_loc (input_location, status);

	  TREE_USED (label_finish) = 1;
	  tmp = build1_v (GOTO_EXPR, label_finish);
	  cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   stat, build_zero_cst (TREE_TYPE (stat)));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
        			 gfc_unlikely (cond2), tmp,
				 build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&non_null, tmp);
	}
    }

  return fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
			  gfc_finish_block (&null),
			  gfc_finish_block (&non_null));
}
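The library routine quoted in the block comment at the top of this example is easy to exercise on its own.  The version below is a direct, compilable rendering of that pseudo-code, assuming a plain int status and fputs for the runtime error; it is the same branch structure that gfc_deallocate_with_status open-codes via the COND_EXPR it returns.

#include <cstdio>
#include <cstdlib>

// Mirrors the documented logic: a null pointer is an error (reported via
// *stat when present), otherwise free the memory and clear the status.
static void deallocate_with_status (void *pointer, int *stat)
{
  if (!pointer)
    {
      if (stat)
        *stat = 1;
      else
        std::fputs ("Attempt to DEALLOCATE unallocated memory.\n", stderr);
    }
  else
    {
      std::free (pointer);
      if (stat)
        *stat = 0;
    }
}

int main ()
{
  int status = -1;
  deallocate_with_status (nullptr, &status);   // sets status to 1
  std::printf ("status after null free: %d\n", status);

  void *p = std::malloc (16);
  deallocate_with_status (p, &status);         // frees and sets status to 0
  std::printf ("status after real free: %d\n", status);
  return 0;
}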
Example #15
static tree
get_variable_decl (tree exp)
{
  /* A static field can be wrapped in a COMPOUND_EXPR where the first
     argument initializes the class.  */
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    exp = extract_field_decl (exp);

  if (TREE_CODE (exp) == VAR_DECL)
    {
      if (! TREE_STATIC (exp) ||  FIELD_FINAL (exp))
	return exp;
    }
  /* We only care about final parameters. */
  else if (TREE_CODE (exp) == PARM_DECL)
    {
      if (DECL_FINAL (exp))
	return exp;
    }
  /* See if exp is this.field. */
  else if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree op0 = TREE_OPERAND (exp, 0);
      tree op1 = TREE_OPERAND (exp, 1);
      tree mdecl = current_function_decl;
      if (TREE_CODE (op0) == INDIRECT_REF
	  && TREE_CODE (op1) == FIELD_DECL
	  && ! METHOD_STATIC (mdecl)
	  && FIELD_FINAL (op1))
	{
	  op0 = TREE_OPERAND (op0, 0);
	  if (op0 == BLOCK_EXPR_DECLS (DECL_FUNCTION_BODY (mdecl)))
	    return op1;
	}
    }
  else if (TREE_CODE (exp) == INDIRECT_REF)
    {
      /* For indirect dispatch, look for an expression of the form 
      (indirect_ref (+ (array_ref otable <N>) this)).  
      FIXME: it would probably be better to generate a JAVA_FIELD_REF
      expression that gets converted to OTABLE access at
      gimplification time.  */
      exp = TREE_OPERAND (exp, 0);
      if (TREE_CODE (exp) == PLUS_EXPR)
	{
	  tree op0 = TREE_OPERAND (exp, 0);
	  STRIP_NOPS (op0);
	  if (TREE_CODE (op0) == ARRAY_REF)
	    {
	      tree table = TREE_OPERAND (op0, 0);
	      if (TREE_CODE (table) == VAR_DECL
		  && DECL_LANG_SPECIFIC (table)
		  && DECL_OWNER (table) 
		  && TYPE_OTABLE_DECL (DECL_OWNER (table)) == table)
		{
		  HOST_WIDE_INT index 
		    = TREE_INT_CST_LOW (TREE_OPERAND (op0, 1));
		  tree otable_methods 
		    = TYPE_OTABLE_METHODS (DECL_OWNER (table));
		  tree element;
		  for (element = otable_methods; 
		       element; 
		       element = TREE_CHAIN (element))
		    {
		      if (index == 1)
			{
			  tree purpose = TREE_PURPOSE (element);
			  if (TREE_CODE (purpose) == FIELD_DECL)
			    return purpose;
			  else
			    return NULL_TREE;
			}
		      --index;
		    }
		}
	    }
	}
    }

  return NULL_TREE;
}
Example #16
void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_convert (&tmp, type);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}