Example #1
void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
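A standalone sketch of the extension step used above, assuming a signed
type (the real double_int_ext_for_comb picks sign- or zero-extension from
TYPE_UNSIGNED of the combination's type; ext_for_prec is a hypothetical
helper, not GCC code):

#include <stdint.h>
#include <stdio.h>

/* Reduce a coefficient to PREC bits by sign-extension, mirroring what
   double_int_ext_for_comb does for a signed combination type.  */
static int64_t
ext_for_prec (int64_t coef, unsigned prec)
{
  uint64_t mask = prec >= 64 ? ~0ULL : (1ULL << prec) - 1;
  uint64_t low = (uint64_t) coef & mask;
  if (prec < 64 && (low >> (prec - 1)) & 1)
    low |= ~mask;		/* sign-extend from bit PREC-1 */
  return (int64_t) low;
}

int
main (void)
{
  /* Narrowing 0x100 to 8 bits yields 0, so the loop above would drop
     that element; 0xFF becomes -1 and is kept.  */
  printf ("%lld %lld\n", (long long) ext_for_prec (0x100, 8),
	  (long long) ext_for_prec (0xFF, 8));	/* prints: 0 -1 */
  return 0;
}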
Example #2
/* Allocate memory, using an optional status argument.
 
   This function behaves like the following pseudo-code:

    void *
    allocate (size_t size, void** token, int *stat, char* errmsg, int errlen)
    {
      void *newmem;

      newmem = _caf_register (size, regtype, token, &stat, errmsg, errlen);
      return newmem;
    }  */
static void
gfc_allocate_using_lib (stmtblock_t * block, tree pointer, tree size,
			tree token, tree status, tree errmsg, tree errlen)
{
  tree tmp, pstat;

  gcc_assert (token != NULL_TREE);

  /* Evaluate size only once, and make sure it has the right type.  */
  size = gfc_evaluate_now (size, block);
  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* The allocation itself.  */
  if (status == NULL_TREE)
    pstat = null_pointer_node;
  else
    pstat = gfc_build_addr_expr (NULL_TREE, status);

  if (errmsg == NULL_TREE)
    {
      gcc_assert (errlen == NULL_TREE);
      errmsg = null_pointer_node;
      errlen = build_int_cst (integer_type_node, 0);
    }

  tmp = build_call_expr_loc (input_location,
	     gfor_fndecl_caf_register, 6,
	     fold_build2_loc (input_location,
			      MAX_EXPR, size_type_node, size,
			      build_int_cst (size_type_node, 1)),
	     build_int_cst (integer_type_node,
			    GFC_CAF_COARRAY_ALLOC),
	     token, pstat, errmsg, errlen);

  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
			 TREE_TYPE (pointer), pointer,
			 fold_convert (TREE_TYPE (pointer), tmp));
  gfc_add_expr_to_block (block, tmp);
}
Example #3
static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval,
			       double_int_mul (coef, c->elts[i].coef));
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val,
			     double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}
Example #4
static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var = copy_ssa_name (acc, NULL);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, var, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}
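A standalone sketch of the else-branch above, with illustrative C types:
when the accumulator and operand types are incompatible, the update is
computed in the operand's type and converted back, acc = (A)((B)acc OP op1).

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  int64_t acc = 7;
  uint32_t op1 = 5;
  /* Compute in op1's (narrower) type, then convert back, as the
     fold_convert/fold_build2 nest above does.  */
  acc = (int64_t) (uint32_t) ((uint32_t) acc + op1);
  printf ("%lld\n", (long long) acc);	/* prints: 12 */
  return 0;
}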
Example #5
File: tree-chrec.c Project: woepaul/gcc
static tree
tree_fold_binomial (tree type, tree n, unsigned int k)
{
  double_int num, denom, idx, di_res;
  bool overflow;
  unsigned int i;
  tree res;

  /* Handle the most frequent cases.  */
  if (k == 0)
    return build_int_cst (type, 1);
  if (k == 1)
    return fold_convert (type, n);

  /* Numerator = n.  */
  num = TREE_INT_CST (n);

  /* Check that k <= n.  */
  if (num.ult (double_int::from_uhwi (k)))
    return NULL_TREE;

  /* Denominator = 2.  */
  denom = double_int::from_uhwi (2);

  /* Index = Numerator-1.  */
  idx = num - double_int_one;

  /* Numerator = Numerator*Index = n*(n-1).  */
  num = num.mul_with_sign (idx, false, &overflow);
  if (overflow)
    return NULL_TREE;

  for (i = 3; i <= k; i++)
    {
      /* Index--.  */
      --idx;

      /* Numerator *= Index.  */
      num = num.mul_with_sign (idx, false, &overflow);
      if (overflow)
	return NULL_TREE;

      /* Denominator *= i.  */
      denom *= double_int::from_uhwi (i);
    }

  /* Result = Numerator / Denominator.  */
  di_res = num.div (denom, true, EXACT_DIV_EXPR);
  res = build_int_cst_wide (type, di_res.low, di_res.high);
  return int_fits_type_p (res, type) ? res : NULL_TREE;
}
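The same accumulation scheme in plain C, as a sketch with a fixed-width
numerator instead of double_int (binomial is a hypothetical helper; the
overflow test stands in for mul_with_sign's overflow flag, and
denominator overflow is ignored for brevity):

#include <stdint.h>
#include <stdio.h>

/* Compute C(n,k) as (n*(n-1)*...*(n-k+1)) / k!, failing on overflow.
   Returns 1 on success and stores the result in *res.  */
static int
binomial (uint64_t n, unsigned k, uint64_t *res)
{
  if (k == 0) { *res = 1; return 1; }
  if (k == 1) { *res = n; return 1; }
  if (n < k)  return 0;		/* check that k <= n */

  uint64_t num = n, denom = 2, idx = n - 1;
  if (num > UINT64_MAX / idx)
    return 0;			/* overflow */
  num *= idx;
  for (unsigned i = 3; i <= k; i++)
    {
      --idx;
      if (num > UINT64_MAX / idx)
	return 0;		/* overflow */
      num *= idx;
      denom *= i;
    }
  *res = num / denom;		/* exact division, as EXACT_DIV_EXPR asserts */
  return 1;
}

int
main (void)
{
  uint64_t r;
  if (binomial (10, 3, &r))
    printf ("C(10,3) = %llu\n", (unsigned long long) r);	/* 120 */
  return 0;
}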
Example #6
tree
gfc_build_array_ref (tree base, tree offset, tree decl)
{
  tree type = TREE_TYPE (base);
  tree tmp;

  gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
  type = TREE_TYPE (type);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Strip NON_LVALUE_EXPR nodes.  */
  STRIP_TYPE_NOPS (offset);

  /* If the array reference is to a pointer, whose target contains a
     subreference, use the span that is stored with the backend decl
     and reference the element with pointer arithmetic.  */
  if (decl && (TREE_CODE (decl) == FIELD_DECL
		 || TREE_CODE (decl) == VAR_DECL
		 || TREE_CODE (decl) == PARM_DECL)
	&& GFC_DECL_SUBREF_ARRAY_P (decl)
	&& !integer_zerop (GFC_DECL_SPAN(decl)))
    {
      offset = fold_build2 (MULT_EXPR, gfc_array_index_type,
			    offset, GFC_DECL_SPAN(decl));
      tmp = gfc_build_addr_expr (pvoid_type_node, base);
      tmp = fold_build2 (POINTER_PLUS_EXPR, pvoid_type_node,
			 tmp, fold_convert (sizetype, offset));
      tmp = fold_convert (build_pointer_type (type), tmp);
      if (!TYPE_STRING_FLAG (type))
	tmp = build_fold_indirect_ref_loc (input_location, tmp);
      return tmp;
    }
  else
    /* Otherwise use a straightforward array reference.  */
    return build4 (ARRAY_REF, type, base, offset, NULL_TREE, NULL_TREE);
}
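A standalone sketch of the span-based path, with illustrative values:
when the stride between elements (the "span") differs from the element
size, the element address is formed as base + offset * span instead of
by ordinary indexing.

#include <stdio.h>

int
main (void)
{
  /* 4 elements laid out 16 bytes apart; the union forces alignment.  */
  union { double align; char bytes[4 * 16]; } storage;
  size_t span = 16, offset = 2;
  double *elt = (double *) (storage.bytes + offset * span);
  *elt = 3.5;
  printf ("%g\n", *elt);	/* prints: 3.5 */
  return 0;
}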
Example #7
static tree 
chrec_convert_1 (tree type, tree chrec, tree at_stmt,
		 bool use_overflow_semantics)
{
  tree ct, res;
  tree base, step;
  struct loop *loop;

  if (automatically_generated_chrec_p (chrec))
    return chrec;
  
  ct = chrec_type (chrec);
  if (ct == type)
    return chrec;

  if (!evolution_function_is_affine_p (chrec))
    goto keep_cast;

  loop = current_loops->parray[CHREC_VARIABLE (chrec)];
  base = CHREC_LEFT (chrec);
  step = CHREC_RIGHT (chrec);

  if (convert_affine_scev (loop, type, &base, &step, at_stmt,
			   use_overflow_semantics))
    return build_polynomial_chrec (loop->num, base, step);

  /* If we cannot propagate the cast inside the chrec, just keep the cast.  */
keep_cast:
  res = fold_convert (type, chrec);

  /* Don't propagate overflows.  */
  if (CONSTANT_CLASS_P (res))
    {
      TREE_CONSTANT_OVERFLOW (res) = 0;
      TREE_OVERFLOW (res) = 0;
    }

  /* But reject constants that don't fit in their type after conversion.
     This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
     natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
     and can cause problems later when computing niters of loops.  Note
     that we don't do the check before converting because we don't want
     to reject conversions of negative chrecs to unsigned types.  */
  if (TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (type) == INTEGER_TYPE
      && !int_fits_type_p (res, type))
    res = chrec_dont_know;

  return res;
}
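A standalone sketch of the final rejection step, with illustrative
bounds: int_fits_type_p compares against the type's declared
TYPE_MIN_VALUE/TYPE_MAX_VALUE, which may be narrower than its precision
suggests.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for int_fits_type_p on a type with declared
   bounds [type_min, type_max].  */
static int
fits_p (int64_t v, int64_t type_min, int64_t type_max)
{
  return type_min <= v && v <= type_max;
}

int
main (void)
{
  /* A converted constant of 200 does not fit a type declared with
     bounds [0, 100], so chrec_convert_1 would return chrec_dont_know.  */
  printf ("%d\n", fits_p (200, 0, 100));	/* prints: 0 */
  return 0;
}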
Example #8
void
gfc_conv_const_charlen (gfc_charlen * cl)
{
  if (!cl || cl->backend_decl)
    return;

  if (cl->length && cl->length->expr_type == EXPR_CONSTANT)
    {
      cl->backend_decl = gfc_conv_mpz_to_tree (cl->length->value.integer,
					       cl->length->ts.kind);
      cl->backend_decl = fold_convert (gfc_charlen_type_node,
					cl->backend_decl);
    }
}
Example #9
tree
gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), rank, size, esize, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  /* Handle copying allocatable arrays.  */
  gfc_start_block (&block);

  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
		      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
		      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
			gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  call = build_call_expr_loc (input_location,
			  built_in_decls[BUILT_IN_MEMCPY], 3,
			  fold_convert (pvoid_type_node,
					gfc_conv_descriptor_data_get (dest)),
			  fold_convert (pvoid_type_node,
					gfc_conv_descriptor_data_get (src)),
			  size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}
Example #10
static bool
gimple_resimplify1 (gimple_seq *seq,
		    code_helper *res_code, tree type, tree *res_ops,
		    tree (*valueize)(tree))
{
  if (constant_for_folding (res_ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_code->is_tree_code ())
	tem = const_unop (*res_code, type, res_ops[0]);
      else
	{
	  tree decl = builtin_decl_implicit (*res_code);
	  if (decl)
	    {
	      tem = fold_builtin_n (UNKNOWN_LOCATION, decl, res_ops, 1, false);
	      if (tem)
		{
		  /* fold_builtin_n wraps the result inside a NOP_EXPR.  */
		  STRIP_NOPS (tem);
		  tem = fold_convert (type, tem);
		}
	    }
	}
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  res_ops[0] = tem;
	  res_ops[1] = NULL_TREE;
	  res_ops[2] = NULL_TREE;
	  *res_code = TREE_CODE (res_ops[0]);
	  return true;
	}
    }

  code_helper res_code2;
  tree res_ops2[3] = {};
  if (gimple_simplify (&res_code2, res_ops2, seq, valueize,
		       *res_code, type, res_ops[0]))
    {
      *res_code = res_code2;
      res_ops[0] = res_ops2[0];
      res_ops[1] = res_ops2[1];
      res_ops[2] = res_ops2[2];
      return true;
    }

  return false;
}
Example #11
tree 
chrec_convert (tree type, 
	       tree chrec)
{
  tree ct;
  
  if (automatically_generated_chrec_p (chrec))
    return chrec;
  
  ct = chrec_type (chrec);
  if (ct == type)
    return chrec;

  if (TYPE_PRECISION (ct) < TYPE_PRECISION (type))
    return count_ev_in_wider_type (type, chrec);

  switch (TREE_CODE (chrec))
    {
    case POLYNOMIAL_CHREC:
      return build_polynomial_chrec (CHREC_VARIABLE (chrec),
				     chrec_convert (type,
						    CHREC_LEFT (chrec)),
				     chrec_convert (type,
						    CHREC_RIGHT (chrec)));

    default:
      {
	tree res = fold_convert (type, chrec);

	/* Don't propagate overflows.  */
	TREE_OVERFLOW (res) = 0;
	if (CONSTANT_CLASS_P (res))
	  TREE_CONSTANT_OVERFLOW (res) = 0;

	/* But reject constants that don't fit in their type after conversion.
	   This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
	   natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
	   and can cause problems later when computing niters of loops.  Note
	   that we don't do the check before converting because we don't want
	   to reject conversions of negative chrecs to unsigned types.  */
	if (TREE_CODE (res) == INTEGER_CST
	    && TREE_CODE (type) == INTEGER_TYPE
	    && !int_fits_type_p (res, type))
	  res = chrec_dont_know;

	return res;
      }
    }
}
Example #12
File: vxworks.c Project: AHelper/gcc
static tree
vxworks_emutls_var_init (tree var, tree decl, tree tmpl_addr)
{
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 3);
  
  tree type = TREE_TYPE (var);
  tree field = TYPE_FIELDS (type);
  
  constructor_elt elt = {field, fold_convert (TREE_TYPE (field), tmpl_addr)};
  v->quick_push (elt);
  
  field = DECL_CHAIN (field);
  elt.index = field;
  elt.value = build_int_cst (TREE_TYPE (field), 0);
  v->quick_push (elt);
  
  field = DECL_CHAIN (field);
  elt.index = field;
  elt.value = fold_convert (TREE_TYPE (field), DECL_SIZE_UNIT (decl));
  v->quick_push (elt);
  
  return build_constructor (type, v);
}
Example #13
File: trans.c Project: philscher/gcc
/* Reallocate MEM so it has SIZE bytes of data.  This behaves like the
   following pseudo-code:

void *
internal_realloc (void *mem, size_t size)
{
  res = realloc (mem, size);
  if (!res && size != 0)
    _gfortran_os_error ("Allocation would exceed memory limit");

  return res;
}  */
tree
gfc_call_realloc (stmtblock_t * block, tree mem, tree size)
{
  tree msg, res, nonzero, null_result, tmp;
  tree type = TREE_TYPE (mem);

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);

  /* Call realloc and check the result.  */
  tmp = build_call_expr_loc (input_location,
			 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
			 fold_convert (pvoid_type_node, mem), size);
  gfc_add_modify (block, res, fold_convert (type, tmp));
  null_result = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
				 res, build_int_cst (pvoid_type_node, 0));
  nonzero = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, size,
			     build_int_cst (size_type_node, 0));
  null_result = fold_build2_loc (input_location, TRUTH_AND_EXPR, boolean_type_node,
				 null_result, nonzero);
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
			     ("Allocation would exceed memory limit"));
  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			 null_result,
			 build_call_expr_loc (input_location,
					      gfor_fndecl_os_error, 1, msg),
			 build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  return res;
}
Example #14
static void
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
		      gimple_stmt_iterator bsi)
{
  tree addr_base, nb_bytes;
  bool res = false;
  gimple_seq stmt_list = NULL, stmts;
  gimple fn_call;
  tree mem, fn;
  struct data_reference *dr = XCNEW (struct data_reference);
  location_t loc = gimple_location (stmt);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  res = dr_analyze_innermost (dr);
  gcc_assert (res && stride_of_unit_type_p (DR_STEP (dr), TREE_TYPE (op0)));

  nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
  addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
  addr_base = fold_convert_loc (loc, sizetype, addr_base);

  /* Test for a negative stride, iterating over every element.  */
  if (integer_zerop (size_binop (PLUS_EXPR,
				 TYPE_SIZE_UNIT (TREE_TYPE (op0)),
				 fold_convert (sizetype, DR_STEP (dr)))))
    {
      addr_base = size_binop_loc (loc, MINUS_EXPR, addr_base,
				  fold_convert_loc (loc, sizetype, nb_bytes));
      addr_base = size_binop_loc (loc, PLUS_EXPR, addr_base,
				  TYPE_SIZE_UNIT (TREE_TYPE (op0)));
    }

  addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
			       TREE_TYPE (DR_BASE_ADDRESS (dr)),
			       DR_BASE_ADDRESS (dr), addr_base);
  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fn = build_fold_addr_expr (implicit_built_in_decls [BUILT_IN_MEMSET]);
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);
  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  free_data_ref (dr);
}
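A standalone sketch of the negative-stride adjustment, with illustrative
values: when the loop walks backwards, the lowest byte touched is
base - nb_bytes + sizeof(element), so that is where the memset must start.

#include <stdio.h>
#include <string.h>

int
main (void)
{
  int buf[8];
  /* Pretend the loop writes buf[4] down to buf[1]: four ints.  */
  size_t nb_bytes = 4 * sizeof (int);
  char *base = (char *) &buf[4];		/* address of the first access */
  char *start = base - nb_bytes + sizeof (int);	/* == &buf[1] */
  memset (start, 0, nb_bytes);
  printf ("%d %d\n", buf[1], buf[4]);		/* prints: 0 0 */
  return 0;
}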
Example #15
static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gimple phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
Example #16
tree
build_exception_object_ref (tree type)
{
  tree obj;

  /* Java only passes objects via pointer and doesn't require adjusting.
     The java object is immediately before the generic exception header.  */
  obj = build_exception_object_var ();
  obj = fold_convert (build_pointer_type (type), obj);
  obj = fold_build_pointer_plus (obj,
		fold_build1 (NEGATE_EXPR, sizetype,
			     TYPE_SIZE_UNIT (TREE_TYPE (obj))));
  obj = build1 (INDIRECT_REF, type, obj);

  return obj;
}
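A standalone sketch of the layout this relies on, with illustrative
types: the Java object is placed immediately before the generic
exception header, so the object pointer is the header pointer minus the
object's size.

#include <stdio.h>

struct java_object { int data; };

int
main (void)
{
  union { double align; char bytes[64]; } frame;
  char *header = frame.bytes + sizeof (struct java_object);
  struct java_object *obj
    = (struct java_object *) (header - sizeof (struct java_object));
  obj->data = 42;
  printf ("%d\n", obj->data);	/* prints: 42 */
  return 0;
}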
Example #17
static tree
tree_fold_binomial (tree type, tree n, unsigned int k)
{
  bool overflow;
  unsigned int i;
  tree res;

  /* Handle the most frequent cases.  */
  if (k == 0)
    return build_int_cst (type, 1);
  if (k == 1)
    return fold_convert (type, n);

  /* Check that k <= n.  */
  if (wi::ltu_p (n, k))
    return NULL_TREE;

  /* Denominator = 2.  */
  wide_int denom = wi::two (TYPE_PRECISION (TREE_TYPE (n)));

  /* Index = Numerator-1.  */
  wide_int idx = wi::sub (n, 1);

  /* Numerator = Numerator*Index = n*(n-1).  */
  wide_int num = wi::smul (n, idx, &overflow);
  if (overflow)
    return NULL_TREE;

  for (i = 3; i <= k; i++)
    {
      /* Index--.  */
      --idx;

      /* Numerator *= Index.  */
      num = wi::smul (num, idx, &overflow);
      if (overflow)
	return NULL_TREE;

      /* Denominator *= i.  */
      denom *= i;
    }

  /* Result = Numerator / Denominator.  */
  wide_int di_res = wi::udiv_trunc (num, denom);
  res = wide_int_to_tree (type, di_res);
  return int_fits_type_p (res, type) ? res : NULL_TREE;
}
Example #18
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
Example #19
tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
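A quick sketch of why the sign split above matters: for a 32-bit
unsigned x, x - 1 and x + 0xffffffff are the same value modulo 2^32, but
the subtraction form is what we want the folded tree to show.

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t x = 10;
  printf ("%u %u\n", x - 1, x + 0xFFFFFFFFu);	/* prints: 9 9 */
  return 0;
}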
Example #20
File: ubsan.c Project: acoxepochlabs/gcc
tree
ubsan_encode_value (tree t, bool in_expand_p)
{
  tree type = TREE_TYPE (t);
  const unsigned int bitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
  if (bitsize <= POINTER_SIZE)
    switch (TREE_CODE (type))
      {
      case BOOLEAN_TYPE:
      case ENUMERAL_TYPE:
      case INTEGER_TYPE:
	return fold_build1 (NOP_EXPR, pointer_sized_int_node, t);
      case REAL_TYPE:
	{
	  tree itype = build_nonstandard_integer_type (bitsize, true);
	  t = fold_build1 (VIEW_CONVERT_EXPR, itype, t);
	  return fold_convert (pointer_sized_int_node, t);
	}
      default:
	gcc_unreachable ();
      }
  else
    {
      if (!DECL_P (t) || !TREE_ADDRESSABLE (t))
	{
	  /* The reason for this is that we don't want to pessimize
	     code by making vars unnecessarily addressable.  */
	  tree var = create_tmp_var (type, NULL);
	  tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
	  if (in_expand_p)
	    {
	      rtx mem
		= assign_stack_temp_for_type (TYPE_MODE (type),
					      GET_MODE_SIZE (TYPE_MODE (type)),
					      type);
	      SET_DECL_RTL (var, mem);
	      expand_assignment (var, t, false);
	      return build_fold_addr_expr (var);
	    }
	  t = build_fold_addr_expr (var);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
	}
      else
	return build_fold_addr_expr (t);
    }
}
Example #21
tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);

  if (!expr)
   return error_mark_node;

  if (do_not_fold)
    return build1 (NOP_EXPR, type, expr);

  if (type == TREE_TYPE (expr)
      || TREE_CODE (expr) == ERROR_MARK)
    return expr;
  if (TREE_CODE (TREE_TYPE (expr)) == ERROR_MARK)
    return error_mark_node;
  if (code == VOID_TYPE)
    return build1 (CONVERT_EXPR, type, expr);
  if (code == BOOLEAN_TYPE || code == CHAR_TYPE)
    return fold_convert (type, expr);
  if (code == INTEGER_TYPE)
    {
      if ((really_constant_p (expr)
	   || (! flag_unsafe_math_optimizations
	       && ! flag_emit_class_files))
	  && TREE_CODE (TREE_TYPE (expr)) == REAL_TYPE
	  && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT)
	return fold (convert_ieee_real_to_integer (type, expr));
      else
	{
	  /* fold very helpfully sets the overflow status if a type
	     overflows in a narrowing integer conversion, but Java
	     doesn't care.  */
	  tree tmp = fold (convert_to_integer (type, expr));
	  TREE_OVERFLOW (tmp) = 0;
	  return tmp;
	}
    }	  
  if (code == REAL_TYPE)
    return fold (convert_to_real (type, expr));
  if (code == POINTER_TYPE)
    return fold (convert_to_pointer (type, expr));
  error ("conversion to non-scalar type requested");
  return error_mark_node;
}
Example #22
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
  if (alias_ptr_type
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
Example #23
static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type, elt);

  if (double_int_one_p (scale))
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt,
			double_int_to_tree (type, scale));

  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt,
		     double_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
Example #24
static void
create_canonical_iv (struct loop *loop, edge exit, tree niter)
{
  edge in;
  tree cond, type, var;
  block_stmt_iterator incr_at;
  enum tree_code cmp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Added canonical iv to loop %d, ", loop->num);
      print_generic_expr (dump_file, niter, TDF_SLIM);
      fprintf (dump_file, " iterations.\n");
    }

  cond = last_stmt (exit->src);
  in = EDGE_SUCC (exit->src, 0);
  if (in == exit)
    in = EDGE_SUCC (exit->src, 1);

  /* Note that we do not need to worry about overflows, since
     type of niter is always unsigned and all comparisons are
     just for equality/nonequality -- i.e. everything works
     with modulo arithmetic.  */

  type = TREE_TYPE (niter);
  niter = fold_build2 (PLUS_EXPR, type,
		       niter,
		       build_int_cst (type, 1));
  incr_at = bsi_last (in->src);
  create_iv (niter,
	     fold_convert (type, integer_minus_one_node),
	     NULL_TREE, loop,
	     &incr_at, false, NULL, &var);

  cmp = (exit->flags & EDGE_TRUE_VALUE) ? EQ_EXPR : NE_EXPR;
  COND_EXPR_COND (cond) = build2 (cmp, boolean_type_node,
				  var,
				  build_int_cst (type, 0));
  update_stmt (cond);
}
Example #25
static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
Example #26
File: trans.c Project: PeyloW/gcc-4.6.4
/* Free a given variable, if it's not NULL.  */
tree
gfc_call_free (tree var)
{
  stmtblock_t block;
  tree tmp, cond, call;

  if (TREE_TYPE (var) != TREE_TYPE (pvoid_type_node))
    var = fold_convert (pvoid_type_node, var);

  gfc_start_block (&block);
  var = gfc_evaluate_now (var, &block);
  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, var,
			  build_int_cst (pvoid_type_node, 0));
  call = build_call_expr_loc (input_location,
			      built_in_decls[BUILT_IN_FREE], 1, var);
  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond, call,
			 build_empty_stmt (input_location));
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
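In the style of the pseudo-code comments on examples #2 and #13, a
minimal runnable sketch of what the block built here amounts to (the
function name is illustrative):

#include <stdlib.h>

void
free_if_nonnull (void *var)
{
  if (var != NULL)
    free (var);		/* only call free when the pointer is non-NULL */
}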
Example #27
tree
omp_get_for_step_from_incr (location_t loc, tree incr)
{
  tree step;
  switch (TREE_CODE (incr))
    {
    case PLUS_EXPR:
      step = TREE_OPERAND (incr, 1);
      break;
    case POINTER_PLUS_EXPR:
      step = fold_convert (ssizetype, TREE_OPERAND (incr, 1));
      break;
    case MINUS_EXPR:
      step = TREE_OPERAND (incr, 1);
      step = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (step), step);
      break;
    default:
      gcc_unreachable ();
    }
  return step;
}
Example #28
void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
      stmt = gsi_stmt (*gsi);
    }
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
      tree lhs = NULL_TREE;
      tree rhs = fold_convert (TREE_TYPE (val), integer_zero_node);
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_lhs (stmt, lhs);
      gimple_cond_set_rhs (stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      gimple new_stmt;

      tree expr = NULL_TREE;
      propagate_tree_value (&expr, val);
      new_stmt = gimple_build_assign (gimple_call_lhs (stmt), expr);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
      gsi_replace (gsi, new_stmt, false);
    }
  else if (gimple_code (stmt) == GIMPLE_SWITCH)
    propagate_tree_value (gimple_switch_index_ptr (stmt), val);
  else
    gcc_unreachable ();
}
Example #29
File: builtins.c Project: Nodplus/gcc
static tree
putVolatile_builtin (tree method_return_type ATTRIBUTE_UNUSED, 
		     tree orig_call)
{
  tree addr, stmt, modify_stmt;
  UNMARSHAL4 (orig_call);
  
  addr = build_addr_sum (value_type, obj_arg, offset_arg);
  addr 
    = fold_convert (build_pointer_type (build_type_variant (value_type, 0, 1)),
		    addr);
  
  stmt = build_call_expr (builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE), 0);
  modify_stmt = fold_build2 (MODIFY_EXPR, value_type,
			     build_java_indirect_ref (value_type, addr,
						      flag_check_references),
			     value_arg);
  stmt = build2 (COMPOUND_EXPR, TREE_TYPE (modify_stmt), 
		 stmt, modify_stmt);

  return build_check_this (stmt, this_arg);
}
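A standalone sketch of the statement sequence being built: a full memory
barrier followed by a store through a volatile-qualified pointer, which
is what the BUILT_IN_SYNC_SYNCHRONIZE call plus the MODIFY_EXPR amount
to (requires GCC or Clang for __sync_synchronize).

#include <stdio.h>

int
main (void)
{
  static int slot;
  volatile int *p = &slot;
  __sync_synchronize ();	/* the barrier emitted first */
  *p = 42;			/* then the volatile store of value_arg */
  printf ("%d\n", *p);		/* prints: 42 */
  return 0;
}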
Example #30
tree
ubsan_encode_value (tree t)
{
  tree type = TREE_TYPE (t);
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
      if (TYPE_PRECISION (type) <= POINTER_SIZE)
	return fold_build1 (NOP_EXPR, pointer_sized_int_node, t);
      else
	return build_fold_addr_expr (t);
    case REAL_TYPE:
      {
	unsigned int bitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
	if (bitsize <= POINTER_SIZE)
	  {
	    tree itype = build_nonstandard_integer_type (bitsize, true);
	    t = fold_build1 (VIEW_CONVERT_EXPR, itype, t);
	    return fold_convert (pointer_sized_int_node, t);
	  }
	else
	  {
	    if (!TREE_ADDRESSABLE (t))
	      {
		/* The reason for this is that we don't want to pessimize
		   code by making vars unnecessarily addressable.  */
		tree var = create_tmp_var (TREE_TYPE (t), NULL);
		tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
		t = build_fold_addr_expr (var);
		return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
	      }
	    else
	      return build_fold_addr_expr (t);
	  }
      }
    default:
      gcc_unreachable ();
    }
}