Example #1
/* Subtract polynomial item CST * VAR from ADDR.  */
static void
chkp_sub_addr_item (address_t &addr, tree cst, tree var)
{
    int n = chkp_pol_find (addr, var);

    cst = chkp_extend_const (cst);

    if (n < 0)
    {
        struct pol_item item;
        item.cst = fold_build2 (MINUS_EXPR, TREE_TYPE (cst),
                                integer_zero_node, cst);
        item.var = var;

        addr.pol.safe_push (item);
        addr.pol.qsort (&chkp_pol_item_compare);
    }
    else
    {
        addr.pol[n].cst = fold_build2 (MINUS_EXPR, TREE_TYPE (addr.pol[n].cst),
                                       addr.pol[n].cst, cst);
        if (TREE_CODE (addr.pol[n].cst) == INTEGER_CST
                && integer_zerop (addr.pol[n].cst))
            addr.pol.ordered_remove (n);
    }
}
Example #2
static tree
chrec_evaluate (unsigned var, tree chrec, tree n, unsigned int k)
{
  tree arg0, arg1, binomial_n_k;
  tree type = TREE_TYPE (chrec);
  struct loop *var_loop = get_loop (cfun, var);

  while (TREE_CODE (chrec) == POLYNOMIAL_CHREC
	 && flow_loop_nested_p (var_loop, get_chrec_loop (chrec)))
    chrec = CHREC_LEFT (chrec);

  if (TREE_CODE (chrec) == POLYNOMIAL_CHREC
      && CHREC_VARIABLE (chrec) == var)
    {
      arg1 = chrec_evaluate (var, CHREC_RIGHT (chrec), n, k + 1);
      if (arg1 == chrec_dont_know)
	return chrec_dont_know;
      binomial_n_k = tree_fold_binomial (type, n, k);
      if (!binomial_n_k)
	return chrec_dont_know;
      arg0 = fold_build2 (MULT_EXPR, type,
			  CHREC_LEFT (chrec), binomial_n_k);
      return chrec_fold_plus (type, arg0, arg1);
    }

  binomial_n_k = tree_fold_binomial (type, n, k);
  if (!binomial_n_k)
    return chrec_dont_know;

  return fold_build2 (MULT_EXPR, type, chrec, binomial_n_k);
}
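Example #2 builds, as a tree expression, the closed form of a polynomial chain of recurrences: the value at iteration n is the sum over k of coefficient_k * binomial (n, k). A minimal standalone C sketch of the same arithmetic (plain integers instead of GCC trees; the coefficients below are illustrative only):

/* Evaluate the chrec {3, +, 2, +, 1} at iterations 0..4 using the
   binomial closed form that chrec_evaluate constructs symbolically.  */
#include <stdio.h>

static unsigned long long
binomial (unsigned long long n, unsigned k)
{
  unsigned long long r = 1;
  if (k > n)
    return 0;
  for (unsigned i = 1; i <= k; i++)
    r = r * (n - k + i) / i;    /* division is exact at every step */
  return r;
}

static long long
evaluate_chrec (const long long *coef, unsigned len, unsigned long long n)
{
  long long val = 0;
  for (unsigned k = 0; k < len; k++)
    val += coef[k] * (long long) binomial (n, k);
  return val;
}

int
main (void)
{
  long long coef[3] = { 3, 2, 1 };    /* {3, +, 2, +, 1} */
  for (unsigned long long n = 0; n < 5; n++)
    printf ("value at iteration %llu = %lld\n",
            n, evaluate_chrec (coef, 3, n));
  return 0;    /* prints 3, 5, 8, 12, 17 */
}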
Example #3
static tree
convert_ieee_real_to_integer (tree type, tree expr)
{
  tree result;
  expr = save_expr (expr);

  result = fold_build3 (COND_EXPR, type,
			fold_build2 (NE_EXPR, boolean_type_node, expr, expr),
			 convert (type, integer_zero_node),
			 convert_to_integer (type, expr));
  
  result = fold_build3 (COND_EXPR, type, 
			fold_build2 (LE_EXPR, boolean_type_node, expr, 
				     convert (TREE_TYPE (expr), 
					      TYPE_MIN_VALUE (type))),
			TYPE_MIN_VALUE (type),
			result);
  
  result = fold_build3 (COND_EXPR, type,
			fold_build2 (GE_EXPR, boolean_type_node, expr, 
				     convert (TREE_TYPE (expr), 
					      TYPE_MAX_VALUE (type))),
			TYPE_MAX_VALUE (type),
			result);

  return result;
}  
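The three nested COND_EXPRs implement the Java conversion rule for IEEE values: NaN becomes 0 and out-of-range values saturate at the integer type's bounds. The same rule written directly in C, as a standalone sketch (double to int only; not front-end code):

#include <limits.h>
#include <math.h>
#include <stdio.h>

/* NaN -> 0; values at or below INT_MIN saturate to INT_MIN; values at or
   above INT_MAX saturate to INT_MAX; everything else truncates.  */
static int
ieee_double_to_int (double x)
{
  if (isnan (x))
    return 0;
  if (x <= (double) INT_MIN)
    return INT_MIN;
  if (x >= (double) INT_MAX)
    return INT_MAX;
  return (int) x;
}

int
main (void)
{
  printf ("%d %d %d %d\n",
          ieee_double_to_int (NAN),      /* 0 */
          ieee_double_to_int (-1e30),    /* INT_MIN */
          ieee_double_to_int (1e30),     /* INT_MAX */
          ieee_double_to_int (42.9));    /* 42 */
  return 0;
}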
Example #4
static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{

  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gimple stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign_with_ops (code, result, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}
Example #5
static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build2 (POINTER_PLUS_EXPR, type,
			       parts->base,
			       fold_convert (sizetype, elt));
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}
Example #6
tree
UI_To_gnu (Uint Input, tree type)
{
  /* We might have a TYPE with biased representation and be passed an unbiased
     value that doesn't fit.  We always use an unbiased type to be able to hold
     any such possible value for intermediate computations and then rely on a
     conversion back to TYPE to perform the bias adjustment when need be.  */
  tree comp_type
    = TREE_CODE (type) == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type)
      ? get_base_type (type) : type;
  tree gnu_ret;

  if (Input <= Uint_Direct_Last)
    gnu_ret = build_cst_from_int (comp_type, Input - Uint_Direct_Bias);
  else
    {
      Int Idx = Uints_Ptr[Input].Loc;
      Pos Length = Uints_Ptr[Input].Length;
      Int First = Udigits_Ptr[Idx];
      tree gnu_base;

      gcc_assert (Length > 0);

      /* The computations we perform below always require a type at least as
	 large as an integer not to overflow.  FP types are always fine, but
	 INTEGER or ENUMERAL types we are handed may be too short.  We use a
	 base integer type node for the computations in this case and will
	 convert the final result back to the incoming type later on.  */
      if (!SCALAR_FLOAT_TYPE_P (comp_type) && TYPE_PRECISION (comp_type) < 32)
	comp_type = gnat_type_for_size (32, 0);

      gnu_base = build_cst_from_int (comp_type, Base);

      gnu_ret = build_cst_from_int (comp_type, First);
      if (First < 0)
	for (Idx++, Length--; Length; Idx++, Length--)
	  gnu_ret = fold_build2 (MINUS_EXPR, comp_type,
				 fold_build2 (MULT_EXPR, comp_type,
					      gnu_ret, gnu_base),
				 build_cst_from_int (comp_type,
						     Udigits_Ptr[Idx]));
      else
	for (Idx++, Length--; Length; Idx++, Length--)
	  gnu_ret = fold_build2 (PLUS_EXPR, comp_type,
				 fold_build2 (MULT_EXPR, comp_type,
					      gnu_ret, gnu_base),
				 build_cst_from_int (comp_type,
						     Udigits_Ptr[Idx]));
    }

  gnu_ret = convert (type, gnu_ret);

  /* We don't need any NOP_EXPR or NON_LVALUE_EXPR on GNU_RET.  */
  while ((TREE_CODE (gnu_ret) == NOP_EXPR
	  || TREE_CODE (gnu_ret) == NON_LVALUE_EXPR)
	 && TREE_TYPE (TREE_OPERAND (gnu_ret, 0)) == TREE_TYPE (gnu_ret))
    gnu_ret = TREE_OPERAND (gnu_ret, 0);

  return gnu_ret;
}
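The two accumulation loops are a Horner evaluation over base-Base digits: the running value is multiplied by the base and the next digit is added, or subtracted when the leading digit is negative. A standalone C sketch of that accumulation with an illustrative base (Base, Uints_Ptr and Udigits_Ptr are GNAT internals not reproduced here):

#include <stdio.h>

/* Recombine a value from most-significant-first digits, the way
   UI_To_gnu folds nested MULT_EXPR/PLUS_EXPR (or MINUS_EXPR) trees.  */
static long long
digits_to_value (const int *digits, int ndigits, int base)
{
  long long val = digits[0];
  for (int i = 1; i < ndigits; i++)
    {
      if (digits[0] < 0)
        val = val * base - digits[i];
      else
        val = val * base + digits[i];
    }
  return val;
}

int
main (void)
{
  int pos[3] = { 1, 2, 3 };     /* 1*100 + 2*10 + 3 = 123 in base 10 */
  int neg[3] = { -1, 2, 3 };    /* -(1*100 + 2*10 + 3) = -123 */
  printf ("%lld %lld\n",
          digits_to_value (pos, 3, 10), digits_to_value (neg, 3, 10));
  return 0;
}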
Example #7
void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
Example #8
static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  if (scale.is_one ())
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
        return fold_build_pointer_plus (expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
Example #9
tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree sym = TMR_SYMBOL (mem_ref), base = TMR_BASE (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  if (sym)
    addr_base = fold_convert (type, build_addr (sym, current_function_decl));
  else if (base && POINTER_TYPE_P (TREE_TYPE (base)))
    {
      addr_base = fold_convert (type, base);
      base = NULL_TREE;
    }

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, sizetype, act_elem, step);
      addr_off = act_elem;
    }

  act_elem = base;
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, offset);
      else
	addr_off = offset;
    }

  if (addr_off)
    {
      if (addr_base)
	addr = fold_build2 (POINTER_PLUS_EXPR, type, addr_base, addr_off);
      else
	addr = fold_convert (type, addr_off);
    }
  else if (addr_base)
    addr = addr_base;
  else
    addr = build_int_cst (type, 0);

  return addr;
}
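Undoing the folding, the address a TARGET_MEM_REF denotes is simply symbol-or-base plus index * step plus offset. The equivalent pointer arithmetic as a standalone C sketch (array, index, step and offset values are illustrative):

#include <stddef.h>
#include <stdio.h>

int
main (void)
{
  int array[16];
  char *base = (char *) array;    /* plays the role of SYMBOL/BASE */
  size_t index = 3;               /* TMR_INDEX */
  size_t step = sizeof (int);     /* TMR_STEP */
  size_t offset = 0;              /* TMR_OFFSET */

  /* addr = base + index * step + offset, mirroring the MULT_EXPR,
     PLUS_EXPR and final POINTER_PLUS_EXPR folds above.  */
  int *addr = (int *) (base + index * step + offset);

  *addr = 42;
  printf ("array[3] = %d\n", array[3]);    /* prints 42 */
  return 0;
}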
Example #10
Uint
UI_From_gnu (tree Input)
{
  tree gnu_type = TREE_TYPE (Input), gnu_base, gnu_temp;
  /* UI_Base is defined so that 5 Uint digits is sufficient to hold the
     largest possible signed 64-bit value.  */
  const int Max_For_Dint = 5;
  int v[Max_For_Dint], i;
  Vector_Template temp;
  Int_Vector vec;

#if HOST_BITS_PER_WIDE_INT == 64
  /* On 64-bit hosts, tree_fits_shwi_p tells whether the input fits in a
     signed 64-bit integer.  Then a truncation tells whether it fits
     in a signed 32-bit integer.  */
  if (tree_fits_shwi_p (Input))
    {
      HOST_WIDE_INT hw_input = tree_to_shwi (Input);
      if (hw_input == (int) hw_input)
	return UI_From_Int (hw_input);
    }
  else
    return No_Uint;
#else
  /* On 32-bit hosts, tree_fits_shwi_p tells whether the input fits in a
     signed 32-bit integer.  Then a sign test tells whether it fits
     in a signed 64-bit integer.  */
  if (tree_fits_shwi_p (Input))
    return UI_From_Int (tree_to_shwi (Input));

  gcc_assert (TYPE_PRECISION (gnu_type) <= 64);
  if (TYPE_UNSIGNED (gnu_type)
      && TYPE_PRECISION (gnu_type) == 64
      && wi::neg_p (Input, SIGNED))
    return No_Uint;
#endif

  gnu_base = build_int_cst (gnu_type, UI_Base);
  gnu_temp = Input;

  for (i = Max_For_Dint - 1; i >= 0; i--)
    {
      v[i] = tree_to_shwi (fold_build1 (ABS_EXPR, gnu_type,
					fold_build2 (TRUNC_MOD_EXPR, gnu_type,
						     gnu_temp, gnu_base)));
      gnu_temp = fold_build2 (TRUNC_DIV_EXPR, gnu_type, gnu_temp, gnu_base);
    }

  temp.Low_Bound = 1;
  temp.High_Bound = Max_For_Dint;
  vec.Bounds = &temp;
  vec.Array = v;
  return Vector_To_Uint (vec, tree_int_cst_sgn (Input) < 0);
}
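The extraction loop is the inverse of UI_To_gnu's accumulation: repeated TRUNC_MOD_EXPR / TRUNC_DIV_EXPR by the base peels digits off from least to most significant, storing absolute values and recording the sign separately. A standalone C sketch with an illustrative base and width (UI_Base, Max_For_Dint and Vector_To_Uint are GNAT internals):

#include <stdio.h>
#include <stdlib.h>

#define NDIGITS 5

int
main (void)
{
  long long input = -1234;
  int base = 10;                /* stands in for UI_Base */
  int v[NDIGITS];
  long long temp = input;

  /* Least-significant digit lands in v[NDIGITS - 1], mirroring the
     countdown loop in UI_From_gnu; llabs plays the role of ABS_EXPR
     applied to the TRUNC_MOD_EXPR remainder.  */
  for (int i = NDIGITS - 1; i >= 0; i--)
    {
      v[i] = (int) llabs (temp % base);
      temp /= base;
    }

  printf ("sign: %s, digits:", input < 0 ? "negative" : "non-negative");
  for (int i = 0; i < NDIGITS; i++)
    printf (" %d", v[i]);
  printf ("\n");                /* sign: negative, digits: 0 1 2 3 4 */
  return 0;
}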
Example #11
tree
gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
{
  tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
  stmtblock_t block, cond_block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return NULL;

  gcc_assert (outer != NULL);
  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
	      || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);

  /* Allocatable arrays in PRIVATE clauses need to be set to
     "not currently allocated" allocation status if the outer
     array is "not currently allocated"; otherwise they should be allocated.  */
  gfc_start_block (&block);

  gfc_init_block (&cond_block);

  gfc_add_modify (&cond_block, decl, outer);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (decl, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
		      gfc_conv_descriptor_lbound_get (decl, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
		      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
			gfc_conv_descriptor_stride_get (decl, rank));
  esize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
  ptr = gfc_allocate_array_with_status (&cond_block,
					build_int_cst (pvoid_type_node, 0),
					size, NULL, NULL);
  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
  then_b = gfc_finish_block (&cond_block);

  gfc_init_block (&cond_block);
  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
  else_b = gfc_finish_block (&cond_block);

  cond = fold_build2 (NE_EXPR, boolean_type_node,
		      fold_convert (pvoid_type_node,
				    gfc_conv_descriptor_data_get (outer)),
		      null_pointer_node);
  gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
			 cond, then_b, else_b));

  return gfc_finish_block (&block);
}
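The size computed for the allocation is (ubound - lbound + 1), scaled by the last dimension's stride when the rank is greater than one, times the element size. A standalone C sketch of that arithmetic for a rank-2 array (the struct below is an illustrative stand-in, not the real gfortran descriptor layout):

#include <stdio.h>

struct dim { long lbound, ubound, stride; };

static long
alloc_size_bytes (const struct dim *dims, int rank, long elem_size)
{
  const struct dim *last = &dims[rank - 1];
  long size = last->ubound - last->lbound + 1;    /* extent of last dim */
  if (rank > 1)
    size *= last->stride;    /* stride already folds in the lower dims */
  return size * elem_size;
}

int
main (void)
{
  /* A 4x5 array of 8-byte elements: the last dimension has extent 5 and
     stride 4, so 5 * 4 * 8 = 160 bytes.  */
  struct dim dims[2] = { { 1, 4, 1 }, { 1, 5, 4 } };
  printf ("%ld bytes\n", alloc_size_bytes (dims, 2, 8));
  return 0;
}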
Example #12
/* Reallocate MEM so it has SIZE bytes of data.  This behaves like the
   following pseudo-code:

void *
internal_realloc (void *mem, size_t size)
{
  if (size < 0)
    runtime_error ("Attempt to allocate a negative amount of memory.");
  res = realloc (mem, size);
  if (!res && size != 0)
    _gfortran_os_error ("Out of memory");

  if (size == 0)
    return NULL;

  return res;
}  */
tree
gfc_call_realloc (stmtblock_t * block, tree mem, tree size)
{
  tree msg, res, negative, nonzero, zero, null_result, tmp;
  tree type = TREE_TYPE (mem);

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);

  /* size < 0 ?  */
  negative = fold_build2 (LT_EXPR, boolean_type_node, size,
			  build_int_cst (size_type_node, 0));
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
      ("Attempt to allocate a negative amount of memory."));
  tmp = fold_build3 (COND_EXPR, void_type_node, negative,
		     build_call_expr_loc (input_location,
				      gfor_fndecl_runtime_error, 1, msg),
		     build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  /* Call realloc and check the result.  */
  tmp = build_call_expr_loc (input_location,
			 built_in_decls[BUILT_IN_REALLOC], 2,
			 fold_convert (pvoid_type_node, mem), size);
  gfc_add_modify (block, res, fold_convert (type, tmp));
  null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
			     build_int_cst (pvoid_type_node, 0));
  nonzero = fold_build2 (NE_EXPR, boolean_type_node, size,
			 build_int_cst (size_type_node, 0));
  null_result = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, null_result,
			     nonzero);
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
						("Out of memory"));
  tmp = fold_build3 (COND_EXPR, void_type_node, null_result,
		     build_call_expr_loc (input_location,
				      gfor_fndecl_os_error, 1, msg),
		     build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  /* if (size == 0) then the result is NULL.  */
  tmp = fold_build2 (MODIFY_EXPR, type, res, build_int_cst (type, 0));
  zero = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, nonzero);
  tmp = fold_build3 (COND_EXPR, void_type_node, zero, tmp,
		     build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  return res;
}
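The pseudo-code in the comment above compiles almost as written; a standalone C sketch (runtime errors modelled with fprintf/exit, and a signed size parameter so the negative check is meaningful):

#include <stdio.h>
#include <stdlib.h>

static void *
internal_realloc (void *mem, long long size)
{
  if (size < 0)
    {
      fprintf (stderr, "Attempt to allocate a negative amount of memory.\n");
      exit (2);
    }
  void *res = realloc (mem, (size_t) size);
  if (!res && size != 0)
    {
      fprintf (stderr, "Out of memory\n");
      exit (1);
    }
  if (size == 0)
    return NULL;    /* matches the pseudo-code above */
  return res;
}

int
main (void)
{
  int *p = internal_realloc (NULL, 4 * sizeof (int));
  p[0] = 7;
  p = internal_realloc (p, 8 * sizeof (int));
  printf ("%d\n", p[0]);    /* still 7 after growing */
  free (p);
  return 0;
}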
Example #13
Uint
UI_From_gnu (tree Input)
{
  tree gnu_type = TREE_TYPE (Input), gnu_base, gnu_temp;
  /* UI_Base is defined so that 5 Uint digits is sufficient to hold the
     largest possible signed 64-bit value.  */
  const int Max_For_Dint = 5;
  int v[Max_For_Dint], i;
  Vector_Template temp;
  Int_Vector vec;

#if HOST_BITS_PER_WIDE_INT == 64
  /* On 64-bit hosts, host_integerp tells whether the input fits in a
     signed 64-bit integer.  Then a truncation tells whether it fits
     in a signed 32-bit integer.  */
  if (host_integerp (Input, 0))
    {
      HOST_WIDE_INT hw_input = TREE_INT_CST_LOW (Input);
      if (hw_input == (int) hw_input)
	return UI_From_Int (hw_input);
    }
  else
    return No_Uint;
#else
  /* On 32-bit hosts, host_integerp tells whether the input fits in a
     signed 32-bit integer.  Then a sign test tells whether it fits
     in a signed 64-bit integer.  */
  if (host_integerp (Input, 0))
    return UI_From_Int (TREE_INT_CST_LOW (Input));
  else if (TREE_INT_CST_HIGH (Input) < 0
	   && TYPE_UNSIGNED (gnu_type)
	   && !(TREE_CODE (gnu_type) == INTEGER_TYPE
		&& TYPE_IS_SIZETYPE (gnu_type)))
    return No_Uint;
#endif

  gnu_base = build_int_cst (gnu_type, UI_Base);
  gnu_temp = Input;

  for (i = Max_For_Dint - 1; i >= 0; i--)
    {
      v[i] = tree_low_cst (fold_build1 (ABS_EXPR, gnu_type,
					fold_build2 (TRUNC_MOD_EXPR, gnu_type,
						     gnu_temp, gnu_base)),
			   0);
      gnu_temp = fold_build2 (TRUNC_DIV_EXPR, gnu_type, gnu_temp, gnu_base);
    }

  temp.Low_Bound = 1, temp.High_Bound = Max_For_Dint;
  vec.Array = v, vec.Bounds = &temp;
  return Vector_To_Uint (vec, tree_int_cst_sgn (Input) < 0);
}
Example #14
static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
Example #15
tree
oacc_launch_pack (unsigned code, tree device, unsigned op)
{
  tree res;

  res = build_int_cst (unsigned_type_node, GOMP_LAUNCH_PACK (code, 0, op));
  if (device)
    {
      device = fold_build2 (LSHIFT_EXPR, unsigned_type_node,
			    device, build_int_cst (unsigned_type_node,
						   GOMP_LAUNCH_DEVICE_SHIFT));
      res = fold_build2 (BIT_IOR_EXPR, unsigned_type_node, res, device);
    }
  return res;
}
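oacc_launch_pack ORs the device number, shifted into its bit-field, into the packed launch word. The same shift-and-OR packing as a standalone C sketch; the field layout below is made up for illustration (the real GOMP_LAUNCH_PACK and GOMP_LAUNCH_DEVICE_SHIFT definitions live in gomp-constants.h):

#include <stdio.h>

/* Hypothetical layout: low 16 bits operand, next 8 bits device, top
   8 bits launch code.  Not the real GOMP encoding.  */
#define OP_BITS       16
#define DEVICE_SHIFT  OP_BITS
#define CODE_SHIFT    (OP_BITS + 8)

static unsigned
launch_pack (unsigned code, unsigned device, unsigned op)
{
  unsigned res = (code << CODE_SHIFT) | op;    /* device field left zero */
  res |= device << DEVICE_SHIFT;               /* then ORed in, as above */
  return res;
}

int
main (void)
{
  printf ("0x%08x\n", launch_pack (3, 2, 0x1234));    /* 0x03021234 */
  return 0;
}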
Example #16
tree
ubsan_instrument_vla (location_t loc, tree size)
{
  tree type = TREE_TYPE (size);
  tree t, tt;

  t = fold_build2 (LE_EXPR, boolean_type_node, size, build_int_cst (type, 0));
  if (flag_sanitize_undefined_trap_on_error)
    tt = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_vla_data", 1, &loc,
				     ubsan_type_descriptor (type), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= (flag_sanitize_recover & SANITIZE_VLA)
	  ? BUILT_IN_UBSAN_HANDLE_VLA_BOUND_NOT_POSITIVE
	  : BUILT_IN_UBSAN_HANDLE_VLA_BOUND_NOT_POSITIVE_ABORT;
      tt = builtin_decl_explicit (bcode);
      tt = build_call_expr_loc (loc, tt, 2, data, ubsan_encode_value (size));
    }
  t = fold_build3 (COND_EXPR, void_type_node, t, tt, void_node);

  return t;
}
Example #17
static tree
getVolatile_builtin (tree method_return_type ATTRIBUTE_UNUSED, 
		     tree orig_call)
{
  tree addr, stmt, modify_stmt, tmp;
  UNMARSHAL3 (orig_call);
  (void) this_arg; /* Avoid set but not used warning.  */

  addr = build_addr_sum (method_return_type, obj_arg, offset_arg);
  addr 
    = fold_convert (build_pointer_type (build_type_variant 
					(method_return_type, 0, 1)), addr);
  
  stmt = build_call_expr (builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE), 0);
  tmp = build_decl (BUILTINS_LOCATION, VAR_DECL, NULL, method_return_type);
  DECL_IGNORED_P (tmp) = 1;
  DECL_ARTIFICIAL (tmp) = 1;
  pushdecl (tmp);

  modify_stmt = fold_build2 (MODIFY_EXPR, method_return_type,
			     tmp,
			     build_java_indirect_ref (method_return_type, addr,
						      flag_check_references));

  stmt = build2 (COMPOUND_EXPR, void_type_node, modify_stmt, stmt);
  stmt = build2 (COMPOUND_EXPR, method_return_type, stmt, tmp);
  
  return stmt;
}
Example #18
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr)
{
  if (!valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset && integer_zerop (addr->offset))
    addr->offset = NULL_TREE;

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
  if (alias_ptr_type
      && !addr->index
      && !addr->step
      && (!addr->base || POINTER_TYPE_P (TREE_TYPE (addr->base))))
    {
      tree base, offset;
      gcc_assert (!addr->symbol ^ !addr->base);
      if (addr->symbol)
	base = build_fold_addr_expr (addr->symbol);
      else
	base = addr->base;
      if (addr->offset)
	offset = fold_convert (alias_ptr_type, addr->offset);
      else
	offset = build_int_cst (alias_ptr_type, 0);
      return fold_build2 (MEM_REF, type, base, offset);
    }

  return build6 (TARGET_MEM_REF, type,
		 addr->symbol, addr->base, addr->index,
		 addr->step, addr->offset, NULL);
}
Example #19
tree
gfc_truthvalue_conversion (tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case BOOLEAN_TYPE:
      if (TREE_TYPE (expr) == boolean_type_node)
	return expr;
      else if (COMPARISON_CLASS_P (expr))
	{
	  TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      else if (TREE_CODE (expr) == NOP_EXPR)
        return fold_build1 (NOP_EXPR,
			    boolean_type_node, TREE_OPERAND (expr, 0));
      else
        return fold_build1 (NOP_EXPR, boolean_type_node, expr);

    case INTEGER_TYPE:
      if (TREE_CODE (expr) == INTEGER_CST)
	return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
      else
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));

    default:
      internal_error ("Unexpected type in truthvalue_conversion");
    }
}
Example #20
static void
build_arrays (gimple swtch)
{
  tree arr_index_type;
  tree tidx, sub;
  gimple stmt;
  gimple_stmt_iterator gsi;
  int i;

  gsi = gsi_for_stmt (swtch);

  arr_index_type = build_index_type (info.range_size);
  tidx = make_rename_temp (arr_index_type, "csti");
  sub = fold_build2 (MINUS_EXPR, TREE_TYPE (info.index_expr), info.index_expr,
		     fold_convert (TREE_TYPE (info.index_expr),
				   info.range_min));
  sub = force_gimple_operand_gsi (&gsi, fold_convert (arr_index_type, sub),
				  false, NULL, true, GSI_SAME_STMT);
  stmt = gimple_build_assign (tidx, sub);

  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
  mark_symbols_for_renaming (stmt);
  info.arr_ref_first = stmt;

  for (gsi = gsi_start_phis (info.final_bb), i = 0;
       !gsi_end_p (gsi); gsi_next (&gsi), i++)
    build_one_array (swtch, i, arr_index_type, gsi_stmt (gsi), tidx);
}
Example #21
static void
gen_one_condition (tree arg, int lbub,
                   enum tree_code tcode,
                   const char *temp_name1,
		   const char *temp_name2,
                   vec<gimple> conds,
                   unsigned *nconds)
{
  tree lbub_real_cst, lbub_cst, float_type;
  tree temp, tempn, tempc, tempcn;
  gimple stmt1, stmt2, stmt3;

  float_type = TREE_TYPE (arg);
  lbub_cst = build_int_cst (integer_type_node, lbub);
  lbub_real_cst = build_real_from_int_cst (float_type, lbub_cst);

  temp = create_tmp_var (float_type, temp_name1);
  stmt1 = gimple_build_assign (temp, arg);
  tempn = make_ssa_name (temp, stmt1);
  gimple_assign_set_lhs (stmt1, tempn);

  tempc = create_tmp_var (boolean_type_node, temp_name2);
  stmt2 = gimple_build_assign (tempc,
                               fold_build2 (tcode,
					    boolean_type_node,
					    tempn, lbub_real_cst));
  tempcn = make_ssa_name (tempc, stmt2);
  gimple_assign_set_lhs (stmt2, tempcn);

  stmt3 = gimple_build_cond_from_tree (tempcn, NULL_TREE, NULL_TREE);
  conds.quick_push (stmt1);
  conds.quick_push (stmt2);
  conds.quick_push (stmt3);
  (*nconds)++;
}
Example #22
tree
create_cilk_function_exit (tree frame, bool detaches, bool needs_sync)
{
  tree epi = alloc_stmt_list ();

  if (needs_sync) 
    append_to_statement_list (build_cilk_sync (), &epi);
  tree func_ptr = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl, frame);
  tree pop_frame = build_call_expr (cilk_pop_fndecl, 1, func_ptr);
  tree worker = cilk_dot (frame, CILK_TI_FRAME_WORKER, 0);
  tree current = cilk_arrow (worker, CILK_TI_WORKER_CUR, 0);
  tree parent = cilk_dot (frame, CILK_TI_FRAME_PARENT, 0);
  tree set_current = build2 (MODIFY_EXPR, void_type_node, current, parent);
  append_to_statement_list (set_current, &epi);
  append_to_statement_list (pop_frame, &epi);
  tree call = build_call_expr (cilk_leave_fndecl, 1, func_ptr);
  if (!detaches)
    {
      tree flags = cilk_dot (frame, CILK_TI_FRAME_FLAGS, false);
      tree flags_cmp_expr = fold_build2 (NE_EXPR, TREE_TYPE (flags), flags, 
					 build_int_cst (TREE_TYPE (flags), 
							CILK_FRAME_VERSION));
      call = fold_build3 (COND_EXPR, void_type_node, flags_cmp_expr,
			  call, build_empty_stmt (EXPR_LOCATION (flags)));
    }
  append_to_statement_list (call, &epi);  
  return epi;
}
Example #23
/* Call malloc to allocate size bytes of memory, with special conditions:
      + if size <= 0, return a malloced area of size 1,
      + if malloc returns NULL, issue a runtime error.  */
tree
gfc_call_malloc (stmtblock_t * block, tree type, tree size)
{
  tree tmp, msg, malloc_result, null_result, res;
  stmtblock_t block2;

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (prvoid_type_node, NULL);

  /* Call malloc.  */
  gfc_start_block (&block2);

  size = fold_build2 (MAX_EXPR, size_type_node, size,
		      build_int_cst (size_type_node, 1));

  gfc_add_modify (&block2, res,
		  fold_convert (prvoid_type_node,
				build_call_expr_loc (input_location,
				   built_in_decls[BUILT_IN_MALLOC], 1, size)));

  /* Optionally check whether malloc was successful.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_MEM)
    {
      null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
				 build_int_cst (pvoid_type_node, 0));
      msg = gfc_build_addr_expr (pchar_type_node,
	      gfc_build_localized_cstring_const ("Memory allocation failed"));
      tmp = fold_build3 (COND_EXPR, void_type_node, null_result,
	      build_call_expr_loc (input_location,
				   gfor_fndecl_os_error, 1, msg),
				   build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block2, tmp);
    }

  malloc_result = gfc_finish_block (&block2);

  gfc_add_expr_to_block (block, malloc_result);

  if (type != NULL)
    res = fold_convert (type, res);
  return res;
}
Example #24
/* Multiply address ADDR by integer constant MULT.  */
static void
chkp_mult_addr (address_t &addr, tree mult)
{
    unsigned int i;
    for (i = 0; i < addr.pol.length (); i++)
        addr.pol[i].cst = fold_build2 (MULT_EXPR, TREE_TYPE (addr.pol[i].cst),
                                       addr.pol[i].cst, mult);
}
Example #25
tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
Example #26
void
gfc_trans_runtime_check (bool error, bool once, tree cond, stmtblock_t * pblock,
			 locus * where, const char * msgid, ...)
{
  va_list ap;
  stmtblock_t block;
  tree body;
  tree tmp;
  tree tmpvar = NULL;

  if (integer_zerop (cond))
    return;

  if (once)
    {
       tmpvar = gfc_create_var (boolean_type_node, "print_warning");
       TREE_STATIC (tmpvar) = 1;
       DECL_INITIAL (tmpvar) = boolean_true_node;
       gfc_add_expr_to_block (pblock, tmpvar);
    }

  gfc_start_block (&block);

  /* The code to generate the error.  */
  va_start (ap, msgid);
  gfc_add_expr_to_block (&block,
			 gfc_trans_runtime_error_vararg (error, where,
							 msgid, ap));

  if (once)
    gfc_add_modify (&block, tmpvar, boolean_false_node);

  body = gfc_finish_block (&block);

  if (integer_onep (cond))
    {
      gfc_add_expr_to_block (pblock, body);
    }
  else
    {
      /* Tell the compiler that this isn't likely.  */
      if (once)
	cond = fold_build2 (TRUTH_AND_EXPR, long_integer_type_node, tmpvar,
			    cond);
      else
	cond = fold_convert (long_integer_type_node, cond);

      tmp = build_int_cst (long_integer_type_node, 0);
      cond = build_call_expr_loc (input_location,
			      built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
      cond = fold_convert (boolean_type_node, cond);

      tmp = build3_v (COND_EXPR, cond, body, build_empty_stmt (input_location));
      gfc_add_expr_to_block (pblock, tmp);
    }
}
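The BUILT_IN_EXPECT wrapper marks the runtime-error branch as unlikely; written at the source level the same hint is the usual __builtin_expect pattern. A minimal sketch (GCC/Clang builtin; the bound check and error handling are illustrative):

#include <stdio.h>
#include <stdlib.h>

#define unlikely(x) __builtin_expect ((x) != 0, 0)

static void
check_bound (long index, long ubound)
{
  /* The check is expected to pass, so tell the compiler the error branch
     is cold, just as gfc_trans_runtime_check wraps COND above.  */
  if (unlikely (index > ubound))
    {
      fprintf (stderr, "index %ld above upper bound %ld\n", index, ubound);
      exit (1);
    }
}

int
main (void)
{
  check_bound (3, 10);
  printf ("ok\n");
  return 0;
}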
Example #27
/* Compute value of PTR and put it into address RES.
   PTR has to be ADDR_EXPR.  */
static void
chkp_collect_addr_value (tree ptr, address_t &res)
{
    tree obj = TREE_OPERAND (ptr, 0);
    address_t addr;

    switch (TREE_CODE (obj))
    {
    case INDIRECT_REF:
        chkp_collect_value (TREE_OPERAND (obj, 0), res);
        break;

    case MEM_REF:
        chkp_collect_value (TREE_OPERAND (obj, 0), res);
        addr.pol.create (0);
        chkp_collect_value (TREE_OPERAND (obj, 1), addr);
        chkp_add_addr_addr (res, addr);
        addr.pol.release ();
        break;

    case ARRAY_REF:
        chkp_collect_value (build_fold_addr_expr (TREE_OPERAND (obj, 0)), res);
        addr.pol.create (0);
        chkp_collect_value (TREE_OPERAND (obj, 1), addr);
        chkp_mult_addr (addr, array_ref_element_size (obj));
        chkp_add_addr_addr (res, addr);
        addr.pol.release ();
        break;

    case COMPONENT_REF:
    {
        tree str = TREE_OPERAND (obj, 0);
        tree field = TREE_OPERAND (obj, 1);
        chkp_collect_value (build_fold_addr_expr (str), res);
        addr.pol.create (0);
        chkp_collect_value (component_ref_field_offset (obj), addr);
        chkp_add_addr_addr (res, addr);
        addr.pol.release ();
        if (DECL_FIELD_BIT_OFFSET (field))
        {
            addr.pol.create (0);
            chkp_collect_value (fold_build2 (TRUNC_DIV_EXPR, size_type_node,
                                             DECL_FIELD_BIT_OFFSET (field),
                                             size_int (BITS_PER_UNIT)),
                                addr);
            chkp_add_addr_addr (res, addr);
            addr.pol.release ();
        }
    }
    break;

    default:
        chkp_add_addr_item (res, integer_one_node, ptr);
        break;
    }
}
Example #28
tree
gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
{
  tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
  stmtblock_t block;

  if (! GFC_DESCRIPTOR_TYPE_P (type)
      || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
    return build2_v (MODIFY_EXPR, dest, src);

  gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);

  /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
     and copied from SRC.  */
  gfc_start_block (&block);

  gfc_add_modify (&block, dest, src);
  rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
  size = gfc_conv_descriptor_ubound_get (dest, rank);
  size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
		      gfc_conv_descriptor_lbound_get (dest, rank));
  size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
		      gfc_index_one_node);
  if (GFC_TYPE_ARRAY_RANK (type) > 1)
    size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
			gfc_conv_descriptor_stride_get (dest, rank));
  esize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
  size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
  ptr = gfc_allocate_array_with_status (&block,
					build_int_cst (pvoid_type_node, 0),
					size, NULL, NULL);
  gfc_conv_descriptor_data_set (&block, dest, ptr);
  call = build_call_expr_loc (input_location,
			  built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
			  fold_convert (pvoid_type_node,
					gfc_conv_descriptor_data_get (src)),
			  size);
  gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));

  return gfc_finish_block (&block);
}
Example #29
static tree
min_builtin (tree method_return_type, tree orig_call)
{
  /* MIN_EXPR does not handle -0.0 in the Java style.  */
  if (TREE_CODE (method_return_type) == REAL_TYPE)
    return NULL_TREE;
  return fold_build2 (MIN_EXPR, method_return_type,
		      CALL_EXPR_ARG (orig_call, 0),
		      CALL_EXPR_ARG (orig_call, 1));
}
Example #30
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple stmt, call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node, NULL);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node, NULL);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node, NULL);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
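At the source level, the lowering above implements the usual posix_memalign idiom: store through the pointer argument only when the call succeeds, then let the compiler assume the requested alignment. A standalone C sketch (posix_memalign and __builtin_assume_aligned are the real library/compiler interfaces; size and alignment values are illustrative):

#define _POSIX_C_SOURCE 200112L
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  void *mem = NULL;

  /* posix_memalign returns 0 on success and stores through its first
     argument; the lowering splits this into a conditional MEM_REF store
     plus a BUILT_IN_ASSUME_ALIGNED call on the result.  */
  if (posix_memalign (&mem, 64, 1024) == 0)
    {
      double *p = __builtin_assume_aligned (mem, 64);
      p[0] = 1.0;
      printf ("%f\n", p[0]);
      free (mem);
    }
  return 0;
}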