Example #1
0
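/* Convert EXPR, an expression of boolean or integer type, to a truth
   value of type boolean_type_node.  */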
tree
gfc_truthvalue_conversion (tree expr)
{
  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case BOOLEAN_TYPE:
      if (TREE_TYPE (expr) == boolean_type_node)
	return expr;
      else if (COMPARISON_CLASS_P (expr))
	{
	  TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      else if (TREE_CODE (expr) == NOP_EXPR)
        return fold_build1_loc (input_location, NOP_EXPR,
			    boolean_type_node, TREE_OPERAND (expr, 0));
      else
        return fold_build1_loc (input_location, NOP_EXPR, boolean_type_node,
				expr);

    case INTEGER_TYPE:
      if (TREE_CODE (expr) == INTEGER_CST)
	return integer_zerop (expr) ? boolean_false_node : boolean_true_node;
      else
        return fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				expr, build_int_cst (TREE_TYPE (expr), 0));

    default:
      internal_error ("Unexpected type in truthvalue_conversion");
    }
}
Example #2
0
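/* Convert EXPR to the target type TYPE, dispatching to the generic
   convert_to_* helpers according to the tree code of TYPE.  */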
tree
convert (tree type, tree expr)
{
  tree e = expr;
  enum tree_code code;

  if (type == TREE_TYPE (expr))
    return expr;

  if (TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (expr) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (expr)) == ERROR_MARK)
    return expr;

  gcc_checking_assert (TREE_CODE (TREE_TYPE (expr)) != VOID_TYPE);

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (expr)))
    return fold_build1_loc (input_location, NOP_EXPR, type, expr);

  code = TREE_CODE (type);
  if (code == VOID_TYPE)
    return fold_build1_loc (input_location, CONVERT_EXPR, type, e);
  if (code == BOOLEAN_TYPE)
    return fold_build1_loc (input_location, NOP_EXPR, type,
			    truthvalue_conversion (e));
  if (code == INTEGER_TYPE)
    return fold (convert_to_integer (type, e));
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    return fold (convert_to_pointer (type, e));
  if (code == REAL_TYPE)
    return fold (convert_to_real (type, e));
  if (code == COMPLEX_TYPE)
    return fold (convert_to_complex (type, e));
  if (code == VECTOR_TYPE)
    return fold (convert_to_vector (type, e));

  gcc_unreachable ();
}
Example #3
0
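/* If DECL, an OpenMP iteration variable, has pointer type, rewrite the
   increment or decrement expression INCR into the canonical form
   "DECL = DECL + step" built with fold_build_pointer_plus, negating the
   step for decrements.  */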
static tree
c_omp_for_incr_canonicalize_ptr (location_t loc, tree decl, tree incr)
{
  if (POINTER_TYPE_P (TREE_TYPE (decl))
      && TREE_OPERAND (incr, 1))
    {
      tree t = fold_convert_loc (loc,
				 sizetype, TREE_OPERAND (incr, 1));

      if (TREE_CODE (incr) == POSTDECREMENT_EXPR
	  || TREE_CODE (incr) == PREDECREMENT_EXPR)
	t = fold_build1_loc (loc, NEGATE_EXPR, sizetype, t);
      t = fold_build_pointer_plus (decl, t);
      incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
    }
  return incr;
}
Example #4
0
/* Reallocate MEM so it has SIZE bytes of data.  This behaves like the
   following pseudo-code:

void *
internal_realloc (void *mem, size_t size)
{
  res = realloc (mem, size);
  if (!res && size != 0)
    _gfortran_os_error ("Allocation would exceed memory limit");

  if (size == 0)
    return NULL;

  return res;
}  */
tree
gfc_call_realloc (stmtblock_t * block, tree mem, tree size)
{
  tree msg, res, nonzero, zero, null_result, tmp;
  tree type = TREE_TYPE (mem);

  size = gfc_evaluate_now (size, block);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);

  /* Call realloc and check the result.  */
  tmp = build_call_expr_loc (input_location,
			 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
			 fold_convert (pvoid_type_node, mem), size);
  gfc_add_modify (block, res, fold_convert (type, tmp));
  null_result = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
				 res, build_int_cst (pvoid_type_node, 0));
  nonzero = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, size,
			     build_int_cst (size_type_node, 0));
  null_result = fold_build2_loc (input_location, TRUTH_AND_EXPR, boolean_type_node,
				 null_result, nonzero);
  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
			     ("Allocation would exceed memory limit"));
  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			 null_result,
			 build_call_expr_loc (input_location,
					      gfor_fndecl_os_error, 1, msg),
			 build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  /* if (size == 0) then the result is NULL.  */
  tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, res,
			 build_int_cst (type, 0));
  zero = fold_build1_loc (input_location, TRUTH_NOT_EXPR, boolean_type_node,
			  nonzero);
  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, zero, tmp,
			 build_empty_stmt (input_location));
  gfc_add_expr_to_block (block, tmp);

  return res;
}
Example #5
0
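/* Return an expression for the address of T, converted to TYPE when
   TYPE is non-NULL.  If TYPE points to the element type of an array T,
   the address of the first element is taken instead; &*p is folded to
   p, and the base object is marked addressable.  */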
tree
gfc_build_addr_expr (tree type, tree t)
{
  tree base_type = TREE_TYPE (t);
  tree natural_type;

  if (type && POINTER_TYPE_P (type)
      && TREE_CODE (base_type) == ARRAY_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
	 == TYPE_MAIN_VARIANT (TREE_TYPE (base_type)))
    {
      tree min_val = size_zero_node;
      tree type_domain = TYPE_DOMAIN (base_type);
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      t = fold (build4_loc (input_location, ARRAY_REF, TREE_TYPE (type),
			    t, min_val, NULL_TREE, NULL_TREE));
      natural_type = type;
    }
  else
    natural_type = build_pointer_type (base_type);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      if (!type)
	type = natural_type;
      t = TREE_OPERAND (t, 0);
      natural_type = TREE_TYPE (t);
    }
  else
    {
      tree base = get_base_address (t);
      if (base && DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;
      t = fold_build1_loc (input_location, ADDR_EXPR, natural_type, t);
    }

  if (type && natural_type != type)
    t = convert (type, t);

  return t;
}
Example #6
0
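/* Extract the loop step from INCR, the canonicalized increment
   expression of an OpenMP loop: negate it for MINUS_EXPR and convert
   pointer steps to ssizetype.  */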
tree
omp_get_for_step_from_incr (location_t loc, tree incr)
{
  tree step;
  switch (TREE_CODE (incr))
    {
    case PLUS_EXPR:
      step = TREE_OPERAND (incr, 1);
      break;
    case POINTER_PLUS_EXPR:
      step = fold_convert (ssizetype, TREE_OPERAND (incr, 1));
      break;
    case MINUS_EXPR:
      step = TREE_OPERAND (incr, 1);
      step = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (step), step);
      break;
    default:
      gcc_unreachable ();
    }
  return step;
}
Example #7
0
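/* Translate the constant gfc_expr EXPR into its backend tree
   representation.  */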
tree
gfc_conv_constant_to_tree (gfc_expr * expr)
{
  tree res;

  gcc_assert (expr->expr_type == EXPR_CONSTANT);

  /* If it has a prescribed memory representation, we build a string
     constant and VIEW_CONVERT to its type.  */

  switch (expr->ts.type)
    {
    case BT_INTEGER:
      if (expr->representation.string)
	return fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			 gfc_get_int_type (expr->ts.kind),
			 gfc_build_string_const (expr->representation.length,
						 expr->representation.string));
      else
	return gfc_conv_mpz_to_tree (expr->value.integer, expr->ts.kind);

    case BT_REAL:
      if (expr->representation.string)
	return fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			 gfc_get_real_type (expr->ts.kind),
			 gfc_build_string_const (expr->representation.length,
						 expr->representation.string));
      else
	return gfc_conv_mpfr_to_tree (expr->value.real, expr->ts.kind, expr->is_snan);

    case BT_LOGICAL:
      if (expr->representation.string)
	{
	  tree tmp = fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			gfc_get_int_type (expr->ts.kind),
			gfc_build_string_const (expr->representation.length,
						expr->representation.string));
	  if (!integer_zerop (tmp) && !integer_onep (tmp))
	    gfc_warning ("Assigning value other than 0 or 1 to LOGICAL"
			 " has undefined result at %L", &expr->where);
	  return fold_convert (gfc_get_logical_type (expr->ts.kind), tmp);
	}
      else
	return build_int_cst (gfc_get_logical_type (expr->ts.kind),
			      expr->value.logical);

    case BT_COMPLEX:
      if (expr->representation.string)
	return fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			 gfc_get_complex_type (expr->ts.kind),
			 gfc_build_string_const (expr->representation.length,
						 expr->representation.string));
      else
	{
	  tree real = gfc_conv_mpfr_to_tree (mpc_realref (expr->value.complex),
					  expr->ts.kind, expr->is_snan);
	  tree imag = gfc_conv_mpfr_to_tree (mpc_imagref (expr->value.complex),
					  expr->ts.kind, expr->is_snan);

	  return build_complex (gfc_typenode_for_spec (&expr->ts),
				real, imag);
	}

    case BT_CHARACTER:
      res = gfc_build_wide_string_const (expr->ts.kind,
					 expr->value.character.length,
					 expr->value.character.string);
      return res;

    case BT_HOLLERITH:
      return gfc_build_string_const (expr->representation.length,
				     expr->representation.string);

    default:
      fatal_error ("gfc_conv_constant_to_tree(): invalid type: %s",
		   gfc_typename (&expr->ts));
    }
}
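Example #8
0
/* Begin a handler for an exception caught by parameter DECL (NULL_TREE
   for a catch-all clause).  Returns the handler type.  */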
tree
expand_start_catch_block (tree decl)
{
  tree exp;
  tree type, init;

  if (! doing_eh ())
    return NULL_TREE;

  if (decl)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (decl, false))
	decl = error_mark_node;

      type = prepare_eh_type (TREE_TYPE (decl));
      mark_used (eh_type_info (type));
    }
  else
    type = NULL_TREE;

  if (decl && decl_is_java_type (type, 1))
    {
      /* Java only passes objects via pointer and doesn't require
	 adjusting.  The Java object is immediately before the
	 generic exception header.  */
      exp = build_exc_ptr ();
      exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
      exp = fold_build_pointer_plus (exp,
		    fold_build1_loc (input_location,
				     NEGATE_EXPR, sizetype,
				     TYPE_SIZE_UNIT (TREE_TYPE (exp))));
      exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error);
      initialize_handler_parm (decl, exp);
      return type;
    }

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  init = do_begin_catch ();

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL || decl == error_mark_node || init == error_mark_node)
    finish_expr_stmt (init);

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  else if (flag_use_cxa_get_exception_ptr
	   && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      finish_expr_stmt (init);
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_begin_catch instead.  */
  else
    {
      tree init_type = type;

      /* Pointers are passed by value, everything else by reference.  */
      if (!TYPE_PTR_P (type))
	init_type = build_pointer_type (type);
      if (init_type != TREE_TYPE (init))
	init = build1 (NOP_EXPR, init_type, init);
      exp = create_temporary_var (init_type);
      DECL_REGISTER (exp) = 1;
      cp_finish_decl (exp, init, /*init_const_expr=*/false,
		      NULL_TREE, LOOKUP_ONLYCONVERTING);
      initialize_handler_parm (decl, exp);
    }

  return type;
}
Example #9
0
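/* Validate and build an OMP_FOR (or related) statement of kind CODE
   from the per-loop vectors of iteration variables, initializers,
   conditions and increments, diagnosing malformed loop forms.  */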
tree
c_finish_omp_for (location_t locus, enum tree_code code, tree declv,
		  tree orig_declv, tree initv, tree condv, tree incrv,
		  tree body, tree pre_body)
{
  location_t elocus;
  bool fail = false;
  int i;

  if ((code == CILK_SIMD || code == CILK_FOR)
      && !c_check_cilk_loop (locus, TREE_VEC_ELT (declv, 0)))
    fail = true;

  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
  gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
  for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
    {
      tree decl = TREE_VEC_ELT (declv, i);
      tree init = TREE_VEC_ELT (initv, i);
      tree cond = TREE_VEC_ELT (condv, i);
      tree incr = TREE_VEC_ELT (incrv, i);

      elocus = locus;
      if (EXPR_HAS_LOCATION (init))
	elocus = EXPR_LOCATION (init);

      /* Validate the iteration variable.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
	  && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
	{
	  error_at (elocus, "invalid type for iteration variable %qE", decl);
	  fail = true;
	}

      /* In the case of "for (int i = 0...)", init will be a decl.  It should
	 have a DECL_INITIAL that we can turn into an assignment.  */
      if (init == decl)
	{
	  elocus = DECL_SOURCE_LOCATION (decl);

	  init = DECL_INITIAL (decl);
	  if (init == NULL)
	    {
	      error_at (elocus, "%qE is not initialized", decl);
	      init = integer_zero_node;
	      fail = true;
	    }
	  DECL_INITIAL (decl) = NULL_TREE;

	  init = build_modify_expr (elocus, decl, NULL_TREE, NOP_EXPR,
	      			    /* FIXME diagnostics: This should
				       be the location of the INIT.  */
	      			    elocus,
				    init,
				    NULL_TREE);
	}
      if (init != error_mark_node)
	{
	  gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
	  gcc_assert (TREE_OPERAND (init, 0) == decl);
	}

      if (cond == NULL_TREE)
	{
	  error_at (elocus, "missing controlling predicate");
	  fail = true;
	}
      else
	{
	  bool cond_ok = false;

	  if (EXPR_HAS_LOCATION (cond))
	    elocus = EXPR_LOCATION (cond);

	  if (TREE_CODE (cond) == LT_EXPR
	      || TREE_CODE (cond) == LE_EXPR
	      || TREE_CODE (cond) == GT_EXPR
	      || TREE_CODE (cond) == GE_EXPR
	      || TREE_CODE (cond) == NE_EXPR
	      || TREE_CODE (cond) == EQ_EXPR)
	    {
	      tree op0 = TREE_OPERAND (cond, 0);
	      tree op1 = TREE_OPERAND (cond, 1);

	      /* 2.5.1.  The comparison in the condition is computed in
		 the type of DECL, otherwise the behavior is undefined.

		 For example:
		 long n; int i;
		 i < n;

		 according to ISO will be evaluated as:
		 (long)i < n;

		 We want to force:
		 i < (int)n;  */
	      if (TREE_CODE (op0) == NOP_EXPR
		  && decl == TREE_OPERAND (op0, 0))
		{
		  TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
		  TREE_OPERAND (cond, 1)
		    = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
				   TREE_OPERAND (cond, 1));
		}
	      else if (TREE_CODE (op1) == NOP_EXPR
		       && decl == TREE_OPERAND (op1, 0))
		{
		  TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
		  TREE_OPERAND (cond, 0)
		    = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
				   TREE_OPERAND (cond, 0));
		}

	      if (decl == TREE_OPERAND (cond, 0))
		cond_ok = true;
	      else if (decl == TREE_OPERAND (cond, 1))
		{
		  TREE_SET_CODE (cond,
				 swap_tree_comparison (TREE_CODE (cond)));
		  TREE_OPERAND (cond, 1) = TREE_OPERAND (cond, 0);
		  TREE_OPERAND (cond, 0) = decl;
		  cond_ok = true;
		}

	      if (TREE_CODE (cond) == NE_EXPR
		  || TREE_CODE (cond) == EQ_EXPR)
		{
		  if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)))
		    {
		      if (code != CILK_SIMD && code != CILK_FOR)
			cond_ok = false;
		    }
		  else if (operand_equal_p (TREE_OPERAND (cond, 1),
					    TYPE_MIN_VALUE (TREE_TYPE (decl)),
					    0))
		    TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
					 ? GT_EXPR : LE_EXPR);
		  else if (operand_equal_p (TREE_OPERAND (cond, 1),
					    TYPE_MAX_VALUE (TREE_TYPE (decl)),
					    0))
		    TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
					 ? LT_EXPR : GE_EXPR);
		  else if (code != CILK_SIMD && code != CILK_FOR)
		    cond_ok = false;
		}
	    }

	  if (!cond_ok)
	    {
	      error_at (elocus, "invalid controlling predicate");
	      fail = true;
	    }
	}

      if (incr == NULL_TREE)
	{
	  error_at (elocus, "missing increment expression");
	  fail = true;
	}
      else
	{
	  bool incr_ok = false;

	  if (EXPR_HAS_LOCATION (incr))
	    elocus = EXPR_LOCATION (incr);

	  /* Check all the valid increment expressions: v++, v--, ++v, --v,
	     v = v + incr, v = incr + v and v = v - incr.  */
	  switch (TREE_CODE (incr))
	    {
	    case POSTINCREMENT_EXPR:
	    case PREINCREMENT_EXPR:
	    case POSTDECREMENT_EXPR:
	    case PREDECREMENT_EXPR:
	      if (TREE_OPERAND (incr, 0) != decl)
		break;

	      incr_ok = true;
	      incr = c_omp_for_incr_canonicalize_ptr (elocus, decl, incr);
	      break;

	    case COMPOUND_EXPR:
	      if (TREE_CODE (TREE_OPERAND (incr, 0)) != SAVE_EXPR
		  || TREE_CODE (TREE_OPERAND (incr, 1)) != MODIFY_EXPR)
		break;
	      incr = TREE_OPERAND (incr, 1);
	      /* FALLTHRU */
	    case MODIFY_EXPR:
	      if (TREE_OPERAND (incr, 0) != decl)
		break;
	      if (TREE_OPERAND (incr, 1) == decl)
		break;
	      if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
		  && (TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl
		      || TREE_OPERAND (TREE_OPERAND (incr, 1), 1) == decl))
		incr_ok = true;
	      else if ((TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR
			|| (TREE_CODE (TREE_OPERAND (incr, 1))
			    == POINTER_PLUS_EXPR))
		       && TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl)
		incr_ok = true;
	      else
		{
		  tree t = check_omp_for_incr_expr (elocus,
						    TREE_OPERAND (incr, 1),
						    decl);
		  if (t != error_mark_node)
		    {
		      incr_ok = true;
		      t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
		      incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
		    }
		}
	      break;

	    default:
	      break;
	    }
	  if (!incr_ok)
	    {
	      error_at (elocus, "invalid increment expression");
	      fail = true;
	    }
	}

      TREE_VEC_ELT (initv, i) = init;
      TREE_VEC_ELT (incrv, i) = incr;
    }

  if (fail)
    return NULL;
  else
    {
      tree t = make_node (code);

      TREE_TYPE (t) = void_type_node;
      OMP_FOR_INIT (t) = initv;
      OMP_FOR_COND (t) = condv;
      OMP_FOR_INCR (t) = incrv;
      OMP_FOR_BODY (t) = body;
      OMP_FOR_PRE_BODY (t) = pre_body;
      if (code == OMP_FOR)
	OMP_FOR_ORIG_DECLS (t) = orig_declv;

      SET_EXPR_LOCATION (t, locus);
      return add_stmt (t);
    }
}
Example #10
0
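/* Extract the bounds, steps, scheduling and collapse information of
   FOR_STMT into FD, filling LOOPS for collapsed loop nests and choosing
   a suitable iteration type.  */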
void
omp_extract_for_data (gomp_for *for_stmt, struct omp_for_data *fd,
		      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);
  bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_SIMD;
  bool distribute = gimple_omp_for_kind (for_stmt)
		    == GF_OMP_FOR_KIND_DISTRIBUTE;
  bool taskloop = gimple_omp_for_kind (for_stmt)
		  == GF_OMP_FOR_KIND_TASKLOOP;
  tree iterv, countv;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  if (gimple_omp_for_collapse (for_stmt) > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  fd->have_nowait = distribute || simd;
  fd->have_ordered = false;
  fd->collapse = 1;
  fd->ordered = 0;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->sched_modifiers = 0;
  fd->chunk_size = NULL_TREE;
  fd->simd_schedule = false;
  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_CILKFOR)
    fd->sched_kind = OMP_CLAUSE_SCHEDULE_CILKFOR;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	if (OMP_CLAUSE_ORDERED_EXPR (t))
	  fd->ordered = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (t));
	break;
      case OMP_CLAUSE_SCHEDULE:
	gcc_assert (!distribute && !taskloop);
	fd->sched_kind
	  = (enum omp_clause_schedule_kind)
	    (OMP_CLAUSE_SCHEDULE_KIND (t) & OMP_CLAUSE_SCHEDULE_MASK);
	fd->sched_modifiers = (OMP_CLAUSE_SCHEDULE_KIND (t)
			       & ~OMP_CLAUSE_SCHEDULE_MASK);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	fd->simd_schedule = OMP_CLAUSE_SCHEDULE_SIMD (t);
	break;
      case OMP_CLAUSE_DIST_SCHEDULE:
	gcc_assert (distribute);
	fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_COLLAPSE:
	fd->collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (t));
	if (fd->collapse > 1)
	  {
	    collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
	    collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
	  }
	break;
      default:
	break;
      }
  if (fd->ordered && fd->collapse == 1 && loops != NULL)
    {
      fd->loops = loops;
      iterv = NULL_TREE;
      countv = NULL_TREE;
      collapse_iter = &iterv;
      collapse_count = &countv;
    }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (taskloop)
    fd->sched_kind = OMP_CLAUSE_SCHEDULE_RUNTIME;
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }

  int cnt = fd->ordered ? fd->ordered : fd->collapse;
  for (i = 0; i < cnt; i++)
    {
      if (i == 0 && fd->collapse == 1 && (fd->ordered == 0 || loops == NULL))
	loop = &fd->loop;
      else if (loops != NULL)
	loop = loops + i;
      else
	loop = &dummy_loop;

      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      gcc_assert (loop->cond_code != NE_EXPR
		  || gimple_omp_for_kind (for_stmt) == GF_OMP_FOR_KIND_CILKSIMD
		  || gimple_omp_for_kind (for_stmt) == GF_OMP_FOR_KIND_CILKFOR);
      omp_adjust_for_condition (loc, &loop->cond_code, &loop->n2);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      loop->step = omp_get_for_step_from_incr (loc, t);

      if (simd
	  || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
	      && !fd->have_ordered))
	{
	  if (fd->collapse == 1)
	    iter_type = TREE_TYPE (loop->v);
	  else if (i == 0
		   || TYPE_PRECISION (iter_type)
		      < TYPE_PRECISION (TREE_TYPE (loop->v)))
	    iter_type
	      = build_nonstandard_integer_type
		  (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
	}
      else if (iter_type != long_long_unsigned_type_node)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
	    iter_type = long_long_unsigned_type_node;
	  else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
		   && TYPE_PRECISION (TREE_TYPE (loop->v))
		      >= TYPE_PRECISION (iter_type))
	    {
	      tree n;

	      if (loop->cond_code == LT_EXPR)
		n = fold_build2_loc (loc,
				 PLUS_EXPR, TREE_TYPE (loop->v),
				 loop->n2, loop->step);
	      else
		n = loop->n1;
	      if (TREE_CODE (n) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
		iter_type = long_long_unsigned_type_node;
	    }
	  else if (TYPE_PRECISION (TREE_TYPE (loop->v))
		   > TYPE_PRECISION (iter_type))
	    {
	      tree n1, n2;

	      if (loop->cond_code == LT_EXPR)
		{
		  n1 = loop->n1;
		  n2 = fold_build2_loc (loc,
				    PLUS_EXPR, TREE_TYPE (loop->v),
				    loop->n2, loop->step);
		}
	      else
		{
		  n1 = fold_build2_loc (loc,
				    MINUS_EXPR, TREE_TYPE (loop->v),
				    loop->n2, loop->step);
		  n2 = loop->n1;
		}
	      if (TREE_CODE (n1) != INTEGER_CST
		  || TREE_CODE (n2) != INTEGER_CST
		  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
		  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
		iter_type = long_long_unsigned_type_node;
	    }
	}

      if (i >= fd->collapse)
	continue;

      if (collapse_count && *collapse_count == NULL)
	{
	  t = fold_binary (loop->cond_code, boolean_type_node,
			   fold_convert (TREE_TYPE (loop->v), loop->n1),
			   fold_convert (TREE_TYPE (loop->v), loop->n2));
	  if (t && integer_zerop (t))
	    count = build_zero_cst (long_long_unsigned_type_node);
	  else if ((i == 0 || count != NULL_TREE)
		   && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		   && TREE_CONSTANT (loop->n1)
		   && TREE_CONSTANT (loop->n2)
		   && TREE_CODE (loop->step) == INTEGER_CST)
	    {
	      tree itype = TREE_TYPE (loop->v);

	      if (POINTER_TYPE_P (itype))
		itype = signed_type_for (itype);
	      t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
	      t = fold_build2_loc (loc,
			       PLUS_EXPR, itype,
			       fold_convert_loc (loc, itype, loop->step), t);
	      t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
			       fold_convert_loc (loc, itype, loop->n2));
	      t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
			       fold_convert_loc (loc, itype, loop->n1));
	      if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
				 fold_build1_loc (loc, NEGATE_EXPR, itype, t),
				 fold_build1_loc (loc, NEGATE_EXPR, itype,
					      fold_convert_loc (loc, itype,
								loop->step)));
	      else
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
				 fold_convert_loc (loc, itype, loop->step));
	      t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
	      if (count != NULL_TREE)
		count = fold_build2_loc (loc,
				     MULT_EXPR, long_long_unsigned_type_node,
				     count, t);
	      else
		count = t;
	      if (TREE_CODE (count) != INTEGER_CST)
		count = NULL_TREE;
	    }
	  else if (count && !integer_zerop (count))
	    count = NULL_TREE;
	}
    }

  if (count
      && !simd
      && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered))
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
	iter_type = long_long_unsigned_type_node;
      else
	iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
	*collapse_count = fold_convert_loc (loc, iter_type, count);
      else
	*collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1 || (fd->ordered && loops))
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
  else if (loops)
    loops[0] = fd->loop;
}
Example #11
0
/* User-deallocate; we emit the code directly from the front-end, and the
   logic is the same as the previous library function:

    void
    deallocate (void *pointer, GFC_INTEGER_4 * stat)
    {
      if (!pointer)
	{
	  if (stat)
	    *stat = 1;
	  else
	    runtime_error ("Attempt to DEALLOCATE unallocated memory.");
	}
      else
	{
	  free (pointer);
	  if (stat)
	    *stat = 0;
	}
    }

   In this front-end version, status doesn't have to be GFC_INTEGER_4.
   Moreover, if CAN_FAIL is true, then we will not emit a runtime error,
   even when no status variable is passed to us (this is used for
   unconditional deallocation generated by the front-end at end of
   each procedure).
   
   If a runtime-message is possible, `expr' must point to the original
   expression being deallocated for its locus and variable name.

   For coarrays, "pointer" must be the array descriptor and not its
   "data" component.  */
tree
gfc_deallocate_with_status (tree pointer, tree status, tree errmsg,
			    tree errlen, tree label_finish,
			    bool can_fail, gfc_expr* expr, bool coarray)
{
  stmtblock_t null, non_null;
  tree cond, tmp, error;
  tree status_type = NULL_TREE;
  tree caf_decl = NULL_TREE;

  if (coarray)
    {
      gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (pointer)));
      caf_decl = pointer;
      pointer = gfc_conv_descriptor_data_get (caf_decl);
      STRIP_NOPS (pointer);
    }

  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, pointer,
			  build_int_cst (TREE_TYPE (pointer), 0));

  /* When POINTER is NULL, we set STATUS to 1 if it's present, otherwise
     we emit a runtime error.  */
  gfc_start_block (&null);
  if (!can_fail)
    {
      tree varname;

      gcc_assert (expr && expr->expr_type == EXPR_VARIABLE && expr->symtree);

      varname = gfc_build_cstring_const (expr->symtree->name);
      varname = gfc_build_addr_expr (pchar_type_node, varname);

      error = gfc_trans_runtime_error (true, &expr->where,
				       "Attempt to DEALLOCATE unallocated '%s'",
				       varname);
    }
  else
    error = build_empty_stmt (input_location);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      tree cond2;

      status_type = TREE_TYPE (TREE_TYPE (status));
      cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			       status, build_int_cst (TREE_TYPE (status), 0));
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
			     fold_build1_loc (input_location, INDIRECT_REF,
					      status_type, status),
			     build_int_cst (status_type, 1));
      error = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			       cond2, tmp, error);
    }

  gfc_add_expr_to_block (&null, error);

  /* When POINTER is not NULL, we free it.  */
  gfc_start_block (&non_null);
  if (!coarray || gfc_option.coarray != GFC_FCOARRAY_LIB)
    {
      tmp = build_call_expr_loc (input_location,
				 builtin_decl_explicit (BUILT_IN_FREE), 1,
				 fold_convert (pvoid_type_node, pointer));
      gfc_add_expr_to_block (&non_null, tmp);

      if (status != NULL_TREE && !integer_zerop (status))
	{
	  /* We set STATUS to zero if it is present.  */
	  tree status_type = TREE_TYPE (TREE_TYPE (status));
	  tree cond2;

	  cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   status,
				   build_int_cst (TREE_TYPE (status), 0));
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
				 fold_build1_loc (input_location, INDIRECT_REF,
						  status_type, status),
				 build_int_cst (status_type, 0));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 gfc_unlikely (cond2), tmp,
				 build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&non_null, tmp);
	}
    }
  else
    {
      tree caf_type, token, cond2;
      tree pstat = null_pointer_node;

      if (errmsg == NULL_TREE)
	{
	  gcc_assert (errlen == NULL_TREE);
	  errmsg = null_pointer_node;
	  errlen = build_zero_cst (integer_type_node);
	}
      else
	{
	  gcc_assert (errlen != NULL_TREE);
	  if (!POINTER_TYPE_P (TREE_TYPE (errmsg)))
	    errmsg = gfc_build_addr_expr (NULL_TREE, errmsg);
	}

      caf_type = TREE_TYPE (caf_decl);

      if (status != NULL_TREE && !integer_zerop (status))
	{
	  gcc_assert (status_type == integer_type_node);
	  pstat = status;
	}

      if (GFC_DESCRIPTOR_TYPE_P (caf_type)
	  && GFC_TYPE_ARRAY_AKIND (caf_type) == GFC_ARRAY_ALLOCATABLE)
	token = gfc_conv_descriptor_token (caf_decl);
      else if (DECL_LANG_SPECIFIC (caf_decl)
	       && GFC_DECL_TOKEN (caf_decl) != NULL_TREE)
	token = GFC_DECL_TOKEN (caf_decl);
      else
	{
	  gcc_assert (GFC_ARRAY_TYPE_P (caf_type)
		      && GFC_TYPE_ARRAY_CAF_TOKEN (caf_type) != NULL_TREE);
	  token = GFC_TYPE_ARRAY_CAF_TOKEN (caf_type);
	}

      token = gfc_build_addr_expr  (NULL_TREE, token);
      tmp = build_call_expr_loc (input_location,
	     gfor_fndecl_caf_deregister, 4,
	     token, pstat, errmsg, errlen);
      gfc_add_expr_to_block (&non_null, tmp);

      if (status != NULL_TREE)
	{
	  tree stat = build_fold_indirect_ref_loc (input_location, status);

	  TREE_USED (label_finish) = 1;
	  tmp = build1_v (GOTO_EXPR, label_finish);
	  cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				   stat, build_zero_cst (TREE_TYPE (stat)));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
        			 gfc_unlikely (cond2), tmp,
				 build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&non_null, tmp);
	}
    }

  return fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
			  gfc_finish_block (&null),
			  gfc_finish_block (&non_null));
}
Example #12
0
/* Allocate memory, using an optional status argument.
 
   This function follows the following pseudo-code:

    void *
    allocate (size_t size, integer_type* stat)
    {
      void *newmem;
    
      if (stat)
	*stat = 0;

      newmem = malloc (MAX (size, 1));
      if (newmem == NULL)
      {
        if (stat)
          *stat = LIBERROR_ALLOCATION;
        else
	  runtime_error ("Allocation would exceed memory limit");
      }
      return newmem;
    }  */
tree
gfc_allocate_with_status (stmtblock_t * block, tree size, tree status)
{
  stmtblock_t alloc_block;
  tree res, tmp, msg, cond;
  tree status_type = status ? TREE_TYPE (TREE_TYPE (status)) : NULL_TREE;

  /* Evaluate size only once, and make sure it has the right type.  */
  size = gfc_evaluate_now (size, block);
  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (prvoid_type_node, NULL);

  /* Set the optional status variable to zero.  */
  if (status != NULL_TREE && !integer_zerop (status))
    {
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
			     fold_build1_loc (input_location, INDIRECT_REF,
					      status_type, status),
			     build_int_cst (status_type, 0));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			     fold_build2_loc (input_location, NE_EXPR,
					boolean_type_node, status,
					build_int_cst (TREE_TYPE (status), 0)),
			     tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (block, tmp);
    }

  /* The allocation itself.  */
  gfc_start_block (&alloc_block);
  gfc_add_modify (&alloc_block, res,
		  fold_convert (prvoid_type_node,
				build_call_expr_loc (input_location,
				   built_in_decls[BUILT_IN_MALLOC], 1,
					fold_build2_loc (input_location,
					    MAX_EXPR, size_type_node, size,
					    build_int_cst (size_type_node,
							   1)))));

  msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
			     ("Allocation would exceed memory limit"));
  tmp = build_call_expr_loc (input_location,
			 gfor_fndecl_os_error, 1, msg);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      /* Set the status variable if it's present.  */
      tree tmp2;

      cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
			      status, build_int_cst (TREE_TYPE (status), 0));
      tmp2 = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
			      fold_build1_loc (input_location, INDIRECT_REF,
					       status_type, status),
			      build_int_cst (status_type, LIBERROR_ALLOCATION));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
			     tmp, tmp2);
    }

  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			 fold_build2_loc (input_location, EQ_EXPR,
					  boolean_type_node, res,
					  build_int_cst (prvoid_type_node, 0)),
			 tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&alloc_block, tmp);
  gfc_add_expr_to_block (block, gfc_finish_block (&alloc_block));

  return res;
}
Example #13
0
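/* Build a call to the runtime error or warning function, prefixing the
   message MSGID with the source location WHERE and appending the extra
   arguments taken from AP.  ERROR selects an error over a warning.  */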
static tree
trans_runtime_error_vararg (bool error, locus* where, const char* msgid,
			    va_list ap)
{
  stmtblock_t block;
  tree tmp;
  tree arg, arg2;
  tree *argarray;
  tree fntype;
  char *message;
  const char *p;
  int line, nargs, i;
  location_t loc;

  /* Compute the number of extra arguments from the format string.  */
  for (p = msgid, nargs = 0; *p; p++)
    if (*p == '%')
      {
	p++;
	if (*p != '%')
	  nargs++;
      }

  /* The code to generate the error.  */
  gfc_start_block (&block);

  if (where)
    {
      line = LOCATION_LINE (where->lb->location);
      asprintf (&message, "At line %d of file %s",  line,
		where->lb->file->filename);
    }
  else
    asprintf (&message, "In file '%s', around line %d",
	      gfc_source_file, input_line + 1);

  arg = gfc_build_addr_expr (pchar_type_node,
			     gfc_build_localized_cstring_const (message));
  free (message);
  
  asprintf (&message, "%s", _(msgid));
  arg2 = gfc_build_addr_expr (pchar_type_node,
			      gfc_build_localized_cstring_const (message));
  free (message);

  /* Build the argument array.  */
  argarray = XALLOCAVEC (tree, nargs + 2);
  argarray[0] = arg;
  argarray[1] = arg2;
  for (i = 0; i < nargs; i++)
    argarray[2 + i] = va_arg (ap, tree);
  
  /* Build the function call to runtime_(warning,error)_at; because of the
     variable number of arguments, we can't use build_call_expr_loc
     directly.  */
  if (error)
    fntype = TREE_TYPE (gfor_fndecl_runtime_error_at);
  else
    fntype = TREE_TYPE (gfor_fndecl_runtime_warning_at);

  loc = where ? where->lb->location : input_location;
  tmp = fold_builtin_call_array (loc, TREE_TYPE (fntype),
				 fold_build1_loc (loc, ADDR_EXPR,
					     build_pointer_type (fntype),
					     error
					     ? gfor_fndecl_runtime_error_at
					     : gfor_fndecl_runtime_warning_at),
				 nargs + 2, argarray);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
Example #14
0
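/* Fold EXPR as far as the C front end allows, recording in
   *MAYBE_CONST_OPERANDS and *MAYBE_CONST_ITSELF whether the expression
   may still appear in a constant expression; IN_INIT and FOR_INT_CONST
   adjust folding for initializers and integer constant expressions.  */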
static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  source_range old_range;

  /* Constants, declarations, statements, errors, SAVE_EXPRs and
     anything else not counted as an expression cannot usefully be
     folded further at this point.  */
  if (!IS_EXPR_CODE_CLASS (kind)
      || kind == tcc_statement
      || code == SAVE_EXPR)
    return expr;

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      ret = fold (ret);
      goto out;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != MODIFY_EXPR
	  && code != PREDECREMENT_EXPR
	  && code != PREINCREMENT_EXPR
	  && code != POSTDECREMENT_EXPR
	  && code != POSTINCREMENT_EXPR)
	op0 = decl_constant_value_for_optimization (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret);
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if (compare_tree_int (op1,
				     TYPE_PRECISION (TREE_TYPE (orig_op0)))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;

    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case CONJ_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR)
	op0 = decl_constant_value_for_optimization (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);

      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
Example #15
0
/* Generate code for an ALLOCATE statement when the argument is an
   allocatable array.  If the array is currently allocated, it is an
   error to allocate it again.
 
   This function follows the following pseudo-code:
  
    void *
    allocate_array (void *mem, size_t size, integer_type *stat)
    {
      if (mem == NULL)
	return allocate (size, stat);
      else
      {
	if (stat)
	{
	  free (mem);
	  mem = allocate (size, stat);
	  *stat = LIBERROR_ALLOCATION;
	  return mem;
	}
	else
	  runtime_error ("Attempting to allocate already allocated variable");
      }
    }
    
    expr must be set to the original expression being allocated for its locus
    and variable name in case a runtime error has to be printed.  */
tree
gfc_allocate_array_with_status (stmtblock_t * block, tree mem, tree size,
				tree status, gfc_expr* expr)
{
  stmtblock_t alloc_block;
  tree res, tmp, null_mem, alloc, error;
  tree type = TREE_TYPE (mem);

  if (TREE_TYPE (size) != TREE_TYPE (size_type_node))
    size = fold_convert (size_type_node, size);

  /* Create a variable to hold the result.  */
  res = gfc_create_var (type, NULL);
  null_mem = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, mem,
			      build_int_cst (type, 0));

  /* If mem is NULL, we call gfc_allocate_with_status.  */
  gfc_start_block (&alloc_block);
  tmp = gfc_allocate_with_status (&alloc_block, size, status);
  gfc_add_modify (&alloc_block, res, fold_convert (type, tmp));
  alloc = gfc_finish_block (&alloc_block);

  /* Otherwise, we issue a runtime error or set the status variable.  */
  if (expr)
    {
      tree varname;

      gcc_assert (expr->expr_type == EXPR_VARIABLE && expr->symtree);
      varname = gfc_build_cstring_const (expr->symtree->name);
      varname = gfc_build_addr_expr (pchar_type_node, varname);

      error = gfc_trans_runtime_error (true, &expr->where,
				       "Attempting to allocate already"
				       " allocated variable '%s'",
				       varname);
    }
  else
    error = gfc_trans_runtime_error (true, NULL,
				     "Attempting to allocate already allocated"
				     " variable");

  if (status != NULL_TREE && !integer_zerop (status))
    {
      tree status_type = TREE_TYPE (TREE_TYPE (status));
      stmtblock_t set_status_block;

      gfc_start_block (&set_status_block);
      tmp = build_call_expr_loc (input_location,
			     built_in_decls[BUILT_IN_FREE], 1,
			     fold_convert (pvoid_type_node, mem));
      gfc_add_expr_to_block (&set_status_block, tmp);

      tmp = gfc_allocate_with_status (&set_status_block, size, status);
      gfc_add_modify (&set_status_block, res, fold_convert (type, tmp));

      gfc_add_modify (&set_status_block,
			   fold_build1_loc (input_location, INDIRECT_REF,
					    status_type, status),
			   build_int_cst (status_type, LIBERROR_ALLOCATION));

      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
			     status, build_int_cst (status_type, 0));
      error = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
			       error, gfc_finish_block (&set_status_block));
    }

  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, null_mem,
			 alloc, error);
  gfc_add_expr_to_block (block, tmp);

  return res;
}
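
For reference, a self-contained C sketch of the run-time behaviour this builder emits; allocate, runtime_error and the LIBERROR_ALLOCATION value below are stand-ins for the libgfortran facilities, not their real interfaces:

#include <stdio.h>
#include <stdlib.h>

enum { LIBERROR_ALLOCATION = 1 };   /* placeholder, not libgfortran's value */

/* Stand-in for the library allocator: sets *stat to 0 on success.  */
static void *
allocate (size_t size, int *stat)
{
  void *p = malloc (size ? size : 1);
  if (stat)
    *stat = (p == NULL);
  return p;
}

/* Stand-in for the library error routine.  */
static void
runtime_error (const char *msg)
{
  fprintf (stderr, "Fortran runtime error: %s\n", msg);
  exit (2);
}

void *
allocate_array_sketch (void *mem, size_t size, int *stat)
{
  if (mem == NULL)
    return allocate (size, stat);

  if (stat)
    {
      /* Already allocated: release the old storage, allocate anew, and
	 report the condition through *stat instead of aborting.  */
      free (mem);
      mem = allocate (size, stat);
      *stat = LIBERROR_ALLOCATION;
      return mem;
    }

  runtime_error ("Attempting to allocate already allocated variable");
  return NULL;   /* not reached */
}
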
Example #16
tree
gfc_deallocate_scalar_with_status (tree pointer, tree status, bool can_fail,
				   gfc_expr* expr, gfc_typespec ts)
{
  stmtblock_t null, non_null;
  tree cond, tmp, error;

  cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, pointer,
			  build_int_cst (TREE_TYPE (pointer), 0));

  /* When POINTER is NULL, we set STATUS to 1 if it is present; otherwise
     we emit a runtime error, unless CAN_FAIL is set, in which case the
     null pointer is silently ignored.  */
  gfc_start_block (&null);
  if (!can_fail)
    {
      tree varname;

      gcc_assert (expr && expr->expr_type == EXPR_VARIABLE && expr->symtree);

      varname = gfc_build_cstring_const (expr->symtree->name);
      varname = gfc_build_addr_expr (pchar_type_node, varname);

      error = gfc_trans_runtime_error (true, &expr->where,
				       "Attempt to DEALLOCATE unallocated '%s'",
				       varname);
    }
  else
    error = build_empty_stmt (input_location);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      tree status_type = TREE_TYPE (TREE_TYPE (status));
      tree cond2;

      cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			       status, build_int_cst (TREE_TYPE (status), 0));
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
			     fold_build1_loc (input_location, INDIRECT_REF,
					      status_type, status),
			     build_int_cst (status_type, 1));
      error = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			       cond2, tmp, error);
    }

  gfc_add_expr_to_block (&null, error);

  /* When POINTER is not NULL, we free it.  */
  gfc_start_block (&non_null);
  
  /* Free allocatable components.  */
  if (ts.type == BT_DERIVED && ts.u.derived->attr.alloc_comp)
    {
      tmp = build_fold_indirect_ref_loc (input_location, pointer);
      tmp = gfc_deallocate_alloc_comp (ts.u.derived, tmp, 0);
      gfc_add_expr_to_block (&non_null, tmp);
    }
  else if (ts.type == BT_CLASS
	   && ts.u.derived->components->ts.u.derived->attr.alloc_comp)
    {
      tmp = build_fold_indirect_ref_loc (input_location, pointer);
      tmp = gfc_deallocate_alloc_comp (ts.u.derived->components->ts.u.derived,
				       tmp, 0);
      gfc_add_expr_to_block (&non_null, tmp);
    }
  
  tmp = build_call_expr_loc (input_location,
			     builtin_decl_explicit (BUILT_IN_FREE), 1,
			     fold_convert (pvoid_type_node, pointer));
  gfc_add_expr_to_block (&non_null, tmp);

  if (status != NULL_TREE && !integer_zerop (status))
    {
      /* We set STATUS to zero if it is present.  */
      tree status_type = TREE_TYPE (TREE_TYPE (status));
      tree cond2;

      cond2 = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			       status, build_int_cst (TREE_TYPE (status), 0));
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, status_type,
			     fold_build1_loc (input_location, INDIRECT_REF,
					      status_type, status),
			     build_int_cst (status_type, 0));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond2,
			     tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&non_null, tmp);
    }

  return fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
			  gfc_finish_block (&null),
			  gfc_finish_block (&non_null));
}
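
A compilable sketch of the logic this function generates, assuming an integer STATUS and omitting the allocatable-component walk; runtime_error again stands in for the library error routine:

#include <stdio.h>
#include <stdlib.h>

static void
runtime_error (const char *msg)
{
  fprintf (stderr, "Fortran runtime error: %s\n", msg);
  exit (2);
}

void
deallocate_scalar_sketch (void *pointer, int *status, int can_fail)
{
  if (pointer == NULL)
    {
      /* Deallocating an unallocated object: report it through STATUS when
	 present, otherwise it is fatal unless the caller allows failure.  */
      if (status)
	*status = 1;
      else if (!can_fail)
	runtime_error ("Attempt to DEALLOCATE unallocated variable");
      return;
    }

  /* (The real code first frees allocatable components of derived-type and
     CLASS objects here.)  */
  free (pointer);
  if (status)
    *status = 0;
}
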
Example #17
tree
cilk_for_number_of_iterations (tree cilk_for)
{
  tree t, v, n1, n2, step, type, init, cond, incr, itype;
  enum tree_code cond_code;
  location_t loc = EXPR_LOCATION (cilk_for);

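  /* Pick apart the canonical loop form: the init statement gives the
     iteration variable V and its initial value N1, the condition gives the
     bound N2, and INCR is the increment expression.  */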
  init = TREE_VEC_ELT (OMP_FOR_INIT (cilk_for), 0);
  v = TREE_OPERAND (init, 0);
  cond = TREE_VEC_ELT (OMP_FOR_COND (cilk_for), 0);
  incr = TREE_VEC_ELT (OMP_FOR_INCR (cilk_for), 0);
  type = TREE_TYPE (v);

  gcc_assert (TREE_CODE (TREE_TYPE (v)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE);
  n1 = TREE_OPERAND (init, 1);
  cond_code = TREE_CODE (cond);
  n2 = TREE_OPERAND (cond, 1);
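  /* Canonicalize the condition to <, > or != by folding <= and >= into an
     adjusted bound.  */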
  switch (cond_code)
    {
    case LT_EXPR:
    case GT_EXPR:
    case NE_EXPR:
      break;
    case LE_EXPR:
      if (POINTER_TYPE_P (TREE_TYPE (n2)))
	n2 = fold_build_pointer_plus_hwi_loc (loc, n2, 1);
      else
	n2 = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (n2), n2,
			      build_int_cst (TREE_TYPE (n2), 1));
      cond_code = LT_EXPR;
      break;
    case GE_EXPR:
      if (POINTER_TYPE_P (TREE_TYPE (n2)))
	n2 = fold_build_pointer_plus_hwi_loc (loc, n2, -1);
      else
	n2 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (n2), n2,
			      build_int_cst (TREE_TYPE (n2), 1));
      cond_code = GT_EXPR;
      break;
    default:
      gcc_unreachable ();
    }

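  /* Extract the step: +1 or -1 for the increment/decrement forms, otherwise
     the addend of the MODIFY_EXPR (negated for a subtraction).  */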
  step = NULL_TREE;
  switch (TREE_CODE (incr))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      step = build_int_cst (TREE_TYPE (v), 1);
      break;
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      step = build_int_cst (TREE_TYPE (v), -1);
      break;
    case MODIFY_EXPR:
      t = TREE_OPERAND (incr, 1);
      gcc_assert (TREE_OPERAND (t, 0) == v);
      switch (TREE_CODE (t))
	{
	case PLUS_EXPR:
	  step = TREE_OPERAND (t, 1);
	  break;
	case POINTER_PLUS_EXPR:
	  step = fold_convert (ssizetype, TREE_OPERAND (t, 1));
	  break;
	case MINUS_EXPR:
	  step = TREE_OPERAND (t, 1);
	  step = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (step), step);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    default:
      gcc_unreachable ();
    }

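  /* Compute the trip count, in essence (N2 - N1 + STEP - 1) / STEP for an
     ascending loop, in a signed variant of the iterator type; the sign
     adjustment, unsigned types and the != case are handled below.  */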
  itype = type;
  if (POINTER_TYPE_P (itype))
    itype = signed_type_for (itype);
  if (cond_code == NE_EXPR)
    {
      /* For NE_EXPR, determine from the sign of the step whether the
	 iterator increases or decreases.  */
      tree stype = itype;
      if (TYPE_UNSIGNED (stype))
	stype = signed_type_for (stype);
      cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node,
			      fold_convert_loc (loc, stype, step),
			      build_int_cst (stype, 0));
      t = fold_build3_loc (loc, COND_EXPR, itype, cond,
			   build_int_cst (itype, -1),
			   build_int_cst (itype, 1));
    }
  else
    t = build_int_cst (itype, (cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, step), t);
  t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
		       fold_convert_loc (loc, itype, n2));
  t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
		       fold_convert_loc (loc, itype, n1));
  if (TYPE_UNSIGNED (itype) && cond_code == GT_EXPR)
    t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
			 fold_build1_loc (loc, NEGATE_EXPR, itype, t),
			 fold_build1_loc (loc, NEGATE_EXPR, itype,
					  fold_convert_loc (loc, itype,
							    step)));
  else if (TYPE_UNSIGNED (itype) && cond_code == NE_EXPR)
    {
      tree t1
	= fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
			   fold_convert_loc (loc, itype, step));
      tree t2
	= fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
			   fold_build1_loc (loc, NEGATE_EXPR, itype, t),
			   fold_build1_loc (loc, NEGATE_EXPR, itype,
					    fold_convert_loc (loc, itype,
							      step)));
      t = fold_build3_loc (loc, COND_EXPR, itype, cond, t1, t2);
    }
  else
    t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
			 fold_convert_loc (loc, itype, step));
  cond = fold_build2_loc (loc, cond_code, boolean_type_node, n1, n2);
  t = fold_build3_loc (loc, COND_EXPR, itype, cond, t,
		       build_int_cst (itype, 0));
  return t;
}