Example #1
static tree
chrec_convert_1 (tree type, tree chrec, gimple *at_stmt,
		 bool use_overflow_semantics)
{
  tree ct, res;
  tree base, step;
  struct loop *loop;

  if (automatically_generated_chrec_p (chrec))
    return chrec;

  ct = chrec_type (chrec);
  if (useless_type_conversion_p (type, ct))
    return chrec;

  if (!evolution_function_is_affine_p (chrec))
    goto keep_cast;

  loop = get_chrec_loop (chrec);
  base = CHREC_LEFT (chrec);
  step = CHREC_RIGHT (chrec);

  if (convert_affine_scev (loop, type, &base, &step, at_stmt,
			   use_overflow_semantics))
    return build_polynomial_chrec (loop->num, base, step);

  /* If we cannot propagate the cast inside the chrec, just keep the cast.  */
keep_cast:
  /* Fold will not canonicalize (long)(i - 1) to (long)i - 1 because that
     may be more expensive.  We do want to perform this optimization here
     though for canonicalization reasons.  */
  if (use_overflow_semantics
      && (TREE_CODE (chrec) == PLUS_EXPR
	  || TREE_CODE (chrec) == MINUS_EXPR)
      && TREE_CODE (type) == INTEGER_TYPE
      && TREE_CODE (ct) == INTEGER_TYPE
      && TYPE_PRECISION (type) > TYPE_PRECISION (ct)
      && TYPE_OVERFLOW_UNDEFINED (ct))
    res = fold_build2 (TREE_CODE (chrec), type,
		       fold_convert (type, TREE_OPERAND (chrec, 0)),
		       fold_convert (type, TREE_OPERAND (chrec, 1)));
  /* Similarly, perform the trick that (signed char)((int)x + 2) can be
     narrowed to (signed char)((unsigned char)x + 2).  */
  else if (use_overflow_semantics
	   && TREE_CODE (chrec) == POLYNOMIAL_CHREC
	   && TREE_CODE (ct) == INTEGER_TYPE
	   && TREE_CODE (type) == INTEGER_TYPE
	   && TYPE_OVERFLOW_UNDEFINED (type)
	   && TYPE_PRECISION (type) < TYPE_PRECISION (ct))
    {
      tree utype = unsigned_type_for (type);
      res = build_polynomial_chrec (CHREC_VARIABLE (chrec),
				    fold_convert (utype,
						  CHREC_LEFT (chrec)),
				    fold_convert (utype,
						  CHREC_RIGHT (chrec)));
      res = chrec_convert_1 (type, res, at_stmt, use_overflow_semantics);
    }
  else
    res = fold_convert (type, chrec);

  /* Don't propagate overflows.  */
  if (CONSTANT_CLASS_P (res))
    TREE_OVERFLOW (res) = 0;

  /* But reject constants that don't fit in their type after conversion.
     This can happen if TYPE_MIN_VALUE or TYPE_MAX_VALUE are not the
     natural values associated with TYPE_PRECISION and TYPE_UNSIGNED,
     and can cause problems later when computing niters of loops.  Note
     that we don't do the check before converting because we don't want
     to reject conversions of negative chrecs to unsigned types.  */
  if (TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (type) == INTEGER_TYPE
      && !int_fits_type_p (res, type))
    res = chrec_dont_know;

  return res;
}
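
/* A standalone sketch (not GCC code) illustrating why the two
   canonicalizations above depend on the stated overflow semantics.
   Widening distributes over the inner arithmetic only because signed
   overflow is undefined, so (long)(i - 1) may become (long)i - 1; the
   narrowing trick instead routes the arithmetic through the unsigned
   type, where wraparound is well defined.  */
#include <stdio.h>

int
main (void)
{
  int i = 5;

  /* (long)(i - 1) -> (long)i - 1: valid because i - 1 is assumed not
     to wrap in the signed type.  */
  long widened = (long) i - 1;

  /* (signed char)((int)x + 2) -> (signed char)((unsigned char)x + 2):
     the unsigned intermediate makes any wraparound well defined and
     the truncation yields the same bits.  */
  signed char x = 100;
  signed char narrowed = (signed char) ((unsigned char) x + 2);

  printf ("%ld %d\n", widened, narrowed);	/* prints "4 102" */
  return 0;
}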
Example #2
bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions unless we
     deal with aggregate types, in which case we defer to later checks.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
      && !AGGREGATE_TYPE_P (inner_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode, signedness
     and precision, and both or neither is a boolean type.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE unless both types
	 have precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* We don't need to preserve changes in the type's minimum or
	 maximum value in general as these do not generate code
	 unless the types' precisions differ.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_VECTOR_SUBPARTS (inner_type)
	      == TYPE_VECTOR_SUBPARTS (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or with a different constant size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that would otherwise need to be
     structurally compared.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
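
/* A toy model (not GCC code) of the integral-type rule above: a
   conversion between two integer types is useless exactly when
   signedness, precision and boolean-ness agree.  The struct and
   function names below are hypothetical.  */
#include <stdbool.h>

struct toy_int_type
{
  unsigned precision;
  bool is_unsigned;
  bool is_boolean;
};

static bool
toy_useless_int_conversion_p (struct toy_int_type outer,
			      struct toy_int_type inner)
{
  /* Preserve changes in signedness or precision.  */
  if (inner.is_unsigned != outer.is_unsigned
      || inner.precision != outer.precision)
    return false;

  /* Conversions to/from a boolean type are preserved unless both
     types have precision one.  */
  if (inner.is_boolean != outer.is_boolean && outer.precision != 1)
    return false;

  return true;
}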
tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to the base, forcing it into a register if
	 necessary.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
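
/* A toy model (not GCC code) of the fallback cascade above: an
   address of the form SYMBOL + BASE + INDEX * STEP + OFFSET is
   progressively flattened into BASE until only a bare base remains,
   mirroring the order in which create_mem_ref simplifies its parts.
   All names here are hypothetical.  */
#include <stdbool.h>

struct toy_parts
{
  long symbol, base, index, step, offset;
  bool has_symbol, has_index, has_offset;
};

static long
toy_flatten_address (struct toy_parts p)
{
  if (p.step != 1)		/* Move the multiplication to the index.  */
    {
      p.index *= p.step;
      p.step = 1;
    }
  if (p.has_symbol)		/* Add the symbol to the base.  */
    {
      p.base += p.symbol;
      p.has_symbol = false;
    }
  if (p.has_index)		/* Add the index to the base.  */
    {
      p.base += p.index;
      p.has_index = false;
    }
  if (p.has_offset)		/* Add the offset to the base.  */
    {
      p.base += p.offset;
      p.has_offset = false;
    }
  return p.base;		/* Simplest shape: only a register.  */
}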
Example #4
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  tree *rhsp, *lhsp;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up with a conversion needed for a non-invariant
	 address which we cannot do in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && !is_gimple_min_invariant (def_rhs)))
	return forward_propagate_addr_expr (lhs, def_rhs);

      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS. 
     ADDR_EXPR will not appear on the LHS.  */
  lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF
      && TREE_OPERAND (lhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, lhs)
      && (lhsp != gimple_assign_lhs_ptr (use_stmt)
	  || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
					TREE_TYPE (rhs))))
    {
      *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS if this was not the only use.  */
      if (single_use_p)
	return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhsp = gimple_assign_rhs1_ptr (use_stmt);
  while (handled_component_p (*rhsp)
	 || TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, rhs))
    {
      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so, 
     propagate the ADDR_EXPR into the use of NAME and try to
     create a VCE and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && TYPE_SIZE (TREE_TYPE (rhs))
      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Function decls should not be used for VCE either, as it could be a
         function descriptor that we want rather than the actual function code.  */
      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
      /* We should not convert volatile loads to non-volatile loads.  */
      && !TYPE_VOLATILE (TREE_TYPE (rhs))
      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
			  TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)) 
   {
     tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
     new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
     if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
       {
	 /* If we have folded the VIEW_CONVERT_EXPR then the result is only
	    valid if we can replace the whole rhs of the use statement.  */
	 if (rhs != gimple_assign_rhs1 (use_stmt))
	   return false;
	 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
					     true, GSI_NEW_STMT);
	 gimple_assign_set_rhs1 (use_stmt, new_rhs);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return true;
       }
     /* If the defining rhs comes from an indirect reference, then do not
        convert into a VIEW_CONVERT_EXPR.  */
     def_rhs_base = TREE_OPERAND (def_rhs, 0);
     while (handled_component_p (def_rhs_base))
       def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
     if (!INDIRECT_REF_P (def_rhs_base))
       {
	 /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
	    reference.  Place it there and fold the thing.  */
	 *rhsp = new_rhs;
	 fold_stmt_inplace (use_stmt);
	 tidy_after_forward_propagate_addr (use_stmt);
	 return true;
       }
   }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do. */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
					       array_ref, rhs2);
      if (new_rhs)
	{
	  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
	  use_stmt = gsi_stmt (*use_stmt_gsi);
	  update_stmt (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is the result of
     multiplying an index by the size of the array elements and then
     converting to the proper type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}
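
/* A standalone illustration (not GCC code) of the last two
   transformations: &x[0] p+ C is rewritten as &x[C / sizeof (elt)]
   when C is a multiple of the element size, so pointer arithmetic
   becomes array indexing.  */
#include <assert.h>

int
main (void)
{
  int x[8];
  char *p = (char *) &x[0];

  /* &x[0] p+ (2 * sizeof (int)) is &x[2].  */
  assert ((int *) (p + 2 * sizeof (int)) == &x[2]);
  return 0;
}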
Example #5
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = stmt;
	  ass_var = gimple_call_lhs (stmt);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx ++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
	         must have a copyable type and the two arguments must have
	         reasonably equivalent types.  The latter requirement could be
	         relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
					         TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition; however, it could be relaxed by
		 being more careful with copying the new value of the
		 parameter (emitting an appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign. */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
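
/* A standalone sketch (not GCC code) of the accumulators M and A
   collected above.  For a call of the form
       return a + m * f (...);
   each statement between the call and the return contributes a
   multiplicand to M or an addend to A, which the later transform
   threads through the tail recursion.  Factorial is the classic
   instance:  */
unsigned
fact (unsigned n)
{
  if (n <= 1)
    return 1;
  /* The MULT_EXPR after the recursive call yields m = n, a = 0, so
     the call is tail-recursive modulo the multiplicative
     accumulator.  */
  return n * fact (n - 1);
}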
bool
may_propagate_copy (tree dest, tree orig)
{
    tree type_d = TREE_TYPE (dest);
    tree type_o = TREE_TYPE (orig);

    /* If ORIG flows in from an abnormal edge, it cannot be propagated.  */
    if (TREE_CODE (orig) == SSA_NAME
            && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
        return false;

    /* If DEST is an SSA_NAME that flows from an abnormal edge, then it
       cannot be replaced.  */
    if (TREE_CODE (dest) == SSA_NAME
            && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
        return false;

    /* For memory partitions, copies are OK as long as the memory symbol
       belongs to the partition.  */
    if (TREE_CODE (dest) == SSA_NAME
            && TREE_CODE (SSA_NAME_VAR (dest)) == MEMORY_PARTITION_TAG)
        return (TREE_CODE (orig) == SSA_NAME
                && !is_gimple_reg (orig)
                && (SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)
                    || bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (dest)),
                                     DECL_UID (SSA_NAME_VAR (orig)))));

    if (TREE_CODE (orig) == SSA_NAME
            && TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG)
        return (TREE_CODE (dest) == SSA_NAME
                && !is_gimple_reg (dest)
                && (SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)
                    || bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (orig)),
                                     DECL_UID (SSA_NAME_VAR (dest)))));

    /* Do not copy between types for which we *do* need a conversion.  */
    if (!useless_type_conversion_p (type_d, type_o))
        return false;

    /* FIXME.  GIMPLE is allowing pointer assignments and comparisons of
       pointers that have different alias sets.  This means that these
       pointers will have different memory tags associated to them.

       If we allow copy propagation in these cases, statements de-referencing
       the new pointer will now have a reference to a different memory tag
       with potentially incorrect SSA information.

       This was showing up in libjava/java/util/zip/ZipFile.java with code
       like:

       	struct java.io.BufferedInputStream *T.660;
       	struct java.io.BufferedInputStream *T.647;
       	struct java.io.InputStream *is;
       	struct java.io.InputStream *is.662;
       	[ ... ]
       	T.660 = T.647;
       	is = T.660;	<-- This ought to be type-casted
       	is.662 = is;

       Also, f/name.c exposed a similar problem with a COND_EXPR predicate
       that was causing DOM to generate an equivalence with two pointers of
       alias-incompatible types:

       	struct _ffename_space *n;
       	struct _ffename *ns;
       	[ ... ]
       	if (n == ns)
       	  goto lab;
       	...
       	lab:
       	return n;

       I think that GIMPLE should emit the appropriate type-casts.  For the
       time being, blocking copy-propagation in these cases is the safe thing
       to do.  */
    if (TREE_CODE (dest) == SSA_NAME
            && TREE_CODE (orig) == SSA_NAME
            && POINTER_TYPE_P (type_d)
            && POINTER_TYPE_P (type_o))
    {
        tree mt_dest = symbol_mem_tag (SSA_NAME_VAR (dest));
        tree mt_orig = symbol_mem_tag (SSA_NAME_VAR (orig));
        if (mt_dest && mt_orig && mt_dest != mt_orig)
            return false;
        else if (get_alias_set (TREE_TYPE (type_d)) !=
                 get_alias_set (TREE_TYPE (type_o)))
            return false;
        else if (!MTAG_P (SSA_NAME_VAR (dest))
                 && !MTAG_P (SSA_NAME_VAR (orig))
                 && (DECL_NO_TBAA_P (SSA_NAME_VAR (dest))
                     != DECL_NO_TBAA_P (SSA_NAME_VAR (orig))))
            return false;

        /* Also verify flow-sensitive information is compatible.  */
        if (SSA_NAME_PTR_INFO (orig) && SSA_NAME_PTR_INFO (dest))
        {
            struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig);
            struct ptr_info_def *dest_ptr_info = SSA_NAME_PTR_INFO (dest);

            if (orig_ptr_info->name_mem_tag
                    && dest_ptr_info->name_mem_tag
                    && orig_ptr_info->pt_vars
                    && dest_ptr_info->pt_vars
                    && !bitmap_intersect_p (dest_ptr_info->pt_vars,
                                            orig_ptr_info->pt_vars))
                return false;
        }
    }

    /* If the destination is a SSA_NAME for a virtual operand, then we have
       some special cases to handle.  */
    if (TREE_CODE (dest) == SSA_NAME && !is_gimple_reg (dest))
    {
        /* If both operands are SSA_NAMEs referring to virtual operands, then
           we can always propagate.  */
        if (TREE_CODE (orig) == SSA_NAME
                && !is_gimple_reg (orig))
            return true;

        /* We have a "copy" from something like a constant into a virtual
        operand.  Reject these.  */
        return false;
    }

    /* Anything else is OK.  */
    return true;
}
Example #7
static inline tree
chrec_fold_plus_poly_poly (enum tree_code code,
			   tree type,
			   tree poly0,
			   tree poly1)
{
  tree left, right;
  struct loop *loop0 = get_chrec_loop (poly0);
  struct loop *loop1 = get_chrec_loop (poly1);
  tree rtype = code == POINTER_PLUS_EXPR ? chrec_type (poly1) : type;

  gcc_assert (poly0);
  gcc_assert (poly1);
  gcc_assert (TREE_CODE (poly0) == POLYNOMIAL_CHREC);
  gcc_assert (TREE_CODE (poly1) == POLYNOMIAL_CHREC);
  if (POINTER_TYPE_P (chrec_type (poly0)))
    gcc_checking_assert (ptrofftype_p (chrec_type (poly1))
			 && useless_type_conversion_p (type, chrec_type (poly0)));
  else
    gcc_checking_assert (useless_type_conversion_p (type, chrec_type (poly0))
			 && useless_type_conversion_p (type, chrec_type (poly1)));

  /*
    {a, +, b}_1 + {c, +, d}_2  ->  {{a, +, b}_1 + c, +, d}_2,
    {a, +, b}_2 + {c, +, d}_1  ->  {{c, +, d}_1 + a, +, b}_2,
    {a, +, b}_x + {c, +, d}_x  ->  {a+c, +, b+d}_x.  */
  if (flow_loop_nested_p (loop0, loop1))
    {
      if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly1),
	   chrec_fold_plus (type, poly0, CHREC_LEFT (poly1)),
	   CHREC_RIGHT (poly1));
      else
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly1),
	   chrec_fold_minus (type, poly0, CHREC_LEFT (poly1)),
	   chrec_fold_multiply (type, CHREC_RIGHT (poly1),
				SCALAR_FLOAT_TYPE_P (type)
				? build_real (type, dconstm1)
				: build_int_cst_type (type, -1)));
    }

  if (flow_loop_nested_p (loop1, loop0))
    {
      if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly0),
	   chrec_fold_plus (type, CHREC_LEFT (poly0), poly1),
	   CHREC_RIGHT (poly0));
      else
	return build_polynomial_chrec
	  (CHREC_VARIABLE (poly0),
	   chrec_fold_minus (type, CHREC_LEFT (poly0), poly1),
	   CHREC_RIGHT (poly0));
    }

  /* This function should never be called for chrecs of loops that
     do not belong to the same loop nest.  */
  gcc_assert (loop0 == loop1);

  if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
    {
      left = chrec_fold_plus
	(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
      right = chrec_fold_plus
	(rtype, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
    }
  else
    {
      left = chrec_fold_minus
	(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
      right = chrec_fold_minus
	(type, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
    }

  if (chrec_zerop (right))
    return left;
  else
    return build_polynomial_chrec
      (CHREC_VARIABLE (poly0), left, right);
}
Example #8
static unsigned int
tree_nrv (void)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE code.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree ret_val;

	  if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      /* In a function with an aggregate return value, the
		 gimplifier has changed all non-empty RETURN_EXPRs to
		 return the RESULT_DECL.  */
	      ret_val = gimple_return_retval (stmt);
	      if (ret_val)
		gcc_assert (ret_val == result);
	    }
	  else if (is_gimple_assign (stmt)
		   && gimple_assign_lhs (stmt) == result)
	    {
              tree rhs;

	      if (!gimple_assign_copy_p (stmt))
		return 0;

	      rhs = gimple_assign_rhs1 (stmt);

	      /* Now verify that this return statement uses the same value
		 as any previously encountered return statement.  */
	      if (found != NULL)
		{
		  /* If we found a return statement using a different variable
		     than previous return statements, then we cannot perform
		     NRV optimizations.  */
		  if (found != rhs)
		    return 0;
		}
	      else
		found = rhs;

	      /* The returned value must be a local automatic variable of the
		 same type and alignment as the function's result.  */
	      if (TREE_CODE (found) != VAR_DECL
		  || TREE_THIS_VOLATILE (found)
		  || DECL_CONTEXT (found) != current_function_decl
		  || TREE_STATIC (found)
		  || TREE_ADDRESSABLE (found)
		  || DECL_ALIGN (found) > DECL_ALIGN (result)
		  || !useless_type_conversion_p (result_type,
					        TREE_TYPE (found)))
		return 0;
	    }
	  else if (is_gimple_assign (stmt))
	    {
	      tree addr = get_base_address (gimple_assign_lhs (stmt));
	       /* If there's any modification of a component of RESULT,
		  bail out.  */
	      if (addr && addr == result)
		return 0;
	    }
	}
    }

  if (!found)
    return 0;

  /* If dumping details, note the NRV replacement once and only once.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, "  with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* At this point we know that all the return statements return the
     same local which has suitable attributes for NRV.   Copy debugging
     information from FOUND to RESULT.  */
  DECL_NAME (result) = DECL_NAME (found);
  DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found);
  DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found);
  TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  /* If this is a copy from VAR to RESULT, remove it.  */
	  if (gimple_assign_copy_p (stmt)
	      && gimple_assign_lhs (stmt) == result
	      && gimple_assign_rhs1 (stmt) == found)
	    gsi_remove (&gsi, true);
	  else
	    {
	      struct walk_stmt_info wi;
	      memset (&wi, 0, sizeof (wi));
	      wi.info = &data;
	      walk_gimple_op (stmt, finalize_nrv_r, &wi);
	      gsi_next (&gsi);
	    }
	}
    }

  /* FOUND is no longer used.  Ensure it gets removed.  */
  var_ann (found)->used = 0;
  return 0;
}
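
/* A source-level sketch (not GCC code) of the pattern this pass looks
   for: every return statement returns the same local aggregate, so
   the local can be constructed directly in the return slot and the
   final copy disappears.  */
struct big { int data[64]; };

struct big
make_big (int seed)
{
  struct big tmp;		/* FOUND: local, non-volatile,      */
  for (int i = 0; i < 64; i++)	/* non-static, non-addressable,     */
    tmp.data[i] = seed + i;	/* same type as the RESULT_DECL.    */
  return tmp;			/* Every return returns TMP.        */
}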
void
merge_alias_info (tree orig_name, tree new_name)
{
    tree new_sym = SSA_NAME_VAR (new_name);
    tree orig_sym = SSA_NAME_VAR (orig_name);
    var_ann_t new_ann = var_ann (new_sym);
    var_ann_t orig_ann = var_ann (orig_sym);

    /* No merging necessary when memory partitions are involved.  */
    if (factoring_name_p (new_name))
    {
        gcc_assert (!is_gimple_reg (orig_sym));
        return;
    }
    else if (factoring_name_p (orig_name))
    {
        gcc_assert (!is_gimple_reg (new_sym));
        return;
    }

    gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig_name))
                && POINTER_TYPE_P (TREE_TYPE (new_name)));

#if defined ENABLE_CHECKING
    gcc_assert (useless_type_conversion_p (TREE_TYPE (orig_name),
                                           TREE_TYPE (new_name)));

    /* Check that flow-sensitive information is compatible.  Notice that
       we may not merge flow-sensitive information here.  This function
       is called when propagating equivalences dictated by the IL, like
       a copy operation P_i = Q_j, and from equivalences dictated by
       control-flow, like if (P_i == Q_j).

       In the former case, P_i and Q_j are equivalent in every block
       dominated by the assignment, so their flow-sensitive information
       is always the same.  However, in the latter case, the pointers
       P_i and Q_j are only equivalent in one of the sub-graphs out of
       the predicate, so their flow-sensitive information is not the
       same in every block dominated by the predicate.

       Since we cannot distinguish one case from another in this
       function, we can only make sure that if P_i and Q_j have
       flow-sensitive information, they should be compatible.

       As callers of merge_alias_info are supposed to call may_propagate_copy
       first, the following check is redundant.  Thus, only do it if checking
       is enabled.  */
    if (SSA_NAME_PTR_INFO (orig_name) && SSA_NAME_PTR_INFO (new_name))
    {
        struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig_name);
        struct ptr_info_def *new_ptr_info = SSA_NAME_PTR_INFO (new_name);

        /* Note that pointer NEW and ORIG may actually have different
           pointed-to variables (e.g., PR 18291 represented in
           testsuite/gcc.c-torture/compile/pr18291.c).  However, since
           NEW is being copy-propagated into ORIG, it must always be
           true that the pointed-to set for pointer NEW is the same, or
           a subset, of the pointed-to set for pointer ORIG.  If this
           isn't the case, we shouldn't have been able to do the
           propagation of NEW into ORIG.  */
        if (orig_ptr_info->name_mem_tag
                && new_ptr_info->name_mem_tag
                && orig_ptr_info->pt_vars
                && new_ptr_info->pt_vars)
            gcc_assert (bitmap_intersect_p (new_ptr_info->pt_vars,
                                            orig_ptr_info->pt_vars));
    }
#endif

    /* Synchronize the symbol tags.  If both pointers had a tag and they
       are different, then something has gone wrong.  Symbol tags can
       always be merged because they are flow insensitive; all the SSA
       names of the same base DECL share the same symbol tag.  */
    if (new_ann->symbol_mem_tag == NULL_TREE)
        new_ann->symbol_mem_tag = orig_ann->symbol_mem_tag;
    else if (orig_ann->symbol_mem_tag == NULL_TREE)
        orig_ann->symbol_mem_tag = new_ann->symbol_mem_tag;
    else
        gcc_assert (new_ann->symbol_mem_tag == orig_ann->symbol_mem_tag);

    /* Copy flow-sensitive alias information in case NEW_NAME
       didn't get an NMT but was set to pt_anything for optimization
       purposes.  In case ORIG_NAME has an NMT we can safely use its
       flow-sensitive alias information as a conservative estimate.  */
    if (SSA_NAME_PTR_INFO (orig_name)
            && SSA_NAME_PTR_INFO (orig_name)->name_mem_tag
            && (!SSA_NAME_PTR_INFO (new_name)
                || !SSA_NAME_PTR_INFO (new_name)->name_mem_tag))
    {
        struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig_name);
        struct ptr_info_def *new_ptr_info = get_ptr_info (new_name);
        memcpy (new_ptr_info, orig_ptr_info, sizeof (struct ptr_info_def));
    }
}
static bool
conditional_replacement (basic_block cond_bb, basic_block middle_bb,
                         edge e0, edge e1, gimple phi,
                         tree arg0, tree arg1)
{
    tree result;
    gimple stmt, new_stmt;
    tree cond;
    gimple_stmt_iterator gsi;
    edge true_edge, false_edge;
    tree new_var, new_var2;

    /* FIXME: Gimplification of complex type is too hard for now.  */
    if (TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
            || TREE_CODE (TREE_TYPE (arg1)) == COMPLEX_TYPE)
        return false;

    /* If the PHI arguments are the constants 0 and 1, convert the PHI
       to a conditional.  */
    if ((integer_zerop (arg0) && integer_onep (arg1))
            || (integer_zerop (arg1) && integer_onep (arg0)))
        ;
    else
        return false;

    if (!empty_block_p (middle_bb))
        return false;

    /* At this point we know we have a GIMPLE_COND with two successors.
       One successor is BB, the other successor is an empty block which
       falls through into BB.

       There is a single PHI node at the join point (BB) and its arguments
       are constants (0, 1).

       So, given the condition COND, and the two PHI arguments, we can
       rewrite this PHI into non-branching code:

         dest = (COND) or dest = COND'

       We use the condition as-is if the argument associated with the
       true edge has the value one or the argument associated with the
       false edge has the value zero.  Note that those conditions are not
       the same since only one of the outgoing edges from the GIMPLE_COND
       will directly reach BB and thus be associated with an argument.  */

    stmt = last_stmt (cond_bb);
    result = PHI_RESULT (phi);

    /* To handle special cases like floating point comparison, it is easier
       and less error-prone to build a tree and gimplify it on the fly,
       though it is less efficient.  */
    cond = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
                        gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));

    /* We need to know which is the true edge and which is the false
       edge so that we know when to invert the condition below.  */
    extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
    if ((e0 == true_edge && integer_zerop (arg0))
            || (e0 == false_edge && integer_onep (arg0))
            || (e1 == true_edge && integer_zerop (arg1))
            || (e1 == false_edge && integer_onep (arg1)))
        cond = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (cond), cond);

    /* Insert our new statements at the end of the conditional block before
       the COND_STMT.  */
    gsi = gsi_for_stmt (stmt);
    new_var = force_gimple_operand_gsi (&gsi, cond, true, NULL, true,
                                        GSI_SAME_STMT);

    if (!useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (new_var)))
    {
        new_var2 = create_tmp_var (TREE_TYPE (result), NULL);
        add_referenced_var (new_var2);
        new_stmt = gimple_build_assign_with_ops (CONVERT_EXPR, new_var2,
                   new_var, NULL);
        new_var2 = make_ssa_name (new_var2, new_stmt);
        gimple_assign_set_lhs (new_stmt, new_var2);
        gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
        new_var = new_var2;
    }

    replace_phi_edge_with_variable (cond_bb, e1, phi, new_var);

    /* Note that we optimized this PHI.  */
    return true;
}
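
/* A source-level sketch (not GCC code) of the PHI this function
   rewrites: the two incoming arguments are the constants 0 and 1, so
   the PHI collapses into the (possibly inverted) condition.  */
int
before (int a, int b)
{
  int dest;
  if (a < b)
    dest = 1;
  else
    dest = 0;		/* PHI <1, 0> at the join point.  */
  return dest;
}

int
after (int a, int b)
{
  return a < b;		/* dest = COND, no branch.  */
}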
Example #11
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, p_set, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
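
/* A source-level sketch (not GCC code) of the COND_EXPR fixup above:
   a bitfield read may have a lowered type that differs from the type
   of the whole conditional, so the mismatching arm is converted to
   the lowered type.  */
struct s { unsigned bf : 3; };

unsigned
pick (struct s x, int cond)
{
  /* The true arm has the lowered bitfield type, the false arm has
     type unsigned; genericization converts the arms so both agree
     with the type of the COND_EXPR.  */
  return cond ? x.bf : 0u;
}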
Example #12
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack, i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #13
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the
	       var in outer contexts, avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	/* Don't dereference an invisiref in reduction clause's
	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	   still needs to be genericized.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			    cp_genericize_r, data, NULL);
	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						 : TRY_FINALLY_EXPR,
			  void_type_node,
			  CLEANUP_BODY (stmt),
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS
	 chained list.  */
      if (wtd->bind_expr_stack.exists ())
	{
	  int i;
	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
	   || TREE_CODE (stmt) == OMP_SIMD
	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
	*stmt_p
	  = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
					SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
	*stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      else
	*stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
	*stmt_p = size_one_node;
      return NULL;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
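
The COND_EXPR fixup in both versions of cp_genericize_r exists because an arm that reads a bitfield can be given a lowered type that need not match the type of the conditional itself; fold_convert is then applied until useless_type_conversion_p accepts the arm's type. A hypothetical input that exercises this path (my illustration, not taken from GCC's testsuite):

/* Hypothetical source input for the COND_EXPR bitfield fixup above:
   the arm reading s.b and the arm yielding 0 may end up with
   different types once the bitfield reference is lowered.  */
struct S { int b : 3; };

int
pick (int cond, struct S s)
{
  return cond ? s.b : 0;
}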
Example #14
int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	cp_genericize_tree (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
		  || (TREE_CODE (op1) == CONSTRUCTOR
		      && CONSTRUCTOR_NELTS (op1) == 0
		      && !TREE_CLOBBER_P (op1))
		  || (TREE_CODE (op1) == CALL_EXPR
		      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
		 && is_really_empty_class (TREE_TYPE (op0)))
	  {
	    /* Remove any copies of empty classes.  We check that the RHS
	       has a simple form so that TARGET_EXPRs and non-empty
	       CONSTRUCTORs get reduced properly, and we leave the return
	       slot optimization alone because it isn't a copy (FIXME so it
	       shouldn't be represented as one).

	       Also drop volatile variables on the RHS to avoid infinite
	       recursion from gimplify_expr trying to load the value.  */
	    if (!TREE_SIDE_EFFECTS (op1)
		|| (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
	      *expr_p = op0;
	    else if (TREE_CODE (op1) == MEM_REF
		     && TREE_THIS_VOLATILE (op1))
	      {
		/* Similarly for volatile MEM_REFs on the RHS.  */
		if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
		  *expr_p = op0;
		else
		  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				    TREE_OPERAND (op1, 0), op0);
	      }
	    else
	      *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				op0, op1);
	  }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
		  && cilk_detect_spawn_and_unwrap (expr_p));

      /* If errors are seen, then just process it as a CALL_EXPR.  */
      if (!seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      
    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
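
The MODIFY_EXPR branch above shows the typical defensive pattern around useless_type_conversion_p: an explicit conversion is inserted only when the middle end would not already treat the lhs/rhs types as interchangeable. A standalone sketch isolating that pattern (my own helper name, maybe_view_convert_rhs; the argument order mirrors the source above):

/* Sketch (not a GCC helper) of the guard pattern from the MODIFY_EXPR
   case above: wrap the RHS in a VIEW_CONVERT_EXPR only when the
   conversion between the operand types is not already a no-op.  */
static void
maybe_view_convert_rhs (tree *expr_p)
{
  tree op0 = TREE_OPERAND (*expr_p, 0);	/* lhs */
  tree op1 = TREE_OPERAND (*expr_p, 1);	/* rhs */

  if (!useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
    TREE_OPERAND (*expr_p, 1)
      = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), op1);
}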