Example #1
0
void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
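
A plain C++ illustration (ordinary user code, not GCC-internal) of the construct the routine above serves: the entities captured by a lambda are accessed inside its call operator through capture proxies, and the function above inserts proxies that were still pending once the lambda's operator() is the current function. A minimal sketch of such captures:

#include <iostream>

int main ()
{
  int x = 1;
  int y = 2;
  /* Inside the call operator, uses of x and y go through capture proxies
     built by the front end; this source form is what those proxies stand
     for.  */
  auto f = [x, &y] (int z) { return x + y + z; };
  std::cout << f (3) << '\n';   /* prints 6 */
  return 0;
}
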
Example #2
0
tree
build_throw (tree exp)
{
  if (exp == error_mark_node)
    return exp;

  if (processing_template_decl)
    {
      if (cfun)
	current_function_returns_abnormally = 1;
      exp = build_min (THROW_EXPR, void_type_node, exp);
      SET_EXPR_LOCATION (exp, input_location);
      return exp;
    }

  if (exp && null_node_p (exp))
    warning (0, "throwing NULL, which has integral, not pointer type");

  if (exp != NULL_TREE)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (exp, true))
	return error_mark_node;
    }

  if (! doing_eh ())
    return error_mark_node;

  if (exp)
    {
      tree throw_type;
      tree temp_type;
      tree cleanup;
      tree object, ptr;
      tree tmp;
      tree allocate_expr;

      /* The CLEANUP_TYPE is the internal type of a destructor.  */
      if (!cleanup_type)
	{
	  tmp = build_function_type_list (void_type_node,
					  ptr_type_node, NULL_TREE);
	  cleanup_type = build_pointer_type (tmp);
	}

      if (!throw_fn)
	{
	  tree name = get_identifier ("__cxa_throw");
	  throw_fn = get_global_binding (name);
	  if (!throw_fn)
	    {
	      /* Declare void __cxa_throw (void*, void*, void (*)(void*)).  */
	      /* ??? Second argument is supposed to be "std::type_info*".  */
	      tmp = build_function_type_list (void_type_node,
					      ptr_type_node, ptr_type_node,
					      cleanup_type, NULL_TREE);
	      throw_fn = push_throw_library_fn (name, tmp);

	      if (flag_tm)
		{
		  tree itm_name = get_identifier ("_ITM_cxa_throw");
		  tree itm_fn = get_global_binding (itm_name);
		  if (!itm_fn)
		    itm_fn = push_throw_library_fn (itm_name, tmp);
		  apply_tm_attr (itm_fn, get_identifier ("transaction_pure"));
		  record_tm_replacement (throw_fn, itm_fn);
		}
	    }
	}

      /* [except.throw]

	 A throw-expression initializes a temporary object, the type
	 of which is determined by removing any top-level
	 cv-qualifiers from the static type of the operand of throw
	 and adjusting the type from "array of T" or "function returning
	 T" to "pointer to T" or "pointer to function returning T"
	 respectively.  */
      temp_type = is_bitfield_expr_with_lowered_type (exp);
      if (!temp_type)
	temp_type = cv_unqualified (type_decays_to (TREE_TYPE (exp)));

      /* OK, this is kind of wacky.  The standard says that we call
	 terminate when the exception handling mechanism, after
	 completing evaluation of the expression to be thrown but
	 before the exception is caught (_except.throw_), calls a
	 user function that exits via an uncaught exception.

	 So we have to protect the actual initialization of the
	 exception object with terminate(), but evaluate the
	 expression first.  Since there could be temps in the
	 expression, we need to handle that, too.  We also expand
	 the call to __cxa_allocate_exception first (which doesn't
	 matter, since it can't throw).  */

      /* Allocate the space for the exception.  */
      allocate_expr = do_allocate_exception (temp_type);
      allocate_expr = get_target_expr (allocate_expr);
      ptr = TARGET_EXPR_SLOT (allocate_expr);
      TARGET_EXPR_CLEANUP (allocate_expr) = do_free_exception (ptr);
      CLEANUP_EH_ONLY (allocate_expr) = 1;

      object = build_nop (build_pointer_type (temp_type), ptr);
      object = cp_build_fold_indirect_ref (object);

      /* And initialize the exception object.  */
      if (CLASS_TYPE_P (temp_type))
	{
	  int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
	  vec<tree, va_gc> *exp_vec;
	  bool converted = false;

	  /* Under C++0x [12.8/16 class.copy], a thrown lvalue is sometimes
	     treated as an rvalue for the purposes of overload resolution
	     to favor move constructors over copy constructors.  */
	  if (/* Must be a local, automatic variable.  */
	      VAR_P (exp)
	      && DECL_CONTEXT (exp) == current_function_decl
	      && ! TREE_STATIC (exp)
	      /* The variable must not have the `volatile' qualifier.  */
	      && !(cp_type_quals (TREE_TYPE (exp)) & TYPE_QUAL_VOLATILE))
	    {
	      tree moved = move (exp);
	      exp_vec = make_tree_vector_single (moved);
	      moved = (build_special_member_call
		       (object, complete_ctor_identifier, &exp_vec,
			TREE_TYPE (object), flags|LOOKUP_PREFER_RVALUE,
			tf_none));
	      release_tree_vector (exp_vec);
	      if (moved != error_mark_node)
		{
		  exp = moved;
		  converted = true;
		}
	    }

	  /* Call the copy constructor.  */
	  if (!converted)
	    {
	      exp_vec = make_tree_vector_single (exp);
	      exp = (build_special_member_call
		     (object, complete_ctor_identifier, &exp_vec,
		      TREE_TYPE (object), flags, tf_warning_or_error));
	      release_tree_vector (exp_vec);
	    }

	  if (exp == error_mark_node)
	    {
	      error ("  in thrown expression");
	      return error_mark_node;
	    }
	}
      else
	{
	  tmp = decay_conversion (exp, tf_warning_or_error);
	  if (tmp == error_mark_node)
	    return error_mark_node;
	  exp = build2 (INIT_EXPR, temp_type, object, tmp);
	}

      /* Mark any cleanups from the initialization as MUST_NOT_THROW, since
	 they are run after the exception object is initialized.  */
      cp_walk_tree_without_duplicates (&exp, wrap_cleanups_r, 0);

      /* Prepend the allocation.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (exp), allocate_expr, exp);

      /* Force all the cleanups to be evaluated here so that we don't have
	 to do them during unwinding.  */
      exp = build1 (CLEANUP_POINT_EXPR, void_type_node, exp);

      throw_type = build_eh_type_type (prepare_eh_type (TREE_TYPE (object)));

      cleanup = NULL_TREE;
      if (type_build_dtor_call (TREE_TYPE (object)))
	{
	  tree dtor_fn = lookup_fnfields (TYPE_BINFO (TREE_TYPE (object)),
					  complete_dtor_identifier, 0);
	  dtor_fn = BASELINK_FUNCTIONS (dtor_fn);
	  mark_used (dtor_fn);
	  if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (object)))
	    {
	      cxx_mark_addressable (dtor_fn);
	      /* Pretend it's a normal function.  */
	      cleanup = build1 (ADDR_EXPR, cleanup_type, dtor_fn);
	    }
	}
      if (cleanup == NULL_TREE)
	cleanup = build_int_cst (cleanup_type, 0);

      /* ??? Indicate that this function call throws throw_type.  */
      tmp = cp_build_function_call_nary (throw_fn, tf_warning_or_error,
					 ptr, throw_type, cleanup, NULL_TREE);

      /* Tack on the initialization stuff.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (tmp), exp, tmp);
    }
  else
    {
      /* Rethrow current exception.  */
      if (!rethrow_fn)
	{
	  tree name = get_identifier ("__cxa_rethrow");
	  rethrow_fn = get_global_binding (name);
	  if (!rethrow_fn)
	    /* Declare void __cxa_rethrow (void).  */
	    rethrow_fn = push_throw_library_fn
	      (name, build_function_type_list (void_type_node, NULL_TREE));

	  if (flag_tm)
	    apply_tm_attr (rethrow_fn, get_identifier ("transaction_pure"));
	}

      /* ??? Indicate that this function call allows exceptions of the type
	 of the enclosing catch block (if known).  */
      exp = cp_build_function_call_vec (rethrow_fn, NULL, tf_warning_or_error);
    }

  exp = build1 (THROW_EXPR, void_type_node, exp);
  SET_EXPR_LOCATION (exp, input_location);

  return exp;
}
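
The [except.throw] and [class.copy] comments in build_throw above describe how the thrown operand initializes the exception object and when a thrown local lvalue is treated as an rvalue so that a move constructor can be preferred. A minimal standalone C++ sketch of that situation (ordinary user code, not GCC-internal):

#include <iostream>
#include <string>
#include <utility>

struct Payload
{
  std::string data;
  explicit Payload (std::string d) : data (std::move (d)) {}
  Payload (const Payload &p) : data (p.data) { std::cout << "copy\n"; }
  Payload (Payload &&p) : data (std::move (p.data)) { std::cout << "move\n"; }
};

void f ()
{
  Payload p ("hello");
  /* p is a local, non-static, non-volatile automatic variable, so overload
     resolution for initializing the exception object may treat it as an
     rvalue and pick the move constructor, as the [class.copy] comment
     above describes.  */
  throw p;
}

int main ()
{
  try { f (); }
  catch (const Payload &e) { std::cout << e.data << '\n'; }
  return 0;
}
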
Example #3
0
tree
ocp_convert (tree type, tree expr, int convtype, int flags)
{
  tree e = expr;
  enum tree_code code = TREE_CODE (type);
  const char *invalid_conv_diag;
  tree e1;

  if (error_operand_p (e) || type == error_mark_node)
    return error_mark_node;

  complete_type (type);
  complete_type (TREE_TYPE (expr));

  if ((invalid_conv_diag
       = targetm.invalid_conversion (TREE_TYPE (expr), type)))
    {
      error (invalid_conv_diag);
      return error_mark_node;
    }

  e = integral_constant_value (e);
  if (error_operand_p (e))
    return error_mark_node;

  if (MAYBE_CLASS_TYPE_P (type) && (convtype & CONV_FORCE_TEMP))
    /* We need a new temporary; don't take this shortcut.  */;
  else if (same_type_ignoring_top_level_qualifiers_p (type, TREE_TYPE (e)))
    {
      if (same_type_p (type, TREE_TYPE (e)))
	/* The call to fold will not always remove the NOP_EXPR as
	   might be expected, since if one of the types is a typedef,
	   the comparison in fold is just equality of pointers, not a
	   call to comptypes.  We don't call fold in this case because
	   that can result in infinite recursion; fold will call
	   convert, which will call ocp_convert, etc.  */
	return e;
      /* For complex data types, we need to perform componentwise
	 conversion.  */
      else if (TREE_CODE (type) == COMPLEX_TYPE)
	return fold_if_not_in_template (convert_to_complex (type, e));
      else if (TREE_CODE (e) == TARGET_EXPR)
	{
	  /* Don't build a NOP_EXPR of class type.  Instead, change the
	     type of the temporary.  */
	  TREE_TYPE (e) = TREE_TYPE (TARGET_EXPR_SLOT (e)) = type;
	  return e;
	}
      else
	{
	  /* We shouldn't be treating objects of ADDRESSABLE type as
	     rvalues.  */
	  gcc_assert (!TREE_ADDRESSABLE (type));
	  return fold_if_not_in_template (build_nop (type, e));
	}
    }

  e1 = targetm.convert_to_type (type, e);
  if (e1)
    return e1;

  if (code == VOID_TYPE && (convtype & CONV_STATIC))
    {
      e = convert_to_void (e, ICV_CAST, tf_warning_or_error);
      return e;
    }

  if (INTEGRAL_CODE_P (code))
    {
      tree intype = TREE_TYPE (e);

      if (TREE_CODE (type) == ENUMERAL_TYPE)
	{
	  /* enum = enum, enum = int, enum = float, (enum)pointer are all
	     errors.  */
	  if (((INTEGRAL_OR_ENUMERATION_TYPE_P (intype)
		|| TREE_CODE (intype) == REAL_TYPE)
	       && ! (convtype & CONV_STATIC))
	      || TREE_CODE (intype) == POINTER_TYPE)
	    {
	      if (flags & LOOKUP_COMPLAIN)
		permerror (input_location, "conversion from %q#T to %q#T", intype, type);

	      if (!flag_permissive)
		return error_mark_node;
	    }

	  /* [expr.static.cast]

	     8. A value of integral or enumeration type can be explicitly
	     converted to an enumeration type. The value is unchanged if
	     the original value is within the range of the enumeration
	     values. Otherwise, the resulting enumeration value is
	     unspecified.  */
	  if (TREE_CODE (expr) == INTEGER_CST
	      && !int_fits_type_p (expr, ENUM_UNDERLYING_TYPE (type)))
	    warning (OPT_Wconversion, 
		     "the result of the conversion is unspecified because "
		     "%qE is outside the range of type %qT",
		     expr, type);
	}
      if (MAYBE_CLASS_TYPE_P (intype))
	{
	  tree rval;
	  rval = build_type_conversion (type, e);
	  if (rval)
	    return rval;
	  if (flags & LOOKUP_COMPLAIN)
	    error ("%q#T used where a %qT was expected", intype, type);
	  return error_mark_node;
	}
      if (code == BOOLEAN_TYPE)
	return cp_truthvalue_conversion (e);

      return fold_if_not_in_template (convert_to_integer (type, e));
    }
  if (NULLPTR_TYPE_P (type) && e && null_ptr_cst_p (e))
    return nullptr_node;
  if (POINTER_TYPE_P (type) || TYPE_PTR_TO_MEMBER_P (type))
    return fold_if_not_in_template (cp_convert_to_pointer (type, e));
  if (code == VECTOR_TYPE)
    {
      tree in_vtype = TREE_TYPE (e);
      if (MAYBE_CLASS_TYPE_P (in_vtype))
	{
	  tree ret_val;
	  ret_val = build_type_conversion (type, e);
	  if (ret_val)
	    return ret_val;
	  if (flags & LOOKUP_COMPLAIN)
	    error ("%q#T used where a %qT was expected", in_vtype, type);
	  return error_mark_node;
	}
      return fold_if_not_in_template (convert_to_vector (type, e));
    }
  if (code == REAL_TYPE || code == COMPLEX_TYPE)
    {
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (e)))
	{
	  tree rval;
	  rval = build_type_conversion (type, e);
	  if (rval)
	    return rval;
	  else
	    if (flags & LOOKUP_COMPLAIN)
	      error ("%q#T used where a floating point value was expected",
			TREE_TYPE (e));
	}
      if (code == REAL_TYPE)
	return fold_if_not_in_template (convert_to_real (type, e));
      else if (code == COMPLEX_TYPE)
	return fold_if_not_in_template (convert_to_complex (type, e));
    }

  /* New C++ semantics:  since assignment is now based on
     memberwise copying,  if the rhs type is derived from the
     lhs type, then we may still do a conversion.  */
  if (RECORD_OR_UNION_CODE_P (code))
    {
      tree dtype = TREE_TYPE (e);
      tree ctor = NULL_TREE;

      dtype = TYPE_MAIN_VARIANT (dtype);

      /* Conversion between aggregate types.  New C++ semantics allow
	 objects of derived type to be cast to objects of base type.
	 Old semantics only allowed this between pointers.

	 There may be some ambiguity between using a constructor
	 vs. using a type conversion operator when both apply.  */

      ctor = e;

      if (abstract_virtuals_error (NULL_TREE, type))
	return error_mark_node;

      if (BRACE_ENCLOSED_INITIALIZER_P (ctor))
	ctor = perform_implicit_conversion (type, ctor, tf_warning_or_error);
      else if ((flags & LOOKUP_ONLYCONVERTING)
	       && ! (CLASS_TYPE_P (dtype) && DERIVED_FROM_P (type, dtype)))
	/* For copy-initialization, first we create a temp of the proper type
	   with a user-defined conversion sequence, then we direct-initialize
	   the target with the temp (see [dcl.init]).  */
	ctor = build_user_type_conversion (type, ctor, flags);
      else
	{
	  VEC(tree,gc) *ctor_vec = make_tree_vector_single (ctor);
	  ctor = build_special_member_call (NULL_TREE,
					    complete_ctor_identifier,
					    &ctor_vec,
					    type, flags,
					    tf_warning_or_error);
	  release_tree_vector (ctor_vec);
	}
      if (ctor)
	return build_cplus_new (type, ctor);
    }

  if (flags & LOOKUP_COMPLAIN)
    {
      /* If the conversion failed and expr was an invalid use of pointer to
	 member function, try to report a meaningful error.  */
      if (invalid_nonstatic_memfn_p (expr, tf_warning_or_error))
	/* We displayed the error message.  */;
      else
	error ("conversion from %qT to non-scalar type %qT requested",
	       TREE_TYPE (expr), type);
    }
  return error_mark_node;
}
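
ocp_convert above dispatches on the target type: enumeration and other integral targets, boolean, pointer, vector, floating-point/complex, and class types each take their own branch. A small standalone C++ snippet (ordinary user code, not GCC-internal) showing source-level conversions that land in a few of those branches:

enum Color { RED, GREEN, BLUE };

int main ()
{
  double d = 3.9;
  int i = static_cast<int> (d);           /* integral branch: convert_to_integer */
  bool b = static_cast<bool> (i);         /* BOOLEAN_TYPE branch: truth-value conversion */
  Color c = static_cast<Color> (2);       /* ENUMERAL_TYPE branch */
  double back = static_cast<double> (i);  /* REAL_TYPE branch: convert_to_real */
  return (b && c == BLUE) ? static_cast<int> (back) - 3 : 1;   /* returns 0 */
}
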
Example #4
0
static tree
expand_sec_reduce_builtin (tree an_builtin_fn, tree *new_var)
{
  tree new_var_type = NULL_TREE, func_parm, new_yes_expr, new_no_expr;
  tree array_ind_value = NULL_TREE, new_no_ind, new_yes_ind, new_no_list;
  tree new_yes_list, new_cond_expr, new_expr = NULL_TREE; 
  vec<tree, va_gc> *array_list = NULL, *array_operand = NULL;
  size_t list_size = 0, rank = 0, ii = 0;
  tree  body, an_init, loop_with_init = alloc_stmt_list ();
  tree array_op0, comp_node = NULL_TREE;
  tree call_fn = NULL_TREE, identity_value = NULL_TREE;
  tree init = NULL_TREE, cond_init = NULL_TREE;
  enum tree_code code = NOP_EXPR;
  location_t location = UNKNOWN_LOCATION;
  vec<vec<an_parts> > an_info = vNULL;
  auto_vec<an_loop_parts> an_loop_info;
  enum built_in_function an_type =
    is_cilkplus_reduce_builtin (CALL_EXPR_FN (an_builtin_fn));
  vec <tree, va_gc> *func_args;
  
  if (an_type == BUILT_IN_NONE)
    return NULL_TREE;

  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE
      && an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    func_parm = CALL_EXPR_ARG (an_builtin_fn, 0);
  else
    {
      call_fn = CALL_EXPR_ARG (an_builtin_fn, 2);

      /* We need to do this because we are "faking" the builtin function types,
	 so the compiler does a bunch of typecasts and this will get rid of
	 all that!  */
      STRIP_NOPS (call_fn);
      if (TREE_CODE (call_fn) != OVERLOAD
	  && TREE_CODE (call_fn) != FUNCTION_DECL)
	call_fn = TREE_OPERAND (call_fn, 0);
      identity_value = CALL_EXPR_ARG (an_builtin_fn, 0);
      func_parm = CALL_EXPR_ARG (an_builtin_fn, 1);
      STRIP_NOPS (identity_value);
    }
  STRIP_NOPS (func_parm);
  
  location = EXPR_LOCATION (an_builtin_fn);
  
  /* Note about using find_rank (): If find_rank returns false, then it must
     have already reported an error, thus we just return an error_mark_node
     without any error emission.  */
  if (!find_rank (location, an_builtin_fn, an_builtin_fn, true, &rank))
      return error_mark_node;
  if (rank == 0)
    {
      error_at (location, "Invalid builtin arguments");
      return error_mark_node;
    }
  else if (rank > 1 
	   && (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	       || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND))
    { 
      error_at (location, "__sec_reduce_min_ind or __sec_reduce_max_ind cannot "
		"have arrays with dimension greater than 1");
      return error_mark_node;
    }
  
  extract_array_notation_exprs (func_parm, true, &array_list);
  list_size = vec_safe_length (array_list);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      new_var_type = boolean_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_var_type = size_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      if (call_fn && identity_value)
	new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      new_var_type = NULL_TREE;
      break;
    default:
      gcc_unreachable ();
    }
    
  if (new_var_type && TREE_CODE (new_var_type) == ARRAY_TYPE)
    new_var_type = TREE_TYPE (new_var_type);
  an_loop_info.safe_grow_cleared (rank);

  an_init = push_stmt_list ();

  /* Assign the array notation components to variables so that they can satisfy
     the exec-once rule.  */
  for (ii = 0; ii < list_size; ii++)
    if (TREE_CODE ((*array_list)[ii]) == ARRAY_NOTATION_REF)
      {
	tree anode = (*array_list)[ii];
	make_triplet_val_inv (&ARRAY_NOTATION_START (anode));
	make_triplet_val_inv (&ARRAY_NOTATION_LENGTH (anode));
	make_triplet_val_inv (&ARRAY_NOTATION_STRIDE (anode));
      }
  cilkplus_extract_an_triplets (array_list, list_size, rank, &an_info);
  for (ii = 0; ii < rank; ii++)
    {
      tree typ = ptrdiff_type_node;

      /* In this place, we are using get_temp_regvar instead of 
	 create_temporary_var if an_type is SEC_REDUCE_MAX/MIN_IND because
	 the array_ind_value depends on this value being initialized to 0.  */
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	  || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND) 
	an_loop_info[ii].var = get_temp_regvar (typ, build_zero_cst (typ));
      else
	{
	  an_loop_info[ii].var = create_temporary_var (typ);
	  add_decl_expr (an_loop_info[ii].var);
	}
      an_loop_info[ii].ind_init = 
	build_x_modify_expr (location, an_loop_info[ii].var, INIT_EXPR,
			     build_zero_cst (typ), tf_warning_or_error);
    }
  array_operand = create_array_refs (location, an_info, an_loop_info,
				      list_size, rank);
  replace_array_notations (&func_parm, true, array_list, array_operand);
  
  if (!TREE_TYPE (func_parm))      
    TREE_TYPE (func_parm) = TREE_TYPE ((*array_list)[0]);
  
  create_cmp_incr (location, &an_loop_info, rank, an_info, tf_warning_or_error);
  if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
      || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND) 
    array_ind_value = get_temp_regvar (TREE_TYPE (func_parm), func_parm);

  array_op0 = (*array_operand)[0];
  if (INDIRECT_REF_P (array_op0))
    array_op0 = TREE_OPERAND (array_op0, 0);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
      code = PLUS_EXPR;
      init = build_zero_cst (new_var_type);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
      code = MULT_EXPR;
      init = build_one_cst (new_var_type);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      code = ((an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO) ? EQ_EXPR
	: NE_EXPR);
      init = build_zero_cst (new_var_type);
      cond_init = build_one_cst (new_var_type);
      comp_node = build_zero_cst (TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      code = ((an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO) ? NE_EXPR
	: EQ_EXPR);
      init = build_one_cst (new_var_type);
      cond_init = build_zero_cst (new_var_type);
      comp_node = build_zero_cst (TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
      code = MAX_EXPR;
      init = (TYPE_MIN_VALUE (new_var_type) ? TYPE_MIN_VALUE (new_var_type)
	: func_parm);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      code = MIN_EXPR;
      init = (TYPE_MAX_VALUE (new_var_type) ? TYPE_MAX_VALUE (new_var_type)
	: func_parm);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
      code = (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND ? LE_EXPR
	: GE_EXPR);
      init = an_loop_info[0].var;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      init = identity_value;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      init = NULL_TREE;
      break;
    default:
      gcc_unreachable ();
    }

  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    *new_var = get_temp_regvar (new_var_type, init);
  else
    *new_var = NULL_TREE;

  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:      
      new_expr = build_x_modify_expr (location, *new_var, code, func_parm,
				      tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      /* In all these cases, assume the false case is true and as soon as
	 we find a true case,  set the true flag on and latch it in.  */
      new_yes_expr = build_x_modify_expr (location, *new_var, NOP_EXPR,
					  cond_init, tf_warning_or_error);
      new_no_expr = build_x_modify_expr (location, *new_var, NOP_EXPR,
					 *new_var, tf_warning_or_error);
      new_cond_expr = build_x_binary_op
	(location, code, func_parm, TREE_CODE (func_parm), comp_node,
	 TREE_CODE (comp_node), NULL, tf_warning_or_error);
      new_expr = build_x_conditional_expr (location, new_cond_expr,
					   new_yes_expr, new_no_expr,
					   tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_cond_expr = build_x_binary_op
	(location, code, *new_var, TREE_CODE (*new_var), func_parm,
	 TREE_CODE (func_parm), NULL, tf_warning_or_error);
      new_expr = build_x_modify_expr (location, *new_var, NOP_EXPR, func_parm,
				      tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_yes_expr = build_x_modify_expr (location, array_ind_value, NOP_EXPR,
					  func_parm, tf_warning_or_error);
      new_no_expr = build_x_modify_expr (location, array_ind_value, NOP_EXPR,
					 array_ind_value, tf_warning_or_error);
      if (list_size > 1)
	new_yes_ind = build_x_modify_expr (location, *new_var, NOP_EXPR,
					   an_loop_info[0].var,
					   tf_warning_or_error);
      else
	new_yes_ind = build_x_modify_expr (location, *new_var, NOP_EXPR,
					   TREE_OPERAND (array_op0, 1),
					   tf_warning_or_error);
      new_no_ind = build_x_modify_expr (location, *new_var, NOP_EXPR, *new_var,
					tf_warning_or_error);
      new_yes_list = alloc_stmt_list ();
      append_to_statement_list (new_yes_ind, &new_yes_list);
      append_to_statement_list (new_yes_expr, &new_yes_list);

      new_no_list = alloc_stmt_list ();
      append_to_statement_list (new_no_ind, &new_no_list);
      append_to_statement_list (new_no_expr, &new_no_list);

      new_cond_expr = build_x_binary_op (location, code, array_ind_value,
					 TREE_CODE (array_ind_value), func_parm,
					 TREE_CODE (func_parm), NULL,
					 tf_warning_or_error);
      new_expr = build_x_conditional_expr (location, new_cond_expr,
					   new_yes_list, new_no_list,
					   tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      func_args = make_tree_vector ();
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE)
	vec_safe_push (func_args, *new_var);
      else
	vec_safe_push (func_args, identity_value);
      vec_safe_push (func_args, func_parm);

      new_expr = finish_call_expr (call_fn, &func_args, false, true,
				   tf_warning_or_error);
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE)
	new_expr = build_x_modify_expr (location, *new_var, NOP_EXPR, new_expr,
					tf_warning_or_error);
      release_tree_vector (func_args);
      break;
    default:
      gcc_unreachable ();
    }
  an_init = pop_stmt_list (an_init);
  append_to_statement_list (an_init, &loop_with_init);
  body = new_expr;

  for (ii = 0; ii < rank; ii++)
    {
      tree new_loop = push_stmt_list ();
      create_an_loop (an_loop_info[ii].ind_init, an_loop_info[ii].cmp,
		      an_loop_info[ii].incr, body);
      body = pop_stmt_list (new_loop);
    }
  append_to_statement_list (body, &loop_with_init);

  release_vec_vec (an_info);

  return loop_with_init;
}
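
expand_sec_reduce_builtin above rewrites a Cilk Plus __sec_reduce_* builtin into an accumulator initialization followed by a loop nest over the array-notation triplets (start, length, stride). As a rough sketch in plain C++ (hypothetical helper name; the Cilk Plus array-notation syntax itself is a GCC extension and is not reproduced here), a rank-1 additive reduction expands to roughly this shape:

#include <cstddef>

/* Sketch of the expansion for an additive reduction over a one-dimensional
   section: the accumulator corresponds to *new_var initialized with
   build_zero_cst, the induction variable to an_loop_info[0].var, and the
   accumulation to the PLUS_EXPR built with build_x_modify_expr above.  */
double
sec_reduce_add_sketch (const double *a, std::size_t start,
                       std::size_t length, std::size_t stride)
{
  double acc = 0.0;
  for (std::size_t ii = 0; ii < length; ++ii)
    acc += a[start + ii * stride];
  return acc;
}
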
Example #5
0
static void
do_build_assign_ref (tree fndecl)
{
  tree parm = TREE_CHAIN (DECL_ARGUMENTS (fndecl));
  tree compound_stmt;

  compound_stmt = begin_compound_stmt (0);
  parm = convert_from_reference (parm);

  if (TYPE_HAS_TRIVIAL_ASSIGN_REF (current_class_type)
      && is_empty_class (current_class_type))
    /* Don't copy the padding byte; it might not have been allocated
       if *this is a base subobject.  */;
  else if (TYPE_HAS_TRIVIAL_ASSIGN_REF (current_class_type))
    {
      tree t = build2 (MODIFY_EXPR, void_type_node, current_class_ref, parm);
      finish_expr_stmt (t);
    }
  else
    {
      tree fields;
      int cvquals = cp_type_quals (TREE_TYPE (parm));
      int i;
      tree binfo, base_binfo;

      /* Assign to each of the direct base classes.  */
      for (binfo = TYPE_BINFO (current_class_type), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree converted_parm;
	  VEC(tree,gc) *parmvec;

	  /* We must convert PARM directly to the base class
	     explicitly since the base class may be ambiguous.  */
	  converted_parm = build_base_path (PLUS_EXPR, parm, base_binfo, 1);
	  /* Call the base class assignment operator.  */
	  parmvec = make_tree_vector_single (converted_parm);
	  finish_expr_stmt
	    (build_special_member_call (current_class_ref,
					ansi_assopname (NOP_EXPR),
					&parmvec,
					base_binfo,
					LOOKUP_NORMAL | LOOKUP_NONVIRTUAL,
                                        tf_warning_or_error));
	  release_tree_vector (parmvec);
	}

      /* Assign to each of the non-static data members.  */
      for (fields = TYPE_FIELDS (current_class_type);
	   fields;
	   fields = TREE_CHAIN (fields))
	{
	  tree comp = current_class_ref;
	  tree init = parm;
	  tree field = fields;
	  tree expr_type;
	  int quals;

	  if (TREE_CODE (field) != FIELD_DECL || DECL_ARTIFICIAL (field))
	    continue;

	  expr_type = TREE_TYPE (field);

	  if (CP_TYPE_CONST_P (expr_type))
	    {
	      error ("non-static const member %q#D, can't use default "
		     "assignment operator", field);
	      continue;
	    }
	  else if (TREE_CODE (expr_type) == REFERENCE_TYPE)
	    {
	      error ("non-static reference member %q#D, can't use "
		     "default assignment operator", field);
	      continue;
	    }

	  if (DECL_NAME (field))
	    {
	      if (VFIELD_NAME_P (DECL_NAME (field)))
		continue;
	    }
	  else if (ANON_AGGR_TYPE_P (expr_type)
		   && TYPE_FIELDS (expr_type) != NULL_TREE)
	    /* Just use the field; anonymous types can't have
	       nontrivial copy ctors or assignment ops.  */;
	  else
	    continue;

	  comp = build3 (COMPONENT_REF, expr_type, comp, field, NULL_TREE);

	  /* Compute the type of init->field  */
	  quals = cvquals;
	  if (DECL_MUTABLE_P (field))
	    quals &= ~TYPE_QUAL_CONST;
	  expr_type = cp_build_qualified_type (expr_type, quals);

	  init = build3 (COMPONENT_REF, expr_type, init, field, NULL_TREE);

	  if (DECL_NAME (field))
	    init = cp_build_modify_expr (comp, NOP_EXPR, init, 
					 tf_warning_or_error);
	  else
	    init = build2 (MODIFY_EXPR, TREE_TYPE (comp), comp, init);
	  finish_expr_stmt (init);
	}
    }
  finish_return_stmt (current_class_ref);
  finish_compound_stmt (compound_stmt);
}
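
do_build_assign_ref above synthesizes the body of an implicitly-defined copy assignment operator: assign each direct base class, then each eligible non-static data member, rejecting const and reference members. In plain C++ (ordinary user code, not GCC-internal), the generated operator for a simple class behaves as if it were written by hand like this:

/* Hand-written equivalent of the memberwise assignment the routine above
   builds for the implicit operator=.  */
struct Base
{
  int b;
  Base &operator= (const Base &other) { b = other.b; return *this; }
};

struct Derived : Base
{
  int x;
  double y;

  Derived &operator= (const Derived &other)
  {
    Base::operator= (other);   /* assign each direct base class */
    x = other.x;               /* then each non-static data member */
    y = other.y;
    return *this;              /* finish_return_stmt (current_class_ref) */
  }
};

int main ()
{
  Derived a, c;
  a.b = 1; a.x = 2; a.y = 3.0;
  c = a;                       /* invokes the memberwise assignment above */
  return (c.b == 1 && c.x == 2 && c.y == 3.0) ? 0 : 1;
}
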