Example No. 1
0
static void
ipcp_process_devirtualization_opportunities (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  struct cgraph_edge *ie, *next_ie;

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      int param_index, types_count, j;
      HOST_WIDE_INT token;
      tree target, delta;

      next_ie = ie->next_callee;
      if (!ie->indirect_info->polymorphic)
	continue;
      param_index = ie->indirect_info->param_index;
      if (param_index == -1
	  || ipa_param_cannot_devirtualize_p (info, param_index)
	  || ipa_param_types_vec_empty (info, param_index))
	continue;

      token = ie->indirect_info->otr_token;
      target = NULL_TREE;
      types_count = VEC_length (tree, info->params[param_index].types);
      for (j = 0; j < types_count; j++)
	{
	  tree binfo = VEC_index (tree, info->params[param_index].types, j);
	  tree d;
	  tree t = gimple_get_virt_method_for_binfo (token, binfo, &d, true);

	  if (!t)
	    {
	      target = NULL_TREE;
	      break;
	    }
	  else if (!target)
	    {
	      target = t;
	      delta = d;
	    }
	  else if (target != t || !tree_int_cst_equal (delta, d))
	    {
	      target = NULL_TREE;
	      break;
	    }
	}

      if (target)
	ipa_make_edge_direct_to_target (ie, target, delta);
    }
}
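The loop above only devirtualizes when every recorded type for the parameter resolves the virtual call to the same (target, delta) pair. A minimal self-contained C sketch of that agreement check (hypothetical helper, not a GCC API):

#include <stddef.h>

/* Return the single target shared by all candidates, or NULL if any
   lookup failed or the candidates disagree, mirroring the inner loop
   of ipcp_process_devirtualization_opportunities.  */
static void *
common_target (void *const *candidates, int n)
{
  void *target = NULL;
  for (int i = 0; i < n; i++)
    {
      if (candidates[i] == NULL)
	return NULL;		/* lookup failed for one type */
      if (target == NULL)
	target = candidates[i];	/* first candidate */
      else if (target != candidates[i])
	return NULL;		/* candidates disagree */
    }
  return target;
}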
Example No. 2
0
bool
length_mismatch_in_expr_p (location_t loc, vec<vec<an_parts> > list)
{
  size_t ii, jj;
  tree length = NULL_TREE;
  
  size_t x = list.length ();
  size_t y = list[0].length ();
  
  for (jj = 0; jj < y; jj++)
    {
      length = NULL_TREE;
      for (ii = 0; ii < x; ii++)
	{
	  if (!length)
	    length = list[ii][jj].length;
	  else if (TREE_CODE (length) == INTEGER_CST)
	    {
	      /* If length is an INTEGER_CST and list[ii][jj].length is also
		 an integer constant, check whether they are equal.  If they
		 are not equal, return true.  */
	      if (TREE_CODE (list[ii][jj].length) == INTEGER_CST
		  && !tree_int_cst_equal (list[ii][jj].length, length))
		{ 
		  error_at (loc, "length mismatch in expression"); 
		  return true;
		}
	    }
	  else
	    /* We set the length node as the current node just in case it turns
	       out to be an integer.  */
	    length = list[ii][jj].length;
	}
    }
  return false;
}
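For context, this predicate guards Cilk Plus array notation. A hedged, user-level example (hypothetical input, assuming the array[start:length] section syntax that required -fcilkplus on the GCC versions supporting it) that would trigger the diagnostic:

int a[10], b[10], c[10];

void
f (void)
{
  /* Section lengths 5 and 10 are both INTEGER_CSTs and differ, so
     length_mismatch_in_expr_p reports "length mismatch in expression".  */
  c[0:5] = a[0:5] + b[0:10];
}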
Example No. 3
0
static bool
pp_c_enumeration_constant (c_pretty_printer *pp, tree e)
{
  bool value_is_named = true;
  tree type = TREE_TYPE (e);
  tree value;

  /* Find the name of this constant.  */
  for (value = TYPE_VALUES (type);
       value != NULL_TREE && !tree_int_cst_equal (TREE_VALUE (value), e);
       value = TREE_CHAIN (value))
    ;

  if (value != NULL_TREE)
    pp_id_expression (pp, TREE_PURPOSE (value));
  else
    {
      /* Value must have been cast.  */
      pp_c_type_cast (pp, type);
      value_is_named = false;
    }

  return value_is_named;
}
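A rough, self-contained C analogue (illustrative only, not the pretty-printer itself) of the two cases distinguished above: a value matching an enumerator is printed by name, while any other value must have been cast and is printed with an explicit cast.

#include <stdio.h>

enum color { RED, GREEN, BLUE };

static void
print_color (int v)
{
  static const char *names[] = { "RED", "GREEN", "BLUE" };
  if (v >= RED && v <= BLUE)
    printf ("%s\n", names[v]);		/* named value, e.g. "GREEN" */
  else
    printf ("(enum color)%d\n", v);	/* value must have been cast */
}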
Example No. 4
0
static tree
build_common_decl (gfc_common_head *com, tree union_type, bool is_init)
{
  gfc_symbol *common_sym;
  tree decl;

  /* Create a namespace to store symbols for common blocks.  */
  if (gfc_common_ns == NULL)
    gfc_common_ns = gfc_get_namespace (NULL, 0);

  gfc_get_symbol (com->name, gfc_common_ns, &common_sym);
  decl = common_sym->backend_decl;

  /* Update the size of this common block as needed.  */
  if (decl != NULL_TREE)
    {
      tree size = TYPE_SIZE_UNIT (union_type);

      /* Named common blocks of the same name shall be of the same size
	 in all scoping units of a program in which they appear, but
	 blank common blocks may be of different sizes.  */
      if (!tree_int_cst_equal (DECL_SIZE_UNIT (decl), size)
	  && strcmp (com->name, BLANK_COMMON_NAME))
	gfc_warning ("Named COMMON block '%s' at %L shall be of the "
		     "same size as elsewhere (%lu vs %lu bytes)", com->name,
		     &com->where,
		     (unsigned long) TREE_INT_CST_LOW (size),
		     (unsigned long) TREE_INT_CST_LOW (DECL_SIZE_UNIT (decl)));

      if (tree_int_cst_lt (DECL_SIZE_UNIT (decl), size))
	{
	  DECL_SIZE (decl) = TYPE_SIZE (union_type);
	  DECL_SIZE_UNIT (decl) = size;
	  DECL_MODE (decl) = TYPE_MODE (union_type);
	  TREE_TYPE (decl) = union_type;
	  layout_decl (decl, 0);
	}
     }

  /* If this common block has been declared in a previous program unit,
     and either it is already initialized or there is no new initialization
     for it, just return.  */
  if ((decl != NULL_TREE) && (!is_init || DECL_INITIAL (decl)))
    return decl;

  /* If there is no backend_decl for the common block, build it.  */
  if (decl == NULL_TREE)
    {
      decl = build_decl (input_location,
			 VAR_DECL, get_identifier (com->name), union_type);
      gfc_set_decl_assembler_name (decl, gfc_sym_mangled_common_id (com));
      TREE_PUBLIC (decl) = 1;
      TREE_STATIC (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      if (!com->is_bind_c)
	DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
      else
        {
	  /* Do not set the alignment for bind(c) common blocks to
	     BIGGEST_ALIGNMENT because that won't match what C does.  Also,
	     for common blocks with one element, the alignment must be
	     that of the field within the common block in order to match
	     what C will do.  */
	  tree field = NULL_TREE;
	  field = TYPE_FIELDS (TREE_TYPE (decl));
	  if (DECL_CHAIN (field) == NULL_TREE)
	    DECL_ALIGN (decl) = TYPE_ALIGN (TREE_TYPE (field));
	}
      DECL_USER_ALIGN (decl) = 0;
      GFC_DECL_COMMON_OR_EQUIV (decl) = 1;

      gfc_set_decl_location (decl, &com->where);

      if (com->threadprivate)
	DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);

      /* Place the back end declaration for this common block in
         GLOBAL_BINDING_LEVEL.  */
      common_sym->backend_decl = pushdecl_top_level (decl);
    }

  /* Has no initial values.  */
  if (!is_init)
    {
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_COMMON (decl) = 1;
      DECL_DEFER_OUTPUT (decl) = 1;
    }
  else
    {
      DECL_INITIAL (decl) = error_mark_node;
      DECL_COMMON (decl) = 0;
      DECL_DEFER_OUTPUT (decl) = 0;
    }
  return decl;
}
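Conceptually, the backend decl built here is one public static object whose type is a union of the layouts the common block has in the program units that reference it. A hedged C approximation (illustrative member layouts and hypothetical names):

/* Roughly what COMMON /work/ becomes after build_common_decl: a single
   TREE_PUBLIC, TREE_STATIC object of a union type, re-laid-out whenever a
   larger layout for the same block is seen.  */
union work_layout
{
  struct { double x, y; } unit1;	/* COMMON /work/ x, y in unit 1 */
  struct { double a[2]; } unit2;	/* COMMON /work/ a(2) in unit 2 */
};

union work_layout work_;		/* mangled common-block name */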
Example No. 5
0
bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
	 of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* We don't need to preserve changes in the types' minimum or
	 maximum value in general as these do not generate code
	 unless the types' precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
	  != TYPE_REVERSE_STORAGE_ORDER (outer_type))
	return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that would otherwise have to be
     compared structurally.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
	   && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
	   && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type))
	   && useless_type_conversion_p
	        (TYPE_OFFSET_BASETYPE (outer_type),
		 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}
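A few concrete pairs (illustrative, assuming a typical target where int and long have different modes) showing how the predicate above classifies conversions; the first argument is the type converted to, the second the type converted from:

/* useless_type_conversion_p (int, const int)        -> true
     (qualifiers are stripped via TYPE_MAIN_VARIANT)
   useless_type_conversion_p (unsigned int, int)     -> false
     (signedness differs)
   useless_type_conversion_p (long, int)             -> false
     (mode and precision differ)
   useless_type_conversion_p (int *, float *)        -> true
     (pointed-to types of object pointers are not compared)
   useless_type_conversion_p (void (*)(void), int *) -> false
     (casts to function pointer types are preserved)  */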
Example No. 6
0
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree tmp;

  tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication by the element size or just the SSA name we came along
     with in case the element size is one.  In the latter case, however, we
     do not allow multiplications because they can be computing an index
     into a higher-level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant.  */
     if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
       {
	 /* The first operand to the MULT_EXPR is the desired index.  */
	 index = gimple_assign_rhs1 (offset_def);
       }
     /* If we have idx * tunit + CST * tunit re-associate that.  */
     else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
	       || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
	      && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
					       gimple_assign_rhs2 (offset_def),
					       tunit)) != NULL_TREE)
       {
	 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
	 if (is_gimple_assign (offset_def2)
	     && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
	     && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
	     && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
	   {
	     index = fold_build2 (gimple_assign_rhs_code (offset_def),
				  TREE_TYPE (offset),
				  gimple_assign_rhs1 (offset_def2), tmp);
	   }
	 else
	   return false;
       }
     else
	return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
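A hedged GIMPLE-level sketch of the rewrite this helper performs, assuming a 4-byte element type:

/* Before:  off_1 = i_2 * 4;          (offset_def, MULT_EXPR by tunit)
            p_3  = &a + off_1;        (use_stmt, pointer addition)

   After:   p_3  = &a[i_2];           (array indexing)

   The "idx * 4 + 8" form is first re-associated to "(idx + 2) * 4" via
   div_if_zero_remainder, so the same rewrite yields &a[idx + 2].  */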
Example No. 7
0
static tree
expand_an_in_modify_expr (location_t location, tree lhs,
			  enum tree_code modifycode, tree rhs,
			  tsubst_flags_t complain)
{
  tree array_expr_lhs = NULL_TREE, array_expr_rhs = NULL_TREE;
  tree array_expr = NULL_TREE;
  tree body = NULL_TREE;
  auto_vec<tree> cond_expr;
  vec<tree, va_gc> *lhs_array_operand = NULL, *rhs_array_operand = NULL;
  size_t lhs_rank = 0, rhs_rank = 0, ii = 0;
  vec<tree, va_gc> *rhs_list = NULL, *lhs_list = NULL;
  size_t rhs_list_size = 0, lhs_list_size = 0;
  tree new_modify_expr, new_var = NULL_TREE, builtin_loop, scalar_mods;
  bool found_builtin_fn = false;
  tree an_init, loop_with_init = alloc_stmt_list ();
  vec<vec<an_parts> > lhs_an_info = vNULL, rhs_an_info = vNULL;
  auto_vec<an_loop_parts> lhs_an_loop_info, rhs_an_loop_info;
  tree lhs_len, rhs_len;

  if (!find_rank (location, rhs, rhs, false, &rhs_rank))
    return error_mark_node;
  extract_array_notation_exprs (rhs, false, &rhs_list);
  rhs_list_size = vec_safe_length (rhs_list);
  an_init = push_stmt_list ();
  if (rhs_rank)
    {
      scalar_mods = replace_invariant_exprs (&rhs);
      if (scalar_mods)
	finish_expr_stmt (scalar_mods);
    }
  for (ii = 0; ii < rhs_list_size; ii++)
    {
      tree rhs_node = (*rhs_list)[ii];
      if (TREE_CODE (rhs_node) == CALL_EXPR)
	{
	  builtin_loop = expand_sec_reduce_builtin (rhs_node, &new_var);
	  if (builtin_loop == error_mark_node)
	    return error_mark_node;
	  else if (builtin_loop)
	    {
	      finish_expr_stmt (builtin_loop);
	      found_builtin_fn = true;
	      if (new_var)
		{
		  vec <tree, va_gc> *rhs_sub_list = NULL, *new_var_list = NULL;
		  vec_safe_push (rhs_sub_list, rhs_node);
		  vec_safe_push (new_var_list, new_var);
		  replace_array_notations (&rhs, false, rhs_sub_list,
					   new_var_list);
		}
	    }
	}
    }
  lhs_rank = 0;
  rhs_rank = 0;
  if (!find_rank (location, lhs, lhs, true, &lhs_rank)
      || !find_rank (location, rhs, rhs, true, &rhs_rank))
    {
      pop_stmt_list (an_init);
      return error_mark_node;
    }

  /* If both are scalar, then the only reason we got this far is that there
     were array notations inside the expression using builtin array notation
     functions.  If so, we have already broken those up, and a simple
     build_x_modify_expr will do.  */
  if (lhs_rank == 0 && rhs_rank == 0)
    {
      if (found_builtin_fn)
	{
	  new_modify_expr = build_x_modify_expr (location, lhs,
						 modifycode, rhs, complain);
	  finish_expr_stmt (new_modify_expr);
	  pop_stmt_list (an_init);
	  return an_init;
	}
      else
	gcc_unreachable ();
    }

  /* If for some reason the location is not set, check whether the LHS or
     RHS has location info.  If so, use that so we at least have an idea.  */
  if (location == UNKNOWN_LOCATION)
    {
      if (EXPR_LOCATION (lhs) != UNKNOWN_LOCATION)
	location = EXPR_LOCATION (lhs);
      else if (EXPR_LOCATION (rhs) != UNKNOWN_LOCATION)
	location = EXPR_LOCATION (rhs);
    }
      
  /* We need this when we have a scatter issue.  */
  extract_array_notation_exprs (lhs, true, &lhs_list);
  rhs_list = NULL;
  extract_array_notation_exprs (rhs, true, &rhs_list);
  rhs_list_size = vec_safe_length (rhs_list);
  lhs_list_size = vec_safe_length (lhs_list);
    
  if (lhs_rank == 0 && rhs_rank != 0)
    {
      error_at (location, "%qE cannot be scalar when %qE is not", lhs, rhs);
      return error_mark_node;
    }
  if (lhs_rank != 0 && rhs_rank != 0 && lhs_rank != rhs_rank)
    {
      error_at (location, "rank mismatch between %qE and %qE", lhs, rhs);
      return error_mark_node;
    }
  
  /* Assign the array notation components to variables so that they can
     satisfy the execute-once rule.  */
  for (ii = 0; ii < lhs_list_size; ii++)
    {
      tree anode = (*lhs_list)[ii];
      make_triplet_val_inv (&ARRAY_NOTATION_START (anode));
      make_triplet_val_inv (&ARRAY_NOTATION_LENGTH (anode));
      make_triplet_val_inv (&ARRAY_NOTATION_STRIDE (anode));
    }
  for (ii = 0; ii < rhs_list_size; ii++)
    if ((*rhs_list)[ii] && TREE_CODE ((*rhs_list)[ii]) == ARRAY_NOTATION_REF)
      {
	tree aa = (*rhs_list)[ii];
	make_triplet_val_inv (&ARRAY_NOTATION_START (aa));
	make_triplet_val_inv (&ARRAY_NOTATION_LENGTH (aa));
	make_triplet_val_inv (&ARRAY_NOTATION_STRIDE (aa));
      }
  lhs_an_loop_info.safe_grow_cleared (lhs_rank);
  
  if (rhs_rank)
    rhs_an_loop_info.safe_grow_cleared (rhs_rank);

  cond_expr.safe_grow_cleared (MAX (lhs_rank, rhs_rank));
  cilkplus_extract_an_triplets (lhs_list, lhs_list_size, lhs_rank,
				&lhs_an_info);
  if (rhs_list)
    cilkplus_extract_an_triplets (rhs_list, rhs_list_size, rhs_rank,
				  &rhs_an_info);
  if (length_mismatch_in_expr_p (EXPR_LOCATION (lhs), lhs_an_info)
      || (rhs_list && length_mismatch_in_expr_p (EXPR_LOCATION (rhs),
						 rhs_an_info)))
    {
      pop_stmt_list (an_init);
      goto error;
    }
  rhs_len = ((rhs_list_size > 0 && rhs_rank > 0) ?
    rhs_an_info[0][0].length : NULL_TREE);
  lhs_len = ((lhs_list_size > 0 && lhs_rank > 0) ?
    lhs_an_info[0][0].length : NULL_TREE);
  if (lhs_list_size > 0 && rhs_list_size > 0 && lhs_rank > 0 && rhs_rank > 0
      && TREE_CODE (lhs_len) == INTEGER_CST && rhs_len
      && TREE_CODE (rhs_len) == INTEGER_CST 
      && !tree_int_cst_equal (rhs_len, lhs_len))
    { 
      error_at (location, "length mismatch between LHS and RHS"); 
      pop_stmt_list (an_init); 
      goto error;
    }
   for (ii = 0; ii < lhs_rank; ii++) 
     {
       tree typ = ptrdiff_type_node; 
       lhs_an_loop_info[ii].var = create_temporary_var (typ);
       add_decl_expr (lhs_an_loop_info[ii].var);
       lhs_an_loop_info[ii].ind_init = build_x_modify_expr 
	 (location, lhs_an_loop_info[ii].var, INIT_EXPR, build_zero_cst (typ), 
	  complain);
     }
   
   if (rhs_list_size > 0)
     {
       rhs_array_operand = fix_sec_implicit_args (location, rhs_list,
						  lhs_an_loop_info, lhs_rank,
						  lhs); 
       if (!rhs_array_operand)
	 goto error;
     }
  replace_array_notations (&rhs, true, rhs_list, rhs_array_operand);
  rhs_list_size = 0;
  rhs_list = NULL;
  extract_array_notation_exprs (rhs, true, &rhs_list);
  rhs_list_size = vec_safe_length (rhs_list);    

  for (ii = 0; ii < rhs_rank; ii++)
    {
      tree typ = ptrdiff_type_node;
      rhs_an_loop_info[ii].var = create_temporary_var (typ);
      add_decl_expr (rhs_an_loop_info[ii].var);
      rhs_an_loop_info[ii].ind_init = build_x_modify_expr
	(location, rhs_an_loop_info[ii].var, INIT_EXPR, build_zero_cst (typ), 
	 complain);
    }

  if (lhs_rank)
    {
      lhs_array_operand =
	create_array_refs (location, lhs_an_info, lhs_an_loop_info,
			    lhs_list_size, lhs_rank);
      replace_array_notations (&lhs, true, lhs_list, lhs_array_operand);
    }
  
  if (rhs_array_operand)
    vec_safe_truncate (rhs_array_operand, 0);
  if (rhs_rank)
    {
      rhs_array_operand = create_array_refs (location, rhs_an_info,
					      rhs_an_loop_info, rhs_list_size,
					      rhs_rank);
      /* Replace all the array refs created by the above function because this
	 variable is blown away by the fix_sec_implicit_args function below.  */
      replace_array_notations (&rhs, true, rhs_list, rhs_array_operand);
      vec_safe_truncate (rhs_array_operand , 0);
      rhs_array_operand = fix_sec_implicit_args (location, rhs_list,
						 rhs_an_loop_info, rhs_rank,
						 rhs);
      if (!rhs_array_operand)
	goto error;
      replace_array_notations (&rhs, true, rhs_list, rhs_array_operand);
    }

  array_expr_rhs = rhs;
  array_expr_lhs = lhs;
  
  array_expr = build_x_modify_expr (location, array_expr_lhs, modifycode,
				    array_expr_rhs, complain);
  create_cmp_incr (location, &lhs_an_loop_info, lhs_rank, lhs_an_info,
		   complain);
  if (rhs_rank) 
    create_cmp_incr (location, &rhs_an_loop_info, rhs_rank, rhs_an_info, 
		     complain);
  for (ii = 0; ii < MAX (rhs_rank, lhs_rank); ii++)
    if (ii < lhs_rank && ii < rhs_rank)
      cond_expr[ii] = build_x_binary_op
	(location, TRUTH_ANDIF_EXPR, lhs_an_loop_info[ii].cmp,
	 TREE_CODE (lhs_an_loop_info[ii].cmp), rhs_an_loop_info[ii].cmp,
	 TREE_CODE (rhs_an_loop_info[ii].cmp), NULL, complain);
    else if (ii < lhs_rank && ii >= rhs_rank)
      cond_expr[ii] = lhs_an_loop_info[ii].cmp;
    else
      /* No need to handle ii < rhs_rank && ii >= lhs_rank because in a valid
	 array notation expression the rank of the RHS cannot be greater than
	 that of the LHS.  */
      gcc_unreachable ();
  
  an_init = pop_stmt_list (an_init);
  append_to_statement_list (an_init, &loop_with_init);
  body = array_expr;
  for (ii = 0; ii < MAX (lhs_rank, rhs_rank); ii++)
    {
      tree incr_list = alloc_stmt_list ();
      tree init_list = alloc_stmt_list ();
      tree new_loop = push_stmt_list ();

      if (lhs_rank)
	{
	  append_to_statement_list (lhs_an_loop_info[ii].ind_init, &init_list);
	  append_to_statement_list (lhs_an_loop_info[ii].incr, &incr_list);
	}
      if (rhs_rank)
	{
	  append_to_statement_list (rhs_an_loop_info[ii].ind_init, &init_list);
	  append_to_statement_list (rhs_an_loop_info[ii].incr, &incr_list);
	}
      create_an_loop (init_list, cond_expr[ii], incr_list, body);
      body = pop_stmt_list (new_loop);
    }
  append_to_statement_list (body, &loop_with_init);

  release_vec_vec (lhs_an_info);
  release_vec_vec (rhs_an_info);

  return loop_with_init;

error:
  release_vec_vec (lhs_an_info);
  release_vec_vec (rhs_an_info);

  return error_mark_node;
}
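At the source level the expansion above takes a Cilk Plus array-notation assignment and rewrites it into an explicit loop; a hedged before/after sketch (hypothetical variable name for the induction variable):

/* Before (array notation):

     a[0:n] = b[0:n] * 2;

   After expansion (roughly), with a fresh ptrdiff_t induction variable
   and the start/length/stride triplets first saved into invariant
   temporaries so they are evaluated only once:

     for (__ii = 0; __ii < n; __ii++)
       a[__ii] = b[__ii] * 2;  */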
Example No. 8
0
static bool
lto_symtab_merge (symtab_node *prevailing, symtab_node *entry)
{
  tree prevailing_decl = prevailing->decl;
  tree decl = entry->decl;

  if (prevailing_decl == decl)
    return true;

  /* Merge decl state in both directions, we may still end up using
     the new decl.  */
  TREE_ADDRESSABLE (prevailing_decl) |= TREE_ADDRESSABLE (decl);
  TREE_ADDRESSABLE (decl) |= TREE_ADDRESSABLE (prevailing_decl);

  /* The linker may ask us to combine two incompatible symbols.
     Detect this case and notify the caller of required diagnostics.  */

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Merge decl state in both directions, we may still end up using
	 the new decl.  */
      DECL_POSSIBLY_INLINED (prevailing_decl) |= DECL_POSSIBLY_INLINED (decl);
      DECL_POSSIBLY_INLINED (decl) |= DECL_POSSIBLY_INLINED (prevailing_decl);

      if (warn_type_compatibility_p (TREE_TYPE (prevailing_decl),
			             TREE_TYPE (decl),
				     DECL_COMMON (decl)
				     || DECL_EXTERNAL (decl)))
	return false;

      return true;
    }

  if (warn_type_compatibility_p (TREE_TYPE (prevailing_decl),
				 TREE_TYPE (decl),
				 DECL_COMMON (decl) || DECL_EXTERNAL (decl)))
    return false;

  /* There is no point in comparing too many details of the decls here.
     The type compatibility checks or the completing of types has properly
     dealt with most issues.  */

  /* The following should all not invoke fatal errors as in non-LTO
     mode the linker wouldn't complain either.  Just emit warnings.  */

  /* Report a warning if user-specified alignments do not match.  */
  if ((DECL_USER_ALIGN (prevailing_decl) && DECL_USER_ALIGN (decl))
      && DECL_ALIGN (prevailing_decl) < DECL_ALIGN (decl))
    return false;

  if (DECL_SIZE (decl) && DECL_SIZE (prevailing_decl)
      && !tree_int_cst_equal (DECL_SIZE (decl), DECL_SIZE (prevailing_decl))
      /* As a special case do not warn about merging
	 int a[];
	 and
	 int a[]={1,2,3};
	 here the first declaration is COMMON
	 and sizeof(a) == sizeof (int).  */
      && ((!DECL_COMMON (decl) && !DECL_EXTERNAL (decl))
	  || TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
	  || TYPE_SIZE (TREE_TYPE (decl))
	     != TYPE_SIZE (TREE_TYPE (TREE_TYPE (decl)))))
    return false;

  return true;
}
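The special case the size check above tolerates is the classic tentative-definition pattern across translation units; a small illustration:

/* file1.c: common/tentative declaration, completed to one element,
   so DECL_SIZE equals sizeof (int).  */
int a[];

/* file2.c: the prevailing, initialized definition with a larger size.  */
int a[] = {1, 2, 3};

/* The sizes differ, but since the first decl is DECL_COMMON, its type is
   an ARRAY_TYPE and its recorded size equals the element size, no warning
   is emitted when LTO merges the two symbols.  */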
Example No. 9
0
static int
warn_type_compatibility_p (tree prevailing_type, tree type,
			   bool common_or_extern)
{
  int lev = 0;
  bool odr_p = odr_or_derived_type_p (prevailing_type)
	       && odr_or_derived_type_p (type);

  if (prevailing_type == type)
    return 0;

  /* C++ provides a robust way to check for type compatibility via the ODR
     rule.  */
  if (odr_p && !odr_types_equivalent_p (prevailing_type, type))
    lev |= 2;

  /* Function types need special care, because types_compatible_p never
     considers a prototype compatible with a non-prototype.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      if (TREE_CODE (type) != TREE_CODE (prevailing_type))
	lev |= 1;
      lev |= warn_type_compatibility_p (TREE_TYPE (prevailing_type),
				        TREE_TYPE (type), false);
      if (TREE_CODE (type) == METHOD_TYPE
	  && TREE_CODE (prevailing_type) == METHOD_TYPE)
	lev |= warn_type_compatibility_p (TYPE_METHOD_BASETYPE (prevailing_type),
					  TYPE_METHOD_BASETYPE (type), false);
      if (prototype_p (prevailing_type) && prototype_p (type)
	  && TYPE_ARG_TYPES (prevailing_type) != TYPE_ARG_TYPES (type))
	{
	  tree parm1, parm2;
	  for (parm1 = TYPE_ARG_TYPES (prevailing_type),
	       parm2 = TYPE_ARG_TYPES (type);
	       parm1 && parm2;
	       parm1 = TREE_CHAIN (parm1),
	       parm2 = TREE_CHAIN (parm2))
	    lev |= warn_type_compatibility_p (TREE_VALUE (parm1),
					      TREE_VALUE (parm2), false);
	  if (parm1 || parm2)
	    lev |= odr_p ? 3 : 1;
	}
      if (comp_type_attributes (prevailing_type, type) == 0)
	lev |= 1;
      return lev;
    }

  /* Get complete type.  */
  prevailing_type = TYPE_MAIN_VARIANT (prevailing_type);
  type = TYPE_MAIN_VARIANT (type);

  /* We cannot use types_compatible_p because we permit some changes
     across types.  For example, unsigned size_t and "signed size_t" may be
     compatible when merging C and Fortran types.  */
  if (COMPLETE_TYPE_P (prevailing_type)
      && COMPLETE_TYPE_P (type)
      /* While global declarations are never variadic, we can recurse here
	 for function parameter types.  */
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (prevailing_type)) == INTEGER_CST
      && !tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prevailing_type)))
    {
       /* As a special case do not warn about merging
	  int a[];
	  and
	  int a[]={1,2,3};
	  here the first declaration is COMMON or EXTERN
	  and sizeof(a) == sizeof (int).  */
       if (!common_or_extern
	   || TREE_CODE (type) != ARRAY_TYPE
	   || TYPE_SIZE (type) != TYPE_SIZE (TREE_TYPE (type)))
       lev |= 1;
    }

  /* Verify TBAA compatibility.  Take care of alias set 0 and the fact that
     we make ptr_type_node TBAA compatible with every other type.  */
  if (type_with_alias_set_p (type) && type_with_alias_set_p (prevailing_type))
    {
      alias_set_type set1 = get_alias_set (type);
      alias_set_type set2 = get_alias_set (prevailing_type);

      if (set1 && set2 && set1 != set2 
          && (!POINTER_TYPE_P (type) || !POINTER_TYPE_P (prevailing_type)
	      || (set1 != TYPE_ALIAS_SET (ptr_type_node)
		  && set2 != TYPE_ALIAS_SET (ptr_type_node))))
        lev |= 5;
    }

  return lev;
}
Example No. 10
0
static void
build_constructors (gimple swtch)
{
  unsigned i, branch_num = gimple_switch_num_labels (swtch);
  tree pos = info.range_min;

  for (i = 1; i < branch_num; i++)
    {
      tree cs = gimple_switch_label (swtch, i);
      basic_block bb = label_to_block (CASE_LABEL (cs));
      edge e;
      tree high;
      gimple_stmt_iterator gsi;
      int j;

      if (bb == info.final_bb)
	e = find_edge (info.switch_bb, bb);
      else
	e = single_succ_edge (bb);
      gcc_assert (e);

      while (tree_int_cst_lt (pos, CASE_LOW (cs)))
	{
	  int k;
	  for (k = 0; k < info.phi_count; k++)
	    {
	      constructor_elt *elt;

	      elt = VEC_quick_push (constructor_elt,
				    info.constructors[k], NULL);
	      elt->index = int_const_binop (MINUS_EXPR, pos,
					    info.range_min, 0);
	      elt->value = info.default_values[k];
	    }

	  pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
	}
      gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));

      j = 0;
      if (CASE_HIGH (cs))
	high = CASE_HIGH (cs);
      else
	high = CASE_LOW (cs);
      for (gsi = gsi_start_phis (info.final_bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  tree low = CASE_LOW (cs);
	  pos = CASE_LOW (cs);

	  do 
	    {
	      constructor_elt *elt;

	      elt = VEC_quick_push (constructor_elt,
				    info.constructors[j], NULL);
	      elt->index = int_const_binop (MINUS_EXPR, pos, info.range_min, 0);
	      elt->value = val;

	      pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
	    } while (!tree_int_cst_lt (high, pos) && tree_int_cst_lt (low, pos));
	  j++;
	}
    }
}
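The constructors built above feed the switch-conversion pass, which replaces a value-selecting switch with array lookups; a rough source-level picture of the result (the CSWTCH array name follows the pass's convention, the exact guard is simplified):

/* Before:
     switch (i)
       {
       case 1:  x = 10; break;
       case 3:  x = 30; break;
       default: x = 0;  break;
       }

   After (roughly): one constructor per PHI, covering the whole case range
   with default values filling the holes (index = pos - range_min):

     static const int CSWTCH[] = { 10, 0, 30 };
     x = ((unsigned) (i - 1) <= 2) ? CSWTCH[i - 1] : 0;  */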
Example No. 11
0
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
		  gfc_omp_clauses *do_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  int simple = 0;
  bool dovar_found = false;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
  if (clauses)
    {
      gfc_namelist *n;
      for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL; n = n->next)
	if (code->ext.iterator->var->symtree->n.sym == n->sym)
	  break;
      if (n == NULL)
	for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
	  if (code->ext.iterator->var->symtree->n.sym == n->sym)
	    break;
      if (n != NULL)
	dovar_found = true;
    }

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (pblock, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);
  gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (pblock, &se.pre);
  from = gfc_evaluate_now (se.expr, pblock);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (pblock, &se.pre);
  to = gfc_evaluate_now (se.expr, pblock);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (pblock, &se.pre);
  step = gfc_evaluate_now (se.expr, pblock);

  /* Special case simple loops.  */
  if (integer_onep (step))
    simple = 1;
  else if (tree_int_cst_equal (step, integer_minus_one_node))
    simple = -1;

  /* Loop body.  */
  if (simple)
    {
      init = build2_v (MODIFY_EXPR, dovar, from);
      cond = build2 (simple > 0 ? LE_EXPR : GE_EXPR, boolean_type_node,
		     dovar, to);
      incr = fold_build2 (PLUS_EXPR, type, dovar, step);
      incr = fold_build2 (MODIFY_EXPR, type, dovar, incr);
      if (pblock != &block)
	{
	  pushlevel (0);
	  gfc_start_block (&block);
	}
      gfc_start_block (&body);
    }
  else
    {
      /* STEP is not 1 or -1.  Use:
	 for (count = 0; count < (to + step - from) / step; count++)
	   {
	     dovar = from + count * step;
	     body;
	   cycle_label:;
	   }  */
      tmp = fold_build2 (MINUS_EXPR, type, step, from);
      tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
      tmp = gfc_evaluate_now (tmp, pblock);
      count = gfc_create_var (type, "count");
      init = build2_v (MODIFY_EXPR, count, build_int_cst (type, 0));
      cond = build2 (LT_EXPR, boolean_type_node, count, tmp);
      incr = fold_build2 (PLUS_EXPR, type, count, build_int_cst (type, 1));
      incr = fold_build2 (MODIFY_EXPR, type, count, incr);

      if (pblock != &block)
	{
	  pushlevel (0);
	  gfc_start_block (&block);
	}
      gfc_start_block (&body);

      /* Initialize DOVAR.  */
      tmp = fold_build2 (MULT_EXPR, type, count, step);
      tmp = build2 (PLUS_EXPR, type, from, tmp);
      gfc_add_modify_expr (&body, dovar, tmp);
    }

  if (!dovar_found)
    {
      tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
      OMP_CLAUSE_DECL (tmp) = dovar;
      omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
    }
  if (!simple)
    {
      tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
      OMP_CLAUSE_DECL (tmp) = count;
      omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later. We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
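A worked instance of the non-unit-step rewrite described in the comment above:

/* For a Fortran loop "do dovar = 4, 13, 3" (from = 4, to = 13, step = 3):

     for (count = 0; count < (to + step - from) / step; count++)
       dovar = from + count * step;

   gives (13 + 3 - 4) / 3 = 4 iterations, with dovar taking the values
   4, 7, 10 and 13, exactly as the original DO loop would.  */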
Example No. 12
0
tree
make_thunk (tree function, bool this_adjusting,
            tree fixed_offset, tree virtual_offset)
{
    HOST_WIDE_INT d;
    tree thunk;

    gcc_assert (TREE_CODE (function) == FUNCTION_DECL);
    /* We can have 'this'-adjusting thunks to covariant thunks, but not
       vice versa.  */
    gcc_assert (!DECL_THIS_THUNK_P (function));
    gcc_assert (!DECL_RESULT_THUNK_P (function) || this_adjusting);

    /* Scale the VIRTUAL_OFFSET to be in terms of bytes.  */
    if (this_adjusting && virtual_offset)
        virtual_offset
            = size_binop (MULT_EXPR,
                          virtual_offset,
                          convert (ssizetype,
                                   TYPE_SIZE_UNIT (vtable_entry_type)));

    d = tree_low_cst (fixed_offset, 0);

    /* See if we already have the thunk in question.  For this_adjusting
       thunks VIRTUAL_OFFSET will be an INTEGER_CST, for covariant thunks it
       will be a BINFO.  */
    for (thunk = DECL_THUNKS (function); thunk; thunk = TREE_CHAIN (thunk))
        if (DECL_THIS_THUNK_P (thunk) == this_adjusting
                && THUNK_FIXED_OFFSET (thunk) == d
                && !virtual_offset == !THUNK_VIRTUAL_OFFSET (thunk)
                && (!virtual_offset
                    || (this_adjusting
                        ? tree_int_cst_equal (THUNK_VIRTUAL_OFFSET (thunk),
                                              virtual_offset)
                        : THUNK_VIRTUAL_OFFSET (thunk) == virtual_offset)))
            return thunk;

    /* All thunks must be created before FUNCTION is actually emitted;
       the ABI requires that all thunks be emitted together with the
       function to which they transfer control.  */
    gcc_assert (!TREE_ASM_WRITTEN (function));
    /* Likewise, we can only be adding thunks to a function declared in
       the class currently being laid out.  */
    gcc_assert (TYPE_SIZE (DECL_CONTEXT (function))
                && TYPE_BEING_DEFINED (DECL_CONTEXT (function)));

    thunk = build_decl (FUNCTION_DECL, NULL_TREE, TREE_TYPE (function));
    DECL_LANG_SPECIFIC (thunk) = DECL_LANG_SPECIFIC (function);
    cxx_dup_lang_specific_decl (thunk);
    DECL_THUNKS (thunk) = NULL_TREE;

    DECL_CONTEXT (thunk) = DECL_CONTEXT (function);
    TREE_READONLY (thunk) = TREE_READONLY (function);
    TREE_THIS_VOLATILE (thunk) = TREE_THIS_VOLATILE (function);
    TREE_PUBLIC (thunk) = TREE_PUBLIC (function);
    SET_DECL_THUNK_P (thunk, this_adjusting);
    THUNK_TARGET (thunk) = function;
    THUNK_FIXED_OFFSET (thunk) = d;
    THUNK_VIRTUAL_OFFSET (thunk) = virtual_offset;
    THUNK_ALIAS (thunk) = NULL_TREE;

    /* The thunk itself is not a constructor or destructor, even if
       the thing it is thunking to is.  */
    DECL_INTERFACE_KNOWN (thunk) = 1;
    DECL_NOT_REALLY_EXTERN (thunk) = 1;
    DECL_SAVED_FUNCTION_DATA (thunk) = NULL;
    DECL_DESTRUCTOR_P (thunk) = 0;
    DECL_CONSTRUCTOR_P (thunk) = 0;
    DECL_EXTERNAL (thunk) = 1;
    DECL_ARTIFICIAL (thunk) = 1;
    /* Even if this thunk is a member of a local class, we don't
       need a static chain.  */
    DECL_NO_STATIC_CHAIN (thunk) = 1;
    /* The THUNK is not a pending inline, even if the FUNCTION is.  */
    DECL_PENDING_INLINE_P (thunk) = 0;
    DECL_INLINE (thunk) = 0;
    DECL_DECLARED_INLINE_P (thunk) = 0;
    /* Nor has it been deferred.  */
    DECL_DEFERRED_FN (thunk) = 0;
    /* Nor is it a template instantiation.  */
    DECL_USE_TEMPLATE (thunk) = 0;
    DECL_TEMPLATE_INFO (thunk) = NULL;

    /* Add it to the list of thunks associated with FUNCTION.  */
    TREE_CHAIN (thunk) = DECL_THUNKS (function);
    DECL_THUNKS (function) = thunk;

    return thunk;
}
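A conceptual plain-C sketch (not the C++ ABI machinery) of what a 'this'-adjusting thunk created by make_thunk does at run time: shift the incoming object pointer by THUNK_FIXED_OFFSET (possibly plus a vtable-loaded delta for virtual bases) and tail-call the target. The offset value below is purely illustrative.

enum { FIXED_OFFSET = -16 };	/* stands in for THUNK_FIXED_OFFSET */

struct B;
extern void B_f (struct B *self);

static void
B_f_thunk (char *self)
{
  /* Adjust the incoming object pointer, then jump to the real function.  */
  B_f ((struct B *) (self + FIXED_OFFSET));
}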
Example No. 13
0
static bool
check_process_case (tree cs, struct switch_conv_info *info)
{
  tree ldecl;
  basic_block label_bb, following_bb;
  edge e;

  ldecl = CASE_LABEL (cs);
  label_bb = label_to_block (ldecl);

  e = find_edge (info->switch_bb, label_bb);
  gcc_assert (e);

  if (CASE_LOW (cs) == NULL_TREE)
    {
      /* Default branch.  */
      info->default_prob = e->probability;
      info->default_count = e->count;
    }
  else
    {
      int i;
      info->other_count += e->count;
      for (i = 0; i < 2; i++)
	if (info->bit_test_bb[i] == label_bb)
	  break;
	else if (info->bit_test_bb[i] == NULL)
	  {
	    info->bit_test_bb[i] = label_bb;
	    info->bit_test_uniq++;
	    break;
	  }
      if (i == 2)
	info->bit_test_uniq = 3;
      if (CASE_HIGH (cs) != NULL_TREE
	  && ! tree_int_cst_equal (CASE_LOW (cs), CASE_HIGH (cs)))
	info->bit_test_count += 2;
      else
	info->bit_test_count++;
    }

  if (!label_bb)
    {
      info->reason = "bad case - cs BB  label is NULL";
      return false;
    }

  if (!single_pred_p (label_bb))
    {
      if (info->final_bb && info->final_bb != label_bb)
	{
	  info->reason = "bad case - a non-final BB has two predecessors";
	  return false; /* sth complex going on in this branch  */
	}

      following_bb = label_bb;
    }
  else
    {
      if (!empty_block_p (label_bb))
	{
	  info->reason = "bad case - a non-final BB not empty";
	  return false;
	}

      e = single_succ_edge (label_bb);
      following_bb = single_succ (label_bb);
    }

  if (!info->final_bb)
    info->final_bb = following_bb;
  else if (info->final_bb != following_bb)
    {
      info->reason = "bad case - different final BB";
      return false; /* the only successor is not common for all the branches */
    }

  return true;
}
Example No. 14
0
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
		  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
	{
	  gfc_namelist *n;
	  for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
	       n = n->next)
	    if (code->ext.iterator->var->symtree->n.sym == n->sym)
	      break;
	  if (n != NULL)
	    dovar_found = 1;
	  else if (n == NULL)
	    for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
	      if (code->ext.iterator->var->symtree->n.sym == n->sym)
		break;
	  if (n != NULL)
	    dovar_found++;
	}

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
	{
	  if (integer_onep (step))
	    simple = 1;
	  else if (tree_int_cst_equal (step, integer_minus_one_node))
	    simple = -1;
	}
      else
	dovar_decl
	  = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
	{
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
	  TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
						boolean_type_node, dovar, to);
	  TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
	  TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
						TREE_VEC_ELT (incr, i));
	}
      else
	{
	  /* STEP is not 1 or -1.  Use:
	     for (count = 0; count < (to + step - from) / step; count++)
	       {
		 dovar = from + count * step;
		 body;
	       cycle_label:;
	       }  */
	  tmp = fold_build2 (MINUS_EXPR, type, step, from);
	  tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
	  tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
	  tmp = gfc_evaluate_now (tmp, pblock);
	  count = gfc_create_var (type, "count");
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
					     build_int_cst (type, 0));
	  TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
						count, tmp);
	  TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
						build_int_cst (type, 1));
	  TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
						count, TREE_VEC_ELT (incr, i));

	  /* Initialize DOVAR.  */
	  tmp = fold_build2 (MULT_EXPR, type, count, step);
	  tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
	  dovar_init = tree_cons (dovar, tmp, dovar_init);
	}

      if (!dovar_found)
	{
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = dovar_decl;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}
      else if (dovar_found == 2)
	{
	  tree c = NULL;

	  tmp = NULL;
	  if (!simple)
	    {
	      /* If dovar is lastprivate, but a different counter is used,
		 dovar += step needs to be added to
		 OMP_CLAUSE_LASTPRIVATE_STMT; otherwise the copied dovar
		 will have the value on entry of the last loop, rather
		 than the value after the iterator increment.  */
	      tmp = gfc_evaluate_now (step, pblock);
	      tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
	      tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
	      for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
		    break;
		  }
	    }
	  if (c == NULL && par_clauses != NULL)
	    {
	      for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    tree l = build_omp_clause (input_location,
					       OMP_CLAUSE_LASTPRIVATE);
		    OMP_CLAUSE_DECL (l) = dovar_decl;
		    OMP_CLAUSE_CHAIN (l) = omp_clauses;
		    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
		    omp_clauses = l;
		    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
		    break;
		  }
	    }
	  gcc_assert (simple || c != NULL);
	}
      if (!simple)
	{
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = count;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}

      if (i + 1 < collapse)
	code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
			   TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later. We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}