Example #1
static bool
gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
{
    tree parms, p;
    unsigned int i, nargs;

    /* Calls to internal functions always match their signature.  */
    if (gimple_call_internal_p (stmt))
        return true;

    nargs = gimple_call_num_args (stmt);

    /* Get argument types for verification.  */
    if (fndecl)
        parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    else
        parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

    /* Verify that the type of each argument matches that of the function
       declaration.  If we cannot verify this or there is a mismatch,
       return false.  */
    if (fndecl && DECL_ARGUMENTS (fndecl))
    {
        for (i = 0, p = DECL_ARGUMENTS (fndecl);
                i < nargs;
                i++, p = DECL_CHAIN (p))
        {
            tree arg;
            /* We cannot distinguish a varargs function from the case
               of excess parameters; still, deferring the inlining decision
               to the callee is possible.  */
            if (!p)
                break;
            arg = gimple_call_arg (stmt, i);
            if (p == error_mark_node
                    || arg == error_mark_node
                    || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                        && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
                return false;
        }
        if (args_count_match && p)
            return false;
    }
    else if (parms)
    {
        for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
            tree arg;
            /* If this is a varargs function defer inlining decision
               to callee.  */
            if (!p)
                break;
            arg = gimple_call_arg (stmt, i);
            if (TREE_VALUE (p) == error_mark_node
                    || arg == error_mark_node
                    || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
                    || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                        && !fold_convertible_p (TREE_VALUE (p), arg)))
                return false;
        }
    }
    else
    {
        if (nargs != 0)
            return false;
    }
    return true;
}
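
As an illustration of what this verifier rejects, here is a hedged source-level sketch in plain C (hypothetical names, not GCC internals): the call passes a struct where the callee declares an int, a mismatch that is neither compatible nor fold-convertible, so gimple_check_call_args would return false for the corresponding call statement.

/* Hypothetical example of a call the check above would reject:
   `struct pair` neither matches the declared `int` parameter nor
   is fold-convertible to it.  */
struct pair { int a, b; };

static int callee (int x) { return x + 1; }

int
caller (struct pair p)
{
  /* The cast hides the mismatch from the front end; the middle end
     still sees a call whose argument type disagrees with the
     DECL_ARGUMENTS of the callee.  */
  int (*fp) (struct pair) = (int (*) (struct pair)) callee;
  return fp (p);
}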
Example #2
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args,
			  enum machine_mode builtin_mode)
{
  rtx pat;
  rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand.  */
  int opc = 0;

  if (have_retval)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[opc++] = target;
    }

  for (;;)
    {
      builtin_simd_arg thisarg = args[opc - have_retval];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
	  enum machine_mode mode = insn_data[icode].operand[opc].mode;
	  op[opc] = expand_normal (arg);

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		op[opc] = convert_memory_address (Pmode, op[opc]);
	      /* gcc_assert (GET_MODE (op[opc]) == mode);  */
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
		op[opc] = copy_to_mode_reg (mode, op[opc]);
	      break;

	    case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
	      gcc_assert (opc > 1);
	      if (CONST_INT_P (op[opc]))
		{
		  aarch64_simd_lane_bounds (op[opc], 0,
					    GET_MODE_NUNITS (builtin_mode),
					    exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] =
		    GEN_INT (ENDIAN_LANE_N (builtin_mode, INTVAL (op[opc])));
		}
	      goto constant_arg;

	    case SIMD_ARG_LANE_INDEX:
	      /* Must be a previous operand into which this is an index.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  aarch64_simd_lane_bounds (op[opc],
					    0, GET_MODE_NUNITS (vmode), exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[opc])));
		}
	      /* Fall through - if the lane index isn't a constant then
		 the next case will error.  */
	    case SIMD_ARG_CONSTANT:
constant_arg:
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
	      {
		error ("%Kargument %d must be a constant immediate",
		       exp, opc + 1 - have_retval);
		return const0_rtx;
	      }
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  opc++;
	}
    }

  switch (opc)
    {
    case 1:
      pat = GEN_FCN (icode) (op[0]);
      break;

    case 2:
      pat = GEN_FCN (icode) (op[0], op[1]);
      break;

    case 3:
      pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;

    case 4:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;

    case 5:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;

    case 6:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;

    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
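
The lane-index cases above funnel GCC-vector-extension lane numbers through ENDIAN_LANE_N before they reach the RTL. As a hedged sketch of the assumed semantics (the real macro lives in the aarch64 backend): on big-endian targets the architectural lane order is the mirror image of GCC's element order, so the index is reflected; on little-endian the two coincide.

/* Sketch of the remapping assumed by the SIMD_ARG_LANE_INDEX and
   SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX cases above; hypothetical
   name, mirroring what ENDIAN_LANE_N is assumed to compute.  */
#define SKETCH_ENDIAN_LANE_N(NUNITS, N) \
  (BYTES_BIG_ENDIAN ? (NUNITS) - 1 - (N) : (N))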
Example #3
bool
gcc_cp_dump_tree (void* dump_info, tree t)
{
  enum tree_code code;
  dump_info_p di = (dump_info_p) dump_info;

  /* Figure out what kind of node this is.  */
  code = TREE_CODE (t);

  if (DECL_P (t))
    {
      if (DECL_LANG_SPECIFIC (t) && DECL_LANGUAGE (t) != lang_cplusplus)
	gcc_dump_string_field (di, "lang", language_to_string (DECL_LANGUAGE (t)));
    }

  switch (code)
    {
    case IDENTIFIER_NODE:
      if (IDENTIFIER_OPNAME_P (t))
	{
	  gcc_dump_string_field (di, "note", "operator");
	  return true;
	}
      else if (IDENTIFIER_TYPENAME_P (t))
	{
	  dump_child ("tynm", TREE_TYPE (t));
	  return true;
	}
      break;

    case OFFSET_TYPE:
      gcc_dump_string_field (di, "note", "ptrmem");
      dump_child ("ptd", TYPE_PTRMEM_POINTED_TO_TYPE (t));
      dump_child ("cls", TYPE_PTRMEM_CLASS_TYPE (t));
      return true;

    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (t))
	{
	  gcc_dump_string_field (di, "note", "ptrmem");
	  dump_child ("ptd", TYPE_PTRMEM_POINTED_TO_TYPE (t));
	  dump_child ("cls", TYPE_PTRMEM_CLASS_TYPE (t));
	  return true;
	}
      /* Fall through.  */

    case UNION_TYPE:
      /* Is it a type used as a base? */
      if (TYPE_CONTEXT (t) && TREE_CODE (TYPE_CONTEXT (t)) == TREE_CODE (t)
	  && CLASSTYPE_AS_BASE (TYPE_CONTEXT (t)) == t)
	{
	  dump_child ("bfld", TYPE_CONTEXT (t));
	  return true;
	}

      if (! MAYBE_CLASS_TYPE_P (t))
	break;

      dump_child ("vfld", TYPE_VFIELD (t));
      if (CLASSTYPE_TEMPLATE_SPECIALIZATION (t))
	gcc_dump_string (di, "spec");

      if (!dump_flag (di, TDF_SLIM, t) && TYPE_BINFO (t))
	{
	  int i;
	  tree binfo;
	  tree base_binfo;

	  for (binfo = TYPE_BINFO (t), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); ++i)
	    {
	      dump_child ("base", BINFO_TYPE (base_binfo));
	      if (BINFO_VIRTUAL_P (base_binfo))
		gcc_dump_string_field (di, "spec", "virt");
	      dump_access (di, base_binfo);
	    }
	}
      break;

    case FIELD_DECL:
      dump_access (di, t);
      if (DECL_MUTABLE_P (t))
	gcc_dump_string_field (di, "spec", "mutable");
      break;

    case VAR_DECL:
      if (TREE_CODE (CP_DECL_CONTEXT (t)) == RECORD_TYPE)
	dump_access (di, t);
      if (TREE_STATIC (t) && !TREE_PUBLIC (t))
	gcc_dump_string_field (di, "link", "static");
      break;

    case FUNCTION_DECL:
      if (!DECL_THUNK_P (t))
	{
	  if (DECL_OVERLOADED_OPERATOR_P (t)) {
	    gcc_dump_string_field (di, "note", "operator");
	    dump_op (di, t);
	  }
	  if (DECL_FUNCTION_MEMBER_P (t))
	    {
	      gcc_dump_string_field (di, "note", "member");
	      dump_access (di, t);
	    }
	  if (DECL_PURE_VIRTUAL_P (t))
	    gcc_dump_string_field (di, "spec", "pure");
	  if (DECL_VIRTUAL_P (t))
	    gcc_dump_string_field (di, "spec", "virt");
	  if (DECL_CONSTRUCTOR_P (t))
	    gcc_dump_string_field (di, "note", "constructor");
	  if (DECL_DESTRUCTOR_P (t))
	    gcc_dump_string_field (di, "note", "destructor");
	  if (DECL_CONV_FN_P (t))
	    gcc_dump_string_field (di, "note", "conversion");
	  if (DECL_GLOBAL_CTOR_P (t))
	    gcc_dump_string_field (di, "note", "global init");
	  if (DECL_GLOBAL_DTOR_P (t))
	    gcc_dump_string_field (di, "note", "global fini");
	  if (DECL_FRIEND_PSEUDO_TEMPLATE_INSTANTIATION (t))
	    gcc_dump_string_field (di, "note", "pseudo tmpl");
	}
      else
	{
	  tree virt = THUNK_VIRTUAL_OFFSET (t);

	  gcc_dump_string_field (di, "note", "thunk");
	  if (DECL_THIS_THUNK_P (t))
	    gcc_dump_string_field (di, "note", "this adjusting");
	  else
	    {
	      gcc_dump_string_field (di, "note", "result adjusting");
	      if (virt)
		virt = BINFO_VPTR_FIELD (virt);
	    }
	  gcc_dump_int (di, "fixd", THUNK_FIXED_OFFSET (t));
	  if (virt)
	    gcc_dump_int (di, "virt", tree_low_cst (virt, 0));
	  dump_child ("fn", DECL_INITIAL (t));
	}
      break;

    case NAMESPACE_DECL:
      if (DECL_NAMESPACE_ALIAS (t))
	dump_child ("alis", DECL_NAMESPACE_ALIAS (t));
      else if (!dump_flag (di, TDF_SLIM, t))
	dump_child ("dcls", cp_namespace_decls (t));
      break;

    case TEMPLATE_DECL:
      dump_child ("rslt", DECL_TEMPLATE_RESULT (t));
      dump_child ("inst", DECL_TEMPLATE_INSTANTIATIONS (t));
      dump_child ("spcs", DECL_TEMPLATE_SPECIALIZATIONS (t));
      dump_child ("prms", DECL_TEMPLATE_PARMS (t));
      break;

    case OVERLOAD:
      dump_child ("crnt", OVL_CURRENT (t));
      dump_child ("chan", OVL_CHAIN (t));
      break;

    case TRY_BLOCK:
      gcc_dump_stmt (di, t);
      if (CLEANUP_P (t))
	gcc_dump_string_field (di, "note", "cleanup");
      dump_child ("body", TRY_STMTS (t));
      dump_child ("hdlr", TRY_HANDLERS (t));
      break;

    case EH_SPEC_BLOCK:
      gcc_dump_stmt (di, t);
      dump_child ("body", EH_SPEC_STMTS (t));
      dump_child ("raises", EH_SPEC_RAISES (t));
      break;

    case PTRMEM_CST:
      dump_child ("clas", PTRMEM_CST_CLASS (t));
      dump_child ("mbr", PTRMEM_CST_MEMBER (t));
      break;

    case THROW_EXPR:
      /* These nodes are unary, but do not have code class `1'.  */
      dump_child ("op 0", TREE_OPERAND (t, 0));
      break;

//    case AGGR_INIT_EXPR:
//      {
//	int i = 0;
//	tree arg;
//	aggr_init_expr_arg_iterator iter;
//	gcc_dump_int (di, "ctor", AGGR_INIT_VIA_CTOR_P (t));
//	dump_child ("fn", AGGR_INIT_EXPR_FN (t));
//	FOR_EACH_AGGR_INIT_EXPR_ARG (arg, iter, t)
//	  {
//	    char buffer[32];
//	    sprintf (buffer, "%u", i);
//	    dump_child (buffer, arg);
//	    i++;
//	  }
//	dump_child ("decl", AGGR_INIT_EXPR_SLOT (t));
//      }
//      break;

    case HANDLER:
      gcc_dump_stmt (di, t);
      dump_child ("parm", HANDLER_PARMS (t));
      dump_child ("body", HANDLER_BODY (t));
      break;

    case MUST_NOT_THROW_EXPR:
      gcc_dump_stmt (di, t);
      dump_child ("body", TREE_OPERAND (t, 0));
      break;

    case USING_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("nmsp", USING_STMT_NAMESPACE (t));
      break;

    case CLEANUP_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("decl", CLEANUP_DECL (t));
      dump_child ("expr", CLEANUP_EXPR (t));
      dump_child ("body", CLEANUP_BODY (t));
      break;

    case IF_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("cond", IF_COND (t));
      dump_child ("then", THEN_CLAUSE (t));
      dump_child ("else", ELSE_CLAUSE (t));
      break;

    case BREAK_STMT:
    case CONTINUE_STMT:
      gcc_dump_stmt (di, t);
      break;

    case DO_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("body", DO_BODY (t));
      dump_child ("cond", DO_COND (t));
      break;

    case FOR_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("init", FOR_INIT_STMT (t));
      dump_child ("cond", FOR_COND (t));
      dump_child ("expr", FOR_EXPR (t));
      dump_child ("body", FOR_BODY (t));
      break;

    case RANGE_FOR_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("decl", RANGE_FOR_DECL (t));
      dump_child ("expr", RANGE_FOR_EXPR (t));
      dump_child ("body", RANGE_FOR_BODY (t));
      break;

    case SWITCH_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("cond", SWITCH_STMT_COND (t));
      dump_child ("body", SWITCH_STMT_BODY (t));
      break;

    case WHILE_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("cond", WHILE_COND (t));
      dump_child ("body", WHILE_BODY (t));
      break;

    case STMT_EXPR:
      dump_child ("stmt", STMT_EXPR_STMT (t));
      break;

    case EXPR_STMT:
      gcc_dump_stmt (di, t);
      dump_child ("expr", EXPR_STMT_EXPR (t));
      break;

    default:
      break;
    }

  return gcc_c_dump_tree (di, t);
}
Example #4
static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
	{
	  stmt = gimple_build_goto (get_bc_label (bc_break));
	  gimple_set_location (stmt, stmt_locus);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
	 back through the main gimplifier to lower it.  Given that we
	 have to gimplify the loop body NOW so that we can resolve
	 break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
	 out of the loop, or to the top of it.  If there's no exit condition,
	 then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
	{
	  if (cond != error_mark_node)
	    { 
	      gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
	      stmt = gimple_build_cond (NE_EXPR, cond,
					build_int_cst (TREE_TYPE (cond), 0),
					gimple_label_label (top),
					get_bc_label (bc_break));
	      gimple_seq_add_stmt (&exit_seq, stmt);
	    }

	  if (cond_is_first)
	    {
	      if (incr)
		{
		  entry = gimple_build_label 
		    (create_artificial_label (stmt_locus));
		  stmt = gimple_build_goto (gimple_label_label (entry));
		}
	      else
		stmt = gimple_build_goto (get_bc_label (bc_continue));
	      gimple_set_location (stmt, stmt_locus);
	      gimple_seq_add_stmt (&stmt_list, stmt);
	    }
	}
      else
	{
	  stmt = gimple_build_goto (gimple_label_label (top));
	  gimple_seq_add_stmt (&exit_seq, stmt);
	}
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}
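
The sequence assembled at the end of the function corresponds to the following goto skeleton; this is a schematic of the common case (condition first, with an increment), not literal GIMPLE output.

/* Schematic of stmt_list for a `for`-style loop, i.e. cond_is_first
   with a non-null increment:

        goto entry;                 // stmt_list: skip body/incr on entry
   top:
        <body>                      // body_seq
   continue_label:                  // appended by finish_bc_block
        <incr>                      // incr_seq
   entry:
        if (cond != 0) goto top;    // exit_seq
        else goto break_label;
   break_label:                     // appended by the final finish_bc_block
*/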
Example #5
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (TREE_CODE (stmt) == VAR_DECL
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk; pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* A private clause doesn't cause any references to the
	       var in outer contexts, so avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed; tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (TREE_CODE (decl) == VAR_DECL
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
	 BLOCK_VARS chain.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack, i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
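	    /* Fall through.  */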
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #6
void
do_jump_1 (enum tree_code code, tree op0, tree op1,
	   rtx_code_label *if_false_label, rtx_code_label *if_true_label,
	   int prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
	  do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
				     prob);
        else
	  do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
			       prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
	  do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
				     inv (prob));
        else
	  do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
			       prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
			     prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
			     prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
			     prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
			     prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
			   prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
			   prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
			   prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
			   prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
			   prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
			   prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions. So the first condition is false half the total
           probability of being false. The second condition is false the other
           half of the total probability of being false, so its jump has a false
           probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            int false_prob = inv (prob);
            int op0_false_prob = false_prob / 2;
            int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
                                                     inv (op0_false_prob));
            /* Get the probability that each jump below is true.  */
            op0_prob = inv (op0_false_prob);
            op1_prob = inv (op1_false_prob);
          }
	if (if_false_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
	    do_jump (op0, drop_through_label, NULL, op0_prob);
	    do_jump (op1, NULL, if_true_label, op1_prob);
          }
        else
          {
	    do_jump (op0, if_false_label, NULL, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions. So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
        int op0_prob = -1;
        int op1_prob = -1;
        if (prob != -1)
          {
            op0_prob = prob / 2;
            op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
	  }
	if (if_true_label == NULL)
	  {
	    drop_through_label = gen_label_rtx ();
	    do_jump (op0, NULL, drop_through_label, op0_prob);
	    do_jump (op1, if_false_label, NULL, op1_prob);
	  }
	else
	  {
	    do_jump (op0, NULL, if_true_label, op0_prob);
	    do_jump (op1, if_false_label, if_true_label, op1_prob);
	  }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
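
To make the TRUTH_ANDIF_EXPR arithmetic concrete, here is a worked example as a comment. It assumes the usual probability base of 10000 and that GCOV_COMPUTE_SCALE (n, d) computes roughly n * 10000 / d; both are assumptions about the surrounding infrastructure, not quotes of it.

/* Worked TRUTH_ANDIF_EXPR example, assuming inv (p) == 10000 - p
   and GCOV_COMPUTE_SCALE (n, d) ~= n * 10000 / d:

     prob           = 9000                       // A && B true 90% overall
     false_prob     = inv (9000)         = 1000
     op0_false_prob = 1000 / 2           =  500  // half the misses on A
     op0_prob       = inv (500)          = 9500
     op1_false_prob = 500 * 10000 / 9500 ~  526  // rescaled to the paths
     op1_prob       = inv (526)          = 9474  // that actually reach B

   Each jump carries half of the total false probability, the second
   one expressed relative to the executions that reach it.  */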
Example #7
void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  wtd.omp_ctx = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
Example #8
unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains and compute the object sizes
	     that can be computed.  At the end, the osi.reexamine
	     bitmap contains the variables that were found in
	     dependency cycles and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
Example #9
rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (tree_to_double_int (addr->offset)
	     .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
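
When really_expand is false the function never builds fresh RTL per query; it keeps one canonical address pattern per shape and patches the constant slots. A hedged sketch of the shape key follows (hypothetical helper; TEMPL_IDX is assumed to pack the same booleans).

/* Hypothetical reconstruction of the template index: one bit per
   optional address part, prefixed by the address space.  Each
   distinct combination maps to one cached RTX in
   mem_addr_template_list whose step/offset slots get patched.  */
static unsigned
sketch_templ_idx (unsigned as, int symbol_p, int base_p,
		  int index_p, int step_p, int offset_p)
{
  unsigned idx = as;
  idx = (idx << 1) | (symbol_p != 0);
  idx = (idx << 1) | (base_p != 0);
  idx = (idx << 1) | (index_p != 0);
  idx = (idx << 1) | (step_p != 0);
  idx = (idx << 1) | (offset_p != 0);
  return idx;
}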
Example #10
bool
convert_affine_scev (struct loop *loop, tree type,
		     tree *base, tree *step, tree at_stmt,
		     bool use_overflow_semantics)
{
  tree ct = TREE_TYPE (*step);
  bool enforce_overflow_semantics;
  bool must_check_src_overflow, must_check_rslt_overflow;
  tree new_base, new_step;

  /* If we cannot perform arithmetic in TYPE, avoid creating an scev.  */
  if (avoid_arithmetics_in_type_p (type))
    return false;

  /* In general,
     (TYPE) (BASE + STEP * i) = (TYPE) BASE + (TYPE -- sign extend) STEP * i,
     but we must check some assumptions.
     
     1) If [BASE, +, STEP] wraps, the equation is not valid when precision
        of CT is smaller than the precision of TYPE.  For example, when we
	cast unsigned char [254, +, 1] to unsigned, the values on left side
	are 254, 255, 0, 1, ..., but those on the right side are
	254, 255, 256, 257, ...
     2) In case that we must also preserve the fact that signed ivs do not
        overflow, we must additionally check that the new iv does not wrap.
	For example, unsigned char [125, +, 1] casted to signed char could
	become a wrapping variable with values 125, 126, 127, -128, -127, ...,
	which would confuse optimizers that assume that this does not
	happen.  */
  must_check_src_overflow = TYPE_PRECISION (ct) < TYPE_PRECISION (type);

  enforce_overflow_semantics = (use_overflow_semantics
				&& nowrap_type_p (type));
  if (enforce_overflow_semantics)
    {
      /* We can avoid checking whether the result overflows in the following
	 cases:

	 -- must_check_src_overflow is true, and the range of TYPE is a
	    superset of the range of CT -- i.e., in all cases except when
	    CT is signed and TYPE is unsigned.
         -- both CT and TYPE have the same precision and signedness.  */
      if (must_check_src_overflow)
	{
	  if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (ct))
	    must_check_rslt_overflow = true;
	  else
	    must_check_rslt_overflow = false;
	}
      else if (TYPE_UNSIGNED (ct) == TYPE_UNSIGNED (type)
	       && TYPE_PRECISION (ct) == TYPE_PRECISION (type))
	must_check_rslt_overflow = false;
      else
	must_check_rslt_overflow = true;
    }
  else
    must_check_rslt_overflow = false;

  if (must_check_src_overflow
      && scev_probably_wraps_p (*base, *step, at_stmt, loop,
				use_overflow_semantics))
    return false;

  new_base = chrec_convert_1 (type, *base, at_stmt,
			      use_overflow_semantics);
  /* The step must be sign extended, regardless of the signedness
     of CT and TYPE.  This only needs to be handled specially when
     CT is unsigned -- to avoid e.g. unsigned char [100, +, 255]
     (with values 100, 99, 98, ...) from becoming signed or unsigned
     [100, +, 255] with values 100, 355, ...; the sign-extension is 
     performed by default when CT is signed.  */
  new_step = *step;
  if (TYPE_PRECISION (type) > TYPE_PRECISION (ct) && TYPE_UNSIGNED (ct))
    new_step = chrec_convert_1 (signed_type_for (ct), new_step, at_stmt,
				use_overflow_semantics);
  new_step = chrec_convert_1 (type, new_step, at_stmt, use_overflow_semantics);

  if (automatically_generated_chrec_p (new_base)
      || automatically_generated_chrec_p (new_step))
    return false;

  if (must_check_rslt_overflow
      /* Note that in this case we cannot use the fact that signed variables
	 do not overflow, as this is what we are verifying for the new iv.  */
      && scev_probably_wraps_p (new_base, new_step, at_stmt, loop, false))
    return false;

  *base = new_base;
  *step = new_step;
  return true;
}
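
The precision caveat in assumption 1) above can be seen directly in plain C; here is a minimal standalone illustration of why widening unsigned char [254, +, 1] is unsafe without the overflow check.

#include <stdio.h>

/* Standalone illustration of assumption 1): widening
   unsigned char [254, +, 1] changes the value sequence once the
   narrow induction variable wraps.  */
int
main (void)
{
  unsigned char narrow = 254; /* the original iv, in type CT  */
  unsigned int wide = 254;    /* naive (TYPE) BASE + (TYPE) STEP * i  */
  int i;

  for (i = 0; i < 4; i++, narrow++, wide++)
    printf ("%3u %3u\n", (unsigned) narrow, wide);
  /* Prints 254 254, 255 255, then 0 256 and 1 257 -- the sequences
     diverge exactly where the narrow iv wraps, which is why
     must_check_src_overflow triggers scev_probably_wraps_p.  */
  return 0;
}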
Example #11
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == INDIRECT_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi)
	sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					  object_size_type);
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == INDIRECT_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = TREE_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = TREE_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == INDIRECT_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
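
The object_size_type encoding used throughout (bit 0 selects the enclosing subobject, bit 1 selects minimum rather than maximum) mirrors the second argument of the user-visible __builtin_object_size. A small usage example follows; the commented values assume a typical optimizing GCC build.

#include <stdio.h>

struct s { char buf[10]; char post[8]; };

int
main (void)
{
  struct s v;
  char *p = v.buf;

  /* Type 0: maximum bytes from p to the end of the whole object.  */
  printf ("%zu\n", __builtin_object_size (p, 0)); /* 18 */
  /* Type 1: restrict to the closest enclosing subobject (buf).  */
  printf ("%zu\n", __builtin_object_size (p, 1)); /* 10 */
  /* Types 2 and 3: same, but minimum instead of maximum; here the
     object is fully known, so minimum equals maximum.  */
  printf ("%zu\n", __builtin_object_size (p, 2)); /* 18 */
  return 0;
}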
Example #12
static void
gen_conditions_for_pow_int_base (tree base, tree expn,
                                 vec<gimple> conds,
                                 unsigned *nconds)
{
  gimple base_def;
  tree base_val0;
  tree int_type;
  tree temp, tempn;
  tree cst0;
  gimple stmt1, stmt2;
  int bit_sz, max_exp;
  inp_domain exp_domain;

  base_def = SSA_NAME_DEF_STMT (base);
  base_val0 = gimple_assign_rhs1 (base_def);
  int_type = TREE_TYPE (base_val0);
  bit_sz = TYPE_PRECISION (int_type);
  gcc_assert (bit_sz > 0
              && bit_sz <= MAX_BASE_INT_BIT_SIZE);

  /* Determine the max exp argument value according to
     the size of the base integer.  The max exp value
     is conservatively estimated assuming IEEE754 double
     precision format.  */
  if (bit_sz == 8)
    max_exp = 128;
  else if (bit_sz == 16)
    max_exp = 64;
  else
    {
      gcc_assert (bit_sz == MAX_BASE_INT_BIT_SIZE);
      max_exp = 32;
    }

  /* For pow ((double)x, y), generate the following conditions:
     cond 1:
     temp1 = x;
     if (temp1 <= 0)

     cond 2:
     temp2 = y;
     if (temp2 > max_exp_real_cst)  */

  /* Generate conditions in reverse order -- first
     the condition for the exp argument.  */

  exp_domain = get_domain (0, false, false,
                           max_exp, true, true);

  gen_conditions_for_domain (expn, exp_domain,
                             conds, nconds);

  /* Now generate condition for the base argument.
     Note it does not use the helper function
     gen_conditions_for_domain because the base
     type is integer.  */

  /* Push a separator.  */
  conds.quick_push (NULL);

  temp = create_tmp_var (int_type, "DCE_COND1");
  cst0 = build_int_cst (int_type, 0);
  stmt1 = gimple_build_assign (temp, base_val0);
  tempn = make_ssa_name (temp, stmt1);
  gimple_assign_set_lhs (stmt1, tempn);
  stmt2 = gimple_build_cond (LE_EXPR, tempn, cst0, NULL_TREE, NULL_TREE);

  conds.quick_push (stmt1);
  conds.quick_push (stmt2);
  (*nconds)++;
}
Example #13
static bool
check_pow (gimple pow_call)
{
  tree base, expn;
  enum tree_code bc, ec;

  if (gimple_call_num_args (pow_call) != 2)
    return false;

  base = gimple_call_arg (pow_call, 0);
  expn = gimple_call_arg (pow_call, 1);

  if (!check_target_format (expn))
    return false;

  bc = TREE_CODE (base);
  ec = TREE_CODE (expn);

  /* Folding candidates are not interesting.
     We could actually assert that it is already folded.  */
  if (ec == REAL_CST && bc == REAL_CST)
    return false;

  if (bc == REAL_CST)
    {
      /* Only handle a fixed range of constants.  */
      REAL_VALUE_TYPE mv;
      REAL_VALUE_TYPE bcv = TREE_REAL_CST (base);
      if (REAL_VALUES_EQUAL (bcv, dconst1))
        return false;
      if (REAL_VALUES_LESS (bcv, dconst1))
        return false;
      real_from_integer (&mv, TYPE_MODE (TREE_TYPE (base)), 256, 0, 1);
      if (REAL_VALUES_LESS (mv, bcv))
        return false;
      return true;
    }
  else if (bc == SSA_NAME)
    {
      tree base_val0, type;
      gimple base_def;
      int bit_sz;

      /* Only handle cases where the base value is converted
         from an integer value.  */
      base_def = SSA_NAME_DEF_STMT (base);
      if (gimple_code (base_def) != GIMPLE_ASSIGN)
        return false;

      if (gimple_assign_rhs_code (base_def) != FLOAT_EXPR)
        return false;
      base_val0 = gimple_assign_rhs1 (base_def);

      type = TREE_TYPE (base_val0);
      if (TREE_CODE (type) != INTEGER_TYPE)
        return false;
      bit_sz = TYPE_PRECISION (type);
      /* If the type of the base is too wide,
         the resulting shrink wrapping condition
	 will be too conservative.  */
      if (bit_sz > MAX_BASE_INT_BIT_SIZE)
        return false;

      return true;
    }
  else
    return false;
}
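
For context, check_pow and gen_conditions_for_pow_int_base above serve the conditional dead-call-elimination shrink-wrapping of pow: when the result is otherwise unused, the call is kept only for its errno effect and is guarded so it executes only on inputs that can actually fail. A source-level schematic follows (the pass works on GIMPLE; MAX_EXP stands for the 128/64/32 bound chosen from the base width).

/* Schematic of the transformation these checks enable, for
   pow ((double) x, y) with a dead result and integral x:

   before:   pow ((double) x, y);           // kept only for errno

   after:    if (x <= 0 || y > MAX_EXP)     // conditions from
               pow ((double) x, y);         // gen_conditions_for_pow_int_base
*/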
Example #14
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
    gimple stmt = gsi_stmt (*gsi);
    location_t loc = gimple_location (stmt);
    tree cont_label = create_artificial_label (loc);
    tree next_label = create_artificial_label (loc);
    tree dest, t, arg;
    gimple g;

    /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
       passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
    FORCED_LABEL (next_label) = 1;

    dest = gimple_call_lhs (stmt);

    /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
    arg = build_addr (next_label, current_function_decl);
    t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
    g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
    gimple_set_location (g, loc);
    gimple_set_block (g, gimple_block (stmt));
    gsi_insert_before (gsi, g, GSI_SAME_STMT);

    /* Build 'DEST = 0' and insert.  */
    if (dest)
    {
        g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
        gimple_set_location (g, loc);
        gimple_set_block (g, gimple_block (stmt));
        gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

    /* Build 'goto CONT_LABEL' and insert.  */
    g = gimple_build_goto (cont_label);
    gsi_insert_before (gsi, g, GSI_SAME_STMT);

    /* Build 'NEXT_LABEL:' and insert.  */
    g = gimple_build_label (next_label);
    gsi_insert_before (gsi, g, GSI_SAME_STMT);

    /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
    arg = build_addr (next_label, current_function_decl);
    t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
    g = gimple_build_call (t, 1, arg);
    gimple_set_location (g, loc);
    gimple_set_block (g, gimple_block (stmt));
    gsi_insert_before (gsi, g, GSI_SAME_STMT);

    /* Build 'DEST = 1' and insert.  */
    if (dest)
    {
        g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                 integer_one_node));
        gimple_set_location (g, loc);
        gimple_set_block (g, gimple_block (stmt));
        gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

    /* Build 'CONT_LABEL:' and insert.  */
    g = gimple_build_label (cont_label);
    gsi_insert_before (gsi, g, GSI_SAME_STMT);

    /* Remove the call to __builtin_setjmp.  */
    gsi_remove (gsi, false);
}
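
Taken together, the statements inserted above replace `DEST = __builtin_setjmp (BUF)` with the following schematic sequence (pseudo-code as a comment; `&&label` stands for the label address passed via build_addr).

/* Lowered form of DEST = __builtin_setjmp (BUF):

       __builtin_setjmp_setup (BUF, &&next_label);
       DEST = 0;                        // direct path: setjmp returned 0
       goto cont_label;
   next_label:                          // __builtin_longjmp lands here
       __builtin_setjmp_receiver (&&next_label);
       DEST = 1;                        // path back from longjmp
   cont_label:
       ...                              // code following the original call
*/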
Example #15
static void
dequeue_and_dump (dump_info_p di)
{
  dump_queue_p dq;
  splay_tree_node stn;
  dump_node_info_p dni;
  tree t;
  unsigned int index;
  enum tree_code code;
  enum tree_code_class code_class;
  const char* code_name;

  /* Get the next node from the queue.  */
  dq = di->queue;
  stn = dq->node;
  t = (tree) stn->key;
  dni = (dump_node_info_p) stn->value;
  index = dni->index;

  /* Remove the node from the queue, and put it on the free list.  */
  di->queue = dq->next;
  if (!di->queue)
    di->queue_end = 0;
  dq->next = di->free_list;
  di->free_list = dq;

  /* Print the node index.  */
  dump_index (di, index);
  /* And the type of node this is.  */
  if (dni->binfo_p)
    code_name = "binfo";
  else
    code_name = tree_code_name[(int) TREE_CODE (t)];
  fprintf (di->stream, "%-16s ", code_name);
  di->column = 25;

  /* Figure out what kind of node this is.  */
  code = TREE_CODE (t);
  code_class = TREE_CODE_CLASS (code);

  /* Although BINFOs are TREE_VECs, we dump them specially so as to be
     more informative.  */
  if (dni->binfo_p)
    {
      unsigned ix;
      tree base;
      vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (t);

      dump_child ("type", BINFO_TYPE (t));

      if (BINFO_VIRTUAL_P (t))
	dump_string_field (di, "spec", "virt");

      dump_int (di, "bases", BINFO_N_BASE_BINFOS (t));
      for (ix = 0; BINFO_BASE_ITERATE (t, ix, base); ix++)
	{
	  tree access = (accesses ? (*accesses)[ix] : access_public_node);
	  const char *string = NULL;

	  if (access == access_public_node)
	    string = "pub";
	  else if (access == access_protected_node)
	    string = "prot";
	  else if (access == access_private_node)
	    string = "priv";
	  else
	    gcc_unreachable ();

	  dump_string_field (di, "accs", string);
	  queue_and_dump_index (di, "binf", base, DUMP_BINFO);
	}

      goto done;
    }

  /* We can knock off a bunch of expression nodes in exactly the same
     way.  */
  if (IS_EXPR_CODE_CLASS (code_class))
    {
      /* If we're dumping children, dump them now.  */
      queue_and_dump_type (di, t);

      switch (code_class)
	{
	case tcc_unary:
	  dump_child ("op 0", TREE_OPERAND (t, 0));
	  break;

	case tcc_binary:
	case tcc_comparison:
	  dump_child ("op 0", TREE_OPERAND (t, 0));
	  dump_child ("op 1", TREE_OPERAND (t, 1));
	  break;

	case tcc_expression:
	case tcc_reference:
	case tcc_statement:
	case tcc_vl_exp:
	  /* These nodes are handled explicitly below.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else if (DECL_P (t))
    {
      expanded_location xloc;
      /* All declarations have names.  */
      if (DECL_NAME (t))
	dump_child ("name", DECL_NAME (t));
      if (DECL_ASSEMBLER_NAME_SET_P (t)
	  && DECL_ASSEMBLER_NAME (t) != DECL_NAME (t))
	dump_child ("mngl", DECL_ASSEMBLER_NAME (t));
      if (DECL_ABSTRACT_ORIGIN (t))
        dump_child ("orig", DECL_ABSTRACT_ORIGIN (t));
      /* And types.  */
      queue_and_dump_type (di, t);
      dump_child ("scpe", DECL_CONTEXT (t));
      /* And a source position.  */
      xloc = expand_location (DECL_SOURCE_LOCATION (t));
      if (xloc.file)
	{
	  const char *filename = lbasename (xloc.file);

	  dump_maybe_newline (di);
	  fprintf (di->stream, "srcp: %s:%-6d ", filename,
		   xloc.line);
	  di->column += 6 + strlen (filename) + 8;
	}
      /* And any declaration can be compiler-generated.  */
      if (CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_DECL_COMMON)
	  && DECL_ARTIFICIAL (t))
	dump_string_field (di, "note", "artificial");
      if (DECL_CHAIN (t) && !dump_flag (di, TDF_SLIM, NULL))
	dump_child ("chain", DECL_CHAIN (t));
    }
  else if (code_class == tcc_type)
    {
      /* All types have qualifiers.  */
      int quals = lang_hooks.tree_dump.type_quals (t);

      if (quals != TYPE_UNQUALIFIED)
	{
	  fprintf (di->stream, "qual: %c%c%c     ",
		   (quals & TYPE_QUAL_CONST) ? 'c' : ' ',
		   (quals & TYPE_QUAL_VOLATILE) ? 'v' : ' ',
		   (quals & TYPE_QUAL_RESTRICT) ? 'r' : ' ');
	  di->column += 14;
	}

      /* All types have associated declarations.  */
      dump_child ("name", TYPE_NAME (t));

      /* All types have a main variant.  */
      if (TYPE_MAIN_VARIANT (t) != t)
	dump_child ("unql", TYPE_MAIN_VARIANT (t));

      /* And sizes.  */
      dump_child ("size", TYPE_SIZE (t));

      /* All types have alignments.  */
      dump_int (di, "algn", TYPE_ALIGN (t));
    }
  else if (code_class == tcc_constant)
    /* All constants can have types.  */
    queue_and_dump_type (di, t);

  /* Give the language-specific code a chance to print something.  If
     it has completely taken care of things, don't bother printing
     anything more ourselves.  */
  if (lang_hooks.tree_dump.dump_tree (di, t))
    goto done;

  /* Now handle the various kinds of nodes.  */
  switch (code)
    {
      int i;

    case IDENTIFIER_NODE:
      dump_string_field (di, "strg", IDENTIFIER_POINTER (t));
      dump_int (di, "lngt", IDENTIFIER_LENGTH (t));
      break;

    case TREE_LIST:
      dump_child ("purp", TREE_PURPOSE (t));
      dump_child ("valu", TREE_VALUE (t));
      dump_child ("chan", TREE_CHAIN (t));
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator it;
	for (i = 0, it = tsi_start (t); !tsi_end_p (it); tsi_next (&it), i++)
	  {
	    char buffer[32];
	    sprintf (buffer, "%u", i);
	    dump_child (buffer, tsi_stmt (it));
	  }
      }
      break;

    case TREE_VEC:
      dump_int (di, "lngt", TREE_VEC_LENGTH (t));
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	{
	  char buffer[32];
	  sprintf (buffer, "%u", i);
	  dump_child (buffer, TREE_VEC_ELT (t, i));
	}
      break;

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      dump_int (di, "prec", TYPE_PRECISION (t));
      dump_string_field (di, "sign", TYPE_UNSIGNED (t) ? "unsigned": "signed");
      dump_child ("min", TYPE_MIN_VALUE (t));
      dump_child ("max", TYPE_MAX_VALUE (t));

      if (code == ENUMERAL_TYPE)
	dump_child ("csts", TYPE_VALUES (t));
      break;

    case REAL_TYPE:
      dump_int (di, "prec", TYPE_PRECISION (t));
      break;

    case FIXED_POINT_TYPE:
      dump_int (di, "prec", TYPE_PRECISION (t));
      dump_string_field (di, "sign", TYPE_UNSIGNED (t) ? "unsigned": "signed");
      dump_string_field (di, "saturating",
			 TYPE_SATURATING (t) ? "saturating": "non-saturating");
      break;

    case POINTER_TYPE:
      dump_child ("ptd", TREE_TYPE (t));
      break;

    case REFERENCE_TYPE:
      dump_child ("refd", TREE_TYPE (t));
      break;

    case METHOD_TYPE:
      dump_child ("clas", TYPE_METHOD_BASETYPE (t));
      /* Fall through.  */

    case FUNCTION_TYPE:
      dump_child ("retn", TREE_TYPE (t));
      dump_child ("prms", TYPE_ARG_TYPES (t));
      break;

    case ARRAY_TYPE:
      dump_child ("elts", TREE_TYPE (t));
      dump_child ("domn", TYPE_DOMAIN (t));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      if (TREE_CODE (t) == RECORD_TYPE)
	dump_string_field (di, "tag", "struct");
      else
	dump_string_field (di, "tag", "union");

      dump_child ("flds", TYPE_FIELDS (t));
      dump_child ("fncs", TYPE_METHODS (t));
      queue_and_dump_index (di, "binf", TYPE_BINFO (t),
			    DUMP_BINFO);
      break;

    case CONST_DECL:
      dump_child ("cnst", DECL_INITIAL (t));
      break;

    case DEBUG_EXPR_DECL:
      dump_int (di, "-uid", DEBUG_TEMP_UID (t));
      /* Fall through.  */

    case VAR_DECL:
    case PARM_DECL:
    case FIELD_DECL:
    case RESULT_DECL:
      if (TREE_CODE (t) == PARM_DECL)
	dump_child ("argt", DECL_ARG_TYPE (t));
      else
	dump_child ("init", DECL_INITIAL (t));
      dump_child ("size", DECL_SIZE (t));
      dump_int (di, "algn", DECL_ALIGN (t));

      if (TREE_CODE (t) == FIELD_DECL)
	{
	  if (DECL_FIELD_OFFSET (t))
	    dump_child ("bpos", bit_position (t));
	}
      else if (TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == PARM_DECL)
	{
	  dump_int (di, "used", TREE_USED (t));
	  if (DECL_REGISTER (t))
	    dump_string_field (di, "spec", "register");
	}
      break;

    case FUNCTION_DECL:
      dump_child ("args", DECL_ARGUMENTS (t));
      if (DECL_EXTERNAL (t))
	dump_string_field (di, "body", "undefined");
      if (TREE_PUBLIC (t))
	dump_string_field (di, "link", "extern");
      else
	dump_string_field (di, "link", "static");
      if (DECL_SAVED_TREE (t) && !dump_flag (di, TDF_SLIM, t))
	dump_child ("body", DECL_SAVED_TREE (t));
      break;

    case INTEGER_CST:
      if (TREE_INT_CST_HIGH (t))
	dump_int (di, "high", TREE_INT_CST_HIGH (t));
      dump_int (di, "low", TREE_INT_CST_LOW (t));
      break;

    case STRING_CST:
      fprintf (di->stream, "strg: %-7s ", TREE_STRING_POINTER (t));
      dump_int (di, "lngt", TREE_STRING_LENGTH (t));
      break;

    case REAL_CST:
      dump_real (di, "valu", TREE_REAL_CST_PTR (t));
      break;

    case FIXED_CST:
      dump_fixed (di, "valu", TREE_FIXED_CST_PTR (t));
      break;

    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case INDIRECT_REF:
    case CLEANUP_POINT_EXPR:
    case SAVE_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* These nodes are unary, but do not have code class `1'.  */
      dump_child ("op 0", TREE_OPERAND (t, 0));
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case INIT_EXPR:
    case MODIFY_EXPR:
    case COMPOUND_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      /* These nodes are binary, but do not have code class `2'.  */
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      dump_child ("op 2", TREE_OPERAND (t, 2));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      dump_child ("op 2", TREE_OPERAND (t, 2));
      dump_child ("op 3", TREE_OPERAND (t, 3));
      break;

    case COND_EXPR:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      dump_child ("op 2", TREE_OPERAND (t, 2));
      break;

    case TRY_FINALLY_EXPR:
      dump_child ("op 0", TREE_OPERAND (t, 0));
      dump_child ("op 1", TREE_OPERAND (t, 1));
      break;

    case CALL_EXPR:
      {
	int i = 0;
	tree arg;
	call_expr_arg_iterator iter;
	dump_child ("fn", CALL_EXPR_FN (t));
	FOR_EACH_CALL_EXPR_ARG (arg, iter, t)
	  {
	    char buffer[32];
	    sprintf (buffer, "%u", i);
	    dump_child (buffer, arg);
	    i++;
	  }
      }
      break;

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT cnt;
	tree index, value;
	dump_int (di, "lngt", vec_safe_length (CONSTRUCTOR_ELTS (t)));
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), cnt, index, value)
	  {
	    dump_child ("idx", index);
	    dump_child ("val", value);
	  }
      }
      break;

    default:
      /* There are no additional fields to print.  */
      break;
    }

 done:
  if (dump_flag (di, TDF_ADDRESS, NULL))
    dump_pointer (di, "addr", (void *) t);

  /* Terminate the line.  */
  fprintf (di->stream, "\n");
}
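
dequeue_and_dump is the worker behind GCC's raw tree dumps: every queued node is printed as an indexed record whose fields come from the dump_child/dump_int/dump_string_field calls above. A hedged illustration (the dump lines in the comment follow the fprintf formats above; exact indices and spacing vary by compiler version):

int square (int x)
{
  return x * x;
}

/* Roughly, with -fdump-tree-original-raw:
     @1 function_decl  name: @2   type: @3   srcp: square.cc:1 ...
     @7 mult_expr      type: @5   op 0: @8   op 1: @8
   dump_index prints the @N column, the "%-16s" above prints the code
   name, and dump_child queues fields such as "op 0"/"op 1".  */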
Example #16
tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, forcing it into a register if necessary.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
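
create_mem_ref keeps retrying create_mem_ref_raw while folding the five-part address symbol + base + index * step + offset into fewer parts. A simplified integer mirror of that fallback order (illustrative only; the real code manipulates trees and forces values into SSA names, not longs):

struct mem_address_sketch
{
  long symbol, base, index, step, offset;
};

static long
flatten (mem_address_sketch p)
{
  if (p.step != 1) { p.index *= p.step; p.step = 1; }      /* step -> index */
  if (p.symbol)    { p.base += p.symbol; p.symbol = 0; }   /* symbol -> base */
  if (p.index)     { p.base += p.index; p.index = 0; }     /* index -> base */
  if (p.offset)    { p.base += p.offset; p.offset = 0; }   /* offset -> base */
  return p.base;                       /* only a "register" is left */
}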
Example #17
static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
		     enum rtx_code unsigned_code,
		     rtx_code_label *if_false_label,
		     rtx_code_label *if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && POINTER_TYPE_P (TREE_TYPE (treeop0))
      && POINTER_TYPE_P (TREE_TYPE (treeop1))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop1))))
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
			   if_false_label, if_true_label, prob);
}
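
A hedged example of the canonicalization case above: only when both operands are pointers to functions (or methods) are they passed through canonicalize_funcptr_for_compare, per PR middle-end/17564:

extern void f (void);
extern void g (void);

int same_target (void (*p) (void), void (*q) (void))
{
  return p == q;   /* both sides are function pointers: canonicalize */
}

int mixed (void (*p) (void), void *q)
{
  return (void *) p == q;   /* one side isn't: compare uncanonicalized */
}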
Example #18
tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
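
When the index is a constant, the code above scales it by step and folds the product into the offset. The same fold in plain arithmetic (a sketch, not the tree-level code):

struct tmr_sketch { long offset, index, step; };

static tmr_sketch
fold_constant_index (tmr_sketch a)
{
  long off = a.index * a.step;   /* MULT_EXPR (index, step) */
  a.offset += off;               /* PLUS_EXPR (offset, off) */
  a.index = 0;
  a.step = 1;
  return a;
}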
Example #19
void
do_jump (tree exp, rtx_code_label *if_false_label,
	 rtx_code_label *if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  machine_mode mode;
  rtx_code_label *drop_through_label = NULL;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      {
	rtx_code_label *lab = integer_zerop (exp) ? if_false_label
						  : if_true_label;
	if (lab)
	  emit_jump (lab);
	break;
      }

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx_code_label *label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx_code_label *set_label, *clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
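
The BIT_AND_EXPR case above undoes fold_single_bit_test when an AND-based test is cheaper to branch on. The two equivalent single-bit tests it chooses between:

int test_and (unsigned x)
{
  return (x & (1u << 5)) != 0;   /* AND with a one-bit mask */
}

int test_shift (unsigned x)
{
  return (x >> 5) & 1;           /* shift, then AND with 1 */
}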
Example #20
tree
decl_attributes (tree *node, tree attributes, int flags)
{
  tree a;
  tree returned_attrs = NULL_TREE;

  if (TREE_TYPE (*node) == error_mark_node || attributes == error_mark_node)
    return NULL_TREE;

  if (!attributes_initialized)
    init_attributes ();

  /* If this is a function and the user used #pragma GCC optimize, add the
     options to the attribute((optimize(...))) list.  */
  if (TREE_CODE (*node) == FUNCTION_DECL && current_optimize_pragma)
    {
      tree cur_attr = lookup_attribute ("optimize", attributes);
      tree opts = copy_list (current_optimize_pragma);

      if (! cur_attr)
	attributes
	  = tree_cons (get_identifier ("optimize"), opts, attributes);
      else
	TREE_VALUE (cur_attr) = chainon (opts, TREE_VALUE (cur_attr));
    }

  if (TREE_CODE (*node) == FUNCTION_DECL
      && optimization_current_node != optimization_default_node
      && !DECL_FUNCTION_SPECIFIC_OPTIMIZATION (*node))
    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (*node) = optimization_current_node;

  /* If this is a function and the user used #pragma GCC target, add the
     options to the attribute((target(...))) list.  */
  if (TREE_CODE (*node) == FUNCTION_DECL
      && current_target_pragma
      && targetm.target_option.valid_attribute_p (*node, NULL_TREE,
						  current_target_pragma, 0))
    {
      tree cur_attr = lookup_attribute ("target", attributes);
      tree opts = copy_list (current_target_pragma);

      if (! cur_attr)
	attributes = tree_cons (get_identifier ("target"), opts, attributes);
      else
	TREE_VALUE (cur_attr) = chainon (opts, TREE_VALUE (cur_attr));
    }

  /* A "naked" function attribute implies "noinline" and "noclone" for
     those targets that support it.  */
  if (TREE_CODE (*node) == FUNCTION_DECL
      && attributes
      && lookup_attribute_spec (get_identifier ("naked"))
      && lookup_attribute ("naked", attributes) != NULL)
    {
      if (lookup_attribute ("noinline", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);

      if (lookup_attribute ("noclone", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noclone"),  NULL, attributes);
    }

  targetm.insert_attributes (*node, &attributes);

  for (a = attributes; a; a = TREE_CHAIN (a))
    {
      tree ns = get_attribute_namespace (a);
      tree name = get_attribute_name (a);
      tree args = TREE_VALUE (a);
      tree *anode = node;
      const struct attribute_spec *spec =
	lookup_scoped_attribute_spec (ns, name);
      bool no_add_attrs = 0;
      int fn_ptr_quals = 0;
      tree fn_ptr_tmp = NULL_TREE;

      if (spec == NULL)
	{
	  if (!(flags & (int) ATTR_FLAG_BUILT_IN))
	    {
	      if (ns == NULL_TREE || !cxx11_attribute_p (a))
		warning (OPT_Wattributes, "%qE attribute directive ignored",
			 name);
	      else
		warning (OPT_Wattributes,
			 "%<%E::%E%> scoped attribute directive ignored",
			 ns, name);
	    }
	  continue;
	}
      else if (list_length (args) < spec->min_length
	       || (spec->max_length >= 0
		   && list_length (args) > spec->max_length))
	{
	  error ("wrong number of arguments specified for %qE attribute",
		 name);
	  continue;
	}
      gcc_assert (is_attribute_p (spec->name, name));

      if (TYPE_P (*node)
	  && cxx11_attribute_p (a)
	  && !(flags & ATTR_FLAG_TYPE_IN_PLACE))
	{
	  /* This is a C++11 attribute that appertains to a
	     type-specifier, outside of the definition of a class
	     type.  Ignore it.  */
	  if (warning (OPT_Wattributes, "attribute ignored"))
	    inform (input_location,
		    "an attribute that appertains to a type-specifier "
		    "is ignored");
	  continue;
	}

      if (spec->decl_required && !DECL_P (*anode))
	{
	  if (flags & ((int) ATTR_FLAG_DECL_NEXT
		       | (int) ATTR_FLAG_FUNCTION_NEXT
		       | (int) ATTR_FLAG_ARRAY_NEXT))
	    {
	      /* Pass on this attribute to be tried again.  */
	      returned_attrs = tree_cons (name, args, returned_attrs);
	      continue;
	    }
	  else
	    {
	      warning (OPT_Wattributes, "%qE attribute does not apply to types",
		       name);
	      continue;
	    }
	}

      /* If we require a type, but were passed a decl, set up to make a
	 new type and update the one in the decl.  ATTR_FLAG_TYPE_IN_PLACE
	 would have applied if we'd been passed a type, but we cannot modify
	 the decl's type in place here.  */
      if (spec->type_required && DECL_P (*anode))
	{
	  anode = &TREE_TYPE (*anode);
	  flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
	}

      if (spec->function_type_required && TREE_CODE (*anode) != FUNCTION_TYPE
	  && TREE_CODE (*anode) != METHOD_TYPE)
	{
	  if (TREE_CODE (*anode) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (*anode)) == FUNCTION_TYPE
		  || TREE_CODE (TREE_TYPE (*anode)) == METHOD_TYPE))
	    {
	      /* OK, this is a bit convoluted.  We can't just make a copy
		 of the pointer type and modify its TREE_TYPE, because if
		 we change the attributes of the target type the pointer
		 type needs to have a different TYPE_MAIN_VARIANT.  So we
		 pull out the target type now, frob it as appropriate, and
		 rebuild the pointer type later.

		 This would all be simpler if attributes were part of the
		 declarator, grumble grumble.  */
	      fn_ptr_tmp = TREE_TYPE (*anode);
	      fn_ptr_quals = TYPE_QUALS (*anode);
	      anode = &fn_ptr_tmp;
	      flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
	    }
	  else if (flags & (int) ATTR_FLAG_FUNCTION_NEXT)
	    {
	      /* Pass on this attribute to be tried again.  */
	      returned_attrs = tree_cons (name, args, returned_attrs);
	      continue;
	    }

	  if (TREE_CODE (*anode) != FUNCTION_TYPE
	      && TREE_CODE (*anode) != METHOD_TYPE)
	    {
	      warning (OPT_Wattributes,
		       "%qE attribute only applies to function types",
		       name);
	      continue;
	    }
	}

      if (TYPE_P (*anode)
	  && (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
	  && TYPE_SIZE (*anode) != NULL_TREE)
	{
	  warning (OPT_Wattributes, "type attributes ignored after type is already defined");
	  continue;
	}

      if (spec->handler != NULL)
	{
	  int cxx11_flag =
	    cxx11_attribute_p (a) ? ATTR_FLAG_CXX11 : 0;

	  returned_attrs = chainon ((*spec->handler) (anode, name, args,
						      flags|cxx11_flag,
						      &no_add_attrs),
				    returned_attrs);
	}

      /* Layout the decl in case anything changed.  */
      if (spec->type_required && DECL_P (*node)
	  && (TREE_CODE (*node) == VAR_DECL
	      || TREE_CODE (*node) == PARM_DECL
	      || TREE_CODE (*node) == RESULT_DECL))
	relayout_decl (*node);

      if (!no_add_attrs)
	{
	  tree old_attrs;
	  tree a;

	  if (DECL_P (*anode))
	    old_attrs = DECL_ATTRIBUTES (*anode);
	  else
	    old_attrs = TYPE_ATTRIBUTES (*anode);

	  for (a = lookup_attribute (spec->name, old_attrs);
	       a != NULL_TREE;
	       a = lookup_attribute (spec->name, TREE_CHAIN (a)))
	    {
	      if (simple_cst_equal (TREE_VALUE (a), args) == 1)
		break;
	    }

	  if (a == NULL_TREE)
	    {
	      /* This attribute isn't already in the list.  */
	      if (DECL_P (*anode))
		DECL_ATTRIBUTES (*anode) = tree_cons (name, args, old_attrs);
	      else if (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
		{
		  TYPE_ATTRIBUTES (*anode) = tree_cons (name, args, old_attrs);
		  /* If this is the main variant, also push the attributes
		     out to the other variants.  */
		  if (*anode == TYPE_MAIN_VARIANT (*anode))
		    {
		      tree variant;
		      for (variant = *anode; variant;
			   variant = TYPE_NEXT_VARIANT (variant))
			{
			  if (TYPE_ATTRIBUTES (variant) == old_attrs)
			    TYPE_ATTRIBUTES (variant)
			      = TYPE_ATTRIBUTES (*anode);
			  else if (!lookup_attribute
				   (spec->name, TYPE_ATTRIBUTES (variant)))
			    TYPE_ATTRIBUTES (variant) = tree_cons
			      (name, args, TYPE_ATTRIBUTES (variant));
			}
		    }
		}
	      else
		*anode = build_type_attribute_variant (*anode,
						       tree_cons (name, args,
								  old_attrs));
	    }
	}

      if (fn_ptr_tmp)
	{
	  /* Rebuild the function pointer type and put it in the
	     appropriate place.  */
	  fn_ptr_tmp = build_pointer_type (fn_ptr_tmp);
	  if (fn_ptr_quals)
	    fn_ptr_tmp = build_qualified_type (fn_ptr_tmp, fn_ptr_quals);
	  if (DECL_P (*node))
	    TREE_TYPE (*node) = fn_ptr_tmp;
	  else
	    {
	      gcc_assert (TREE_CODE (*node) == POINTER_TYPE);
	      *node = fn_ptr_tmp;
	    }
	}
    }

  return returned_attrs;
}
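
Two source-level forms that feed the merging above, shown as a hedged sketch for GCC: a #pragma GCC optimize whose options decl_attributes folds into each function's attribute((optimize(...))) list, and a naked function on targets where that attribute exists, which the code extends with noinline and noclone:

#pragma GCC optimize ("O2")
/* From here on, decl_attributes merges the pragma's options into each
   function's attribute((optimize(...))) list.  */
void scale (float *a, int n)
{
  for (int i = 0; i < n; i++)
    a[i] *= 2.0f;
}

/* Where the target supports it, "naked" implies noinline and noclone,
   exactly as inserted above.  */
#if defined (__arm__) || defined (__AVR__)
__attribute__ ((naked)) void isr_stub (void);
#endif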
Example #21
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
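
For an array operand, the function above builds a do/while pointer walk that applies fn to each element, optionally advancing a second pointer in lockstep. A hedged C++ sketch of the generated shape (T and ctor are hypothetical stand-ins for the element type and callee):

struct T { int v; };
extern void ctor (T *dst, T *src);   /* hypothetical callee */

static void
apply_over_array (T (&arg1)[8], T (&arg2)[8])
{
  T *p1 = &arg1[0], *end1 = &arg1[0] + 8;
  T *p2 = &arg2[0];
  do
    {
      ctor (p1, p2);   /* the built CALL_EXPR, default args appended */
      ++p1;            /* fold_build_pointer_plus (p1, sizeof (T)) */
      ++p2;
    }
  while (p1 != end1);  /* NE_EXPR (p1, end1) -> jump back to LAB */
}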
Example #22
void
apply_tm_attr (tree fndecl, tree attr)
{
  decl_attributes (&TREE_TYPE (fndecl), tree_cons (attr, NULL, NULL), 0);
}
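
apply_tm_attr hangs the attribute on TREE_TYPE (fndecl), i.e. on the function type rather than the decl. A hedged source-level equivalent (the transactional attributes require -fgnu-tm):

void commit_log (int) __attribute__ ((transaction_safe));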
Example #23
int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
		  || (TREE_CODE (op1) == CONSTRUCTOR
		      && CONSTRUCTOR_NELTS (op1) == 0
		      && !TREE_CLOBBER_P (op1))
		  || (TREE_CODE (op1) == CALL_EXPR
		      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
		 && is_really_empty_class (TREE_TYPE (op0)))
	  {
	    /* Remove any copies of empty classes.  We check that the RHS
	       has a simple form so that TARGET_EXPRs and non-empty
	       CONSTRUCTORs get reduced properly, and we leave the return
	       slot optimization alone because it isn't a copy (FIXME so it
	       shouldn't be represented as one).

	       Also drop volatile variables on the RHS to avoid infinite
	       recursion from gimplify_expr trying to load the value.  */
	    if (!TREE_SIDE_EFFECTS (op1)
		|| (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
	      *expr_p = op0;
	    else if (TREE_CODE (op1) == MEM_REF
		     && TREE_THIS_VOLATILE (op1))
	      {
		/* Similarly for volatile MEM_REFs on the RHS.  */
		if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
		  *expr_p = op0;
		else
		  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				    TREE_OPERAND (op1, 0), op0);
	      }
	    else
	      *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				op0, op1);
	  }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
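
A hedged example for the MODIFY_EXPR branch above: copies of really-empty classes are reduced to their operands, since there is nothing to move:

struct empty {};

empty &drop_copy (empty &a, empty &b)
{
  a = b;      /* really-empty copy: reduced to its operands */
  return a;
}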
Example #24
void
make_friend_class (tree type, tree friend_type, bool complain)
{
  tree classes;

  /* CLASS_TEMPLATE_DEPTH counts the number of template headers for
     the enclosing class.  FRIEND_DEPTH counts the number of template
     headers used for this friend declaration.  TEMPLATE_MEMBER_P,
     defined inside the `if' block for TYPENAME_TYPE case, is true if
     a template header in FRIEND_DEPTH is intended for DECLARATOR.
     For example, the code

       template <class T> struct A {
	 template <class U> struct B {
	   template <class V> template <class W>
	     friend class C<V>::D;
	 };
       };

     will eventually give the following results

     1. CLASS_TEMPLATE_DEPTH equals 2 (for `T' and `U').
     2. FRIEND_DEPTH equals 2 (for `V' and `W').
     3. TEMPLATE_MEMBER_P is true (for `W').

     The friend is a template friend iff FRIEND_DEPTH is nonzero.  */

  int class_template_depth = template_class_depth (type);
  int friend_depth = processing_template_decl - class_template_depth;

  if (! MAYBE_CLASS_TYPE_P (friend_type)
      && TREE_CODE (friend_type) != TEMPLATE_TEMPLATE_PARM)
    {
      /* N1791: If the type specifier in a friend declaration designates a
	 (possibly cv-qualified) class type, that class is declared as a
	 friend; otherwise, the friend declaration is ignored.

         So don't complain in C++11 mode.  */
      if (cxx_dialect < cxx11)
	pedwarn (input_location, complain ? 0 : OPT_Wpedantic,
		 "invalid type %qT declared %<friend%>", friend_type);
      return;
    }

  friend_type = cv_unqualified (friend_type);

  if (check_for_bare_parameter_packs (friend_type))
    return;

  if (friend_depth)
    {
      /* [temp.friend] Friend declarations shall not declare partial
	 specializations.  */
      if (CLASS_TYPE_P (friend_type)
	  && CLASSTYPE_TEMPLATE_SPECIALIZATION (friend_type)
	  && uses_template_parms (friend_type))
	{
	  error ("partial specialization %qT declared %<friend%>",
		 friend_type);
	  return;
	}

      if (TYPE_TEMPLATE_INFO (friend_type)
	  && !PRIMARY_TEMPLATE_P (TYPE_TI_TEMPLATE (friend_type)))
	{
	  error ("%qT is not a template", friend_type);
	  inform (location_of (friend_type), "previous declaration here");
	  if (TYPE_CLASS_SCOPE_P (friend_type)
	      && CLASSTYPE_TEMPLATE_INFO (TYPE_CONTEXT (friend_type))
	      && currently_open_class (TYPE_CONTEXT (friend_type)))
	    inform (input_location, "perhaps you need explicit template "
		    "arguments in your nested-name-specifier");
	  return;
	}
    }

  /* It makes sense for a template class to be friends with itself;
     that means the instantiations can be friendly.  Other cases are
     not so meaningful.  */
  if (!friend_depth && same_type_p (type, friend_type))
    {
      if (complain)
	warning (0, "class %qT is implicitly friends with itself",
		 type);
      return;
    }

  /* [temp.friend]

     A friend of a class or class template can be a function or
     class template, a specialization of a function template or
     class template, or an ordinary (nontemplate) function or
     class.  */
  if (!friend_depth)
    ;/* ok */
  else if (TREE_CODE (friend_type) == TYPENAME_TYPE)
    {
      if (TREE_CODE (TYPENAME_TYPE_FULLNAME (friend_type))
	  == TEMPLATE_ID_EXPR)
	{
	  /* template <class U> friend class T::X<U>; */
	  /* [temp.friend]
	     Friend declarations shall not declare partial
	     specializations.  */
	  error ("partial specialization %qT declared %<friend%>",
		 friend_type);
	  return;
	}
      else
	{
	  /* We will figure this out later.  */
	  bool template_member_p = false;

	  tree ctype = TYPE_CONTEXT (friend_type);
	  tree name = TYPE_IDENTIFIER (friend_type);
	  tree decl;

	  if (!uses_template_parms_level (ctype, class_template_depth
						 + friend_depth))
	    template_member_p = true;

	  if (class_template_depth)
	    {
	      /* We rely on tsubst_friend_class to check the
		 validity of the declaration later.  */
	      if (template_member_p)
		friend_type
		  = make_unbound_class_template (ctype,
						 name,
						 current_template_parms,
						 tf_error);
	      else
		friend_type
		  = make_typename_type (ctype, name, class_type, tf_error);
	    }
	  else
	    {
	      decl = lookup_member (ctype, name, 0, true, tf_warning_or_error);
	      if (!decl)
		{
		  error ("%qT is not a member of %qT", name, ctype);
		  return;
		}
	      if (template_member_p && !DECL_CLASS_TEMPLATE_P (decl))
		{
		  error ("%qT is not a member class template of %qT",
			 name, ctype);
		  inform (DECL_SOURCE_LOCATION (decl),
			  "%qD declared here", decl);
		  return;
		}
	      if (!template_member_p && (TREE_CODE (decl) != TYPE_DECL
					 || !CLASS_TYPE_P (TREE_TYPE (decl))))
		{
		  error ("%qT is not a nested class of %qT",
			 name, ctype);
		  inform (DECL_SOURCE_LOCATION (decl),
			  "%qD declared here", decl);
		  return;
		}

	      friend_type = CLASSTYPE_TI_TEMPLATE (TREE_TYPE (decl));
	    }
	}
    }
  else if (TREE_CODE (friend_type) == TEMPLATE_TYPE_PARM)
    {
      /* template <class T> friend class T; */
      error ("template parameter type %qT declared %<friend%>", friend_type);
      return;
    }
  else if (TREE_CODE (friend_type) == TEMPLATE_TEMPLATE_PARM)
    friend_type = TYPE_NAME (friend_type);
  else if (!CLASSTYPE_TEMPLATE_INFO (friend_type))
    {
      /* template <class T> friend class A; where A is not a template */
      error ("%q#T is not a template", friend_type);
      return;
    }
  else
    /* template <class T> friend class A; where A is a template */
    friend_type = CLASSTYPE_TI_TEMPLATE (friend_type);

  if (friend_type == error_mark_node)
    return;

  /* See if it is already a friend.  */
  for (classes = CLASSTYPE_FRIEND_CLASSES (type);
       classes;
       classes = TREE_CHAIN (classes))
    {
      tree probe = TREE_VALUE (classes);

      if (TREE_CODE (friend_type) == TEMPLATE_DECL)
	{
	  if (friend_type == probe)
	    {
	      if (complain)
		warning (OPT_Wredundant_decls,
			 "%qD is already a friend of %qT", probe, type);
	      break;
	    }
	}
      else if (TREE_CODE (probe) != TEMPLATE_DECL)
	{
	  if (same_type_p (probe, friend_type))
	    {
	      if (complain)
		warning (OPT_Wredundant_decls,
			 "%qT is already a friend of %qT", probe, type);
	      break;
	    }
	}
    }

  if (!classes)
    {
      maybe_add_class_template_decl_list (type, friend_type, /*friend_p=*/1);

      CLASSTYPE_FRIEND_CLASSES (type)
	= tree_cons (NULL_TREE, friend_type, CLASSTYPE_FRIEND_CLASSES (type));
      if (TREE_CODE (friend_type) == TEMPLATE_DECL)
	friend_type = TREE_TYPE (friend_type);
      if (!uses_template_parms (type))
	CLASSTYPE_BEFRIENDING_CLASSES (friend_type)
	  = tree_cons (NULL_TREE, type,
		       CLASSTYPE_BEFRIENDING_CLASSES (friend_type));
    }
}
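
Hedged C++ examples of the forms the checks above accept or diagnose:

template <class T> struct List;

struct Node
{
  template <class T> friend struct List;   /* template friend: accepted */
};

struct Self
{
  friend struct Self;   /* may warn: implicitly friends with itself */
};

/* Diagnosed forms, per the cases above:
     template <class T> friend class T;         // template parameter type
     template <class T> friend class List<T*>;  // partial specialization */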
Example #25
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
    ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
    : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
	? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
	: (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	   : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
        case BUILT_IN_CLZ:
          {
            if (AARCH64_CHECK_BUILTIN_MODE (4, S))
              return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
            return NULL_TREE;
          }
	case BUILT_IN_CTZ:
          {
	    if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
	    return NULL_TREE;
          }
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin =	AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin =	AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin =	AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_BSWAP16:
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
      }
    }

  return NULL_TREE;
}
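
A hedged example of what this hook enables: vectorizing a loop around a scalar rounding builtin, assuming aarch64 and optimization flags that let the call vectorize. BUILT_IN_FLOOR on two doubles maps to AARCH64_SIMD_BUILTIN_UNOP_floorv2df via AARCH64_FIND_FRINT_VARIANT above:

#include <cmath>

void floor_all (double *out, const double *in, int n)
{
  for (int i = 0; i < n; i++)
    out[i] = std::floor (in[i]);   /* BUILT_IN_FLOOR -> floorv2df/v4sf */
}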
Example #26
int
is_friend (tree type, tree supplicant)
{
  int declp;
  tree list;
  tree context;

  if (supplicant == NULL_TREE || type == NULL_TREE)
    return 0;

  if (is_global_friend (supplicant))
    return 1;

  declp = DECL_P (supplicant);

  if (declp)
    /* It's a function decl.  */
    {
      tree list = DECL_FRIENDLIST (TYPE_MAIN_DECL (type));
      tree name = DECL_NAME (supplicant);

      for (; list ; list = TREE_CHAIN (list))
	{
	  if (name == FRIEND_NAME (list))
	    {
	      tree friends = FRIEND_DECLS (list);
	      for (; friends ; friends = TREE_CHAIN (friends))
		{
		  tree this_friend = TREE_VALUE (friends);

		  if (this_friend == NULL_TREE)
		    continue;

		  if (supplicant == this_friend)
		    return 1;

		  if (is_specialization_of_friend (supplicant, this_friend))
		    return 1;
		}
	      break;
	    }
	}
    }
  else
    /* It's a type.  */
    {
      if (same_type_p (supplicant, type))
	return 1;

      list = CLASSTYPE_FRIEND_CLASSES (TREE_TYPE (TYPE_MAIN_DECL (type)));
      for (; list ; list = TREE_CHAIN (list))
	{
	  tree t = TREE_VALUE (list);

	  if (TREE_CODE (t) == TEMPLATE_DECL ?
	      is_specialization_of_friend (TYPE_MAIN_DECL (supplicant), t) :
	      same_type_p (supplicant, t))
	    return 1;
	}
    }

  if (declp)
    {
      if (DECL_FUNCTION_MEMBER_P (supplicant))
	context = DECL_CONTEXT (supplicant);
      else
	context = NULL_TREE;
    }
  else
    {
      if (TYPE_CLASS_SCOPE_P (supplicant))
	/* Nested classes get the same access as their enclosing types, as
	   per DR 45 (this is a change from the standard).  */
	context = TYPE_CONTEXT (supplicant);
      else
	/* Local classes have the same access as the enclosing function.  */
	context = decl_function_context (TYPE_MAIN_DECL (supplicant));
    }

  /* A namespace is not friend to anybody.  */
  if (context && TREE_CODE (context) == NAMESPACE_DECL)
    context = NULL_TREE;

  if (context)
    return is_friend (type, context);

  return 0;
}
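
A hedged illustration of the context walk at the end of is_friend: per DR 45, a nested class receives its enclosing class's access, so friendship granted to Outer extends to Outer::Inner:

class Holder
{
  int secret;
  friend class Outer;
};

struct Outer
{
  struct Inner
  {
    int peek (Holder &h) { return h.secret; }   /* OK: DR 45 */
  };
};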
Example #27
bool
i386_pe_type_dllimport_p (tree decl)
{
   gcc_assert (TREE_CODE (decl) == VAR_DECL 
               || TREE_CODE (decl) == FUNCTION_DECL);

   if (TARGET_NOP_FUN_DLLIMPORT && TREE_CODE (decl) == FUNCTION_DECL)
     return false;

   /* We ignore the dllimport attribute for inline member functions.
      This differs from MSVC behavior, which treats it like the GNU C
      'extern inline' extension.  Also ignore it for template
      instantiations with linkonce semantics and artificial methods.  */
    if (TREE_CODE (decl) ==  FUNCTION_DECL
        && (DECL_DECLARED_INLINE_P (decl)
	    || DECL_TEMPLATE_INSTANTIATION (decl)
	    || DECL_ARTIFICIAL (decl)))
      return false;

   /* Since we can't treat a pointer to a dllimport'd symbol as a
       constant address, we turn off the attribute on C++ virtual
       methods to allow creation of vtables using thunks.  */
    else if (TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
	     && DECL_VIRTUAL_P (decl))
      {
	/* Even though we ignore the attribute from the start, warn if we
	   later see an out-of-class definition, as we do for other member
	   functions in tree.c:merge_dllimport_decl_attributes.  If this is
	   the key method, the definition may affect the import-export
	   status of vtables, depending on how we handle
	   MULTIPLE_SYMBOL_SPACES in cp/decl2.c.  */
	if (DECL_INITIAL (decl))
	  {
	    warning ("%q+D redeclared without dllimport attribute: "
		    "previous dllimport ignored", decl);
#ifdef PE_DLL_DEBUG
	    if (decl == CLASSTYPE_KEY_METHOD (DECL_CONTEXT (decl)))            
	      warning ("key method %q+D of dllimport'd class defined"
		       decl);
#endif
	  }
	return false;
      }

      /* Don't mark defined functions as dllimport.  This code will only be
         reached if we see a non-inline function defined out-of-class.  */
    else if (TREE_CODE (decl) ==  FUNCTION_DECL
	     && (DECL_INITIAL (decl)))
      return false;

    /*  Don't allow definitions of static data members in a dllimport
        class.  If vtable data is marked as DECL_EXTERNAL, import it;
        otherwise just ignore the class attribute.  */
    else if (TREE_CODE (decl) == VAR_DECL
	     && TREE_STATIC (decl) && TREE_PUBLIC (decl)
	     && !DECL_EXTERNAL (decl))
      {
	if (!DECL_VIRTUAL_P (decl))
	     error ("definition of static data member %q+D of "
		    "dllimport'd class", decl);
	return false;
      }

    return true;
}
Exemple #28
0
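/* Extract the loop description of the OpenMP loop FOR_STMT into *FD.
   LOOPS, if non-NULL, points to an array that receives the data for
   the individual loops of a collapsed or ordered nest.  */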
void
omp_extract_for_data (gomp_for *for_stmt, struct omp_for_data *fd,
		      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);
  bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_SIMD;
  bool distribute = gimple_omp_for_kind (for_stmt)
		    == GF_OMP_FOR_KIND_DISTRIBUTE;
  bool taskloop = gimple_omp_for_kind (for_stmt)
		  == GF_OMP_FOR_KIND_TASKLOOP;
  tree iterv, countv;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->have_nowait = distribute || simd;
  fd->have_ordered = false;
  fd->tiling = NULL_TREE;
  fd->collapse = 1;
  fd->ordered = 0;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->sched_modifiers = 0;
  fd->chunk_size = NULL_TREE;
  fd->simd_schedule = false;
  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_CILKFOR)
    fd->sched_kind = OMP_CLAUSE_SCHEDULE_CILKFOR;
  collapse_iter = NULL;
  collapse_count = NULL;

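  /* Walk the clauses, recording nowait/ordered status, schedule kind,
     collapse depth and tiling information.  */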
  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	if (OMP_CLAUSE_ORDERED_EXPR (t))
	  fd->ordered = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (t));
	break;
      case OMP_CLAUSE_SCHEDULE:
	gcc_assert (!distribute && !taskloop);
	fd->sched_kind
	  = (enum omp_clause_schedule_kind)
	    (OMP_CLAUSE_SCHEDULE_KIND (t) & OMP_CLAUSE_SCHEDULE_MASK);
	fd->sched_modifiers = (OMP_CLAUSE_SCHEDULE_KIND (t)
			       & ~OMP_CLAUSE_SCHEDULE_MASK);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	fd->simd_schedule = OMP_CLAUSE_SCHEDULE_SIMD (t);
	break;
      case OMP_CLAUSE_DIST_SCHEDULE:
	gcc_assert (distribute);
	fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_COLLAPSE:
	fd->collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (t));
	if (fd->collapse > 1)
	  {
	    collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
	    collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
	  }
	break;
      case OMP_CLAUSE_TILE:
	fd->tiling = OMP_CLAUSE_TILE_LIST (t);
	fd->collapse = list_length (fd->tiling);
	gcc_assert (fd->collapse);
	collapse_iter = &OMP_CLAUSE_TILE_ITERVAR (t);
	collapse_count = &OMP_CLAUSE_TILE_COUNT (t);
	break;
      default:
	break;
      }

  if (fd->collapse > 1 || fd->tiling)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  if (fd->ordered && fd->collapse == 1 && loops != NULL)
    {
      fd->loops = loops;
      iterv = NULL_TREE;
      countv = NULL_TREE;
      collapse_iter = &iterv;
      collapse_count = &countv;
    }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert ((fd->collapse == 1 && !fd->tiling) || collapse_iter != NULL);
  if (taskloop)
    fd->sched_kind = OMP_CLAUSE_SCHEDULE_RUNTIME;
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }

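  /* Examine each loop in the nest, determining a suitable type for
     the iteration variable along the way.  */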
  int cnt = fd->ordered ? fd->ordered : fd->collapse;
  for (i = 0; i < cnt; i++)
    {
      if (i == 0
	  && fd->collapse == 1
	  && !fd->tiling
	  && (fd->ordered == 0 || loops == NULL))
	loop = &fd->loop;
      else if (loops != NULL)
	loop = loops + i;
      else
	loop = &dummy_loop;

      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      gcc_assert (loop->cond_code != NE_EXPR
		  || gimple_omp_for_kind (for_stmt) == GF_OMP_FOR_KIND_CILKSIMD
		  || gimple_omp_for_kind (for_stmt) == GF_OMP_FOR_KIND_CILKFOR);
      omp_adjust_for_condition (loc, &loop->cond_code, &loop->n2);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      loop->step = omp_get_for_step_from_incr (loc, t);

      if (simd
	  || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
	      && !fd->have_ordered))
	{
	  if (fd->collapse == 1 && !fd->tiling)
	    iter_type = TREE_TYPE (loop->v);
	  else if (i == 0
		   || TYPE_PRECISION (iter_type)
		      < TYPE_PRECISION (TREE_TYPE (loop->v)))
	    iter_type
	      = build_nonstandard_integer_type
		  (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
	}
      else if (iter_type != long_long_unsigned_type_node)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
	    iter_type = long_long_unsigned_type_node;
	  else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
		   && TYPE_PRECISION (TREE_TYPE (loop->v))
		      >= TYPE_PRECISION (iter_type))
	    {
	      tree n;

	      if (loop->cond_code == LT_EXPR)
		n = fold_build2_loc (loc,
				 PLUS_EXPR, TREE_TYPE (loop->v),
				 loop->n2, loop->step);
	      else
		n = loop->n1;
	      if (TREE_CODE (n) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
		iter_type = long_long_unsigned_type_node;
	    }
	  else if (TYPE_PRECISION (TREE_TYPE (loop->v))
		   > TYPE_PRECISION (iter_type))
	    {
	      tree n1, n2;

	      if (loop->cond_code == LT_EXPR)
		{
		  n1 = loop->n1;
		  n2 = fold_build2_loc (loc,
				    PLUS_EXPR, TREE_TYPE (loop->v),
				    loop->n2, loop->step);
		}
	      else
		{
		  n1 = fold_build2_loc (loc,
				    MINUS_EXPR, TREE_TYPE (loop->v),
				    loop->n2, loop->step);
		  n2 = loop->n1;
		}
	      if (TREE_CODE (n1) != INTEGER_CST
		  || TREE_CODE (n2) != INTEGER_CST
		  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
		  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
		iter_type = long_long_unsigned_type_node;
	    }
	}

      if (i >= fd->collapse)
	continue;

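      /* Try to compute the total iteration count of the nest as a
	 compile-time constant.  */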
      if (collapse_count && *collapse_count == NULL)
	{
	  t = fold_binary (loop->cond_code, boolean_type_node,
			   fold_convert (TREE_TYPE (loop->v), loop->n1),
			   fold_convert (TREE_TYPE (loop->v), loop->n2));
	  if (t && integer_zerop (t))
	    count = build_zero_cst (long_long_unsigned_type_node);
	  else if ((i == 0 || count != NULL_TREE)
		   && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		   && TREE_CONSTANT (loop->n1)
		   && TREE_CONSTANT (loop->n2)
		   && TREE_CODE (loop->step) == INTEGER_CST)
	    {
	      tree itype = TREE_TYPE (loop->v);

	      if (POINTER_TYPE_P (itype))
		itype = signed_type_for (itype);
	      t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
	      t = fold_build2_loc (loc,
			       PLUS_EXPR, itype,
			       fold_convert_loc (loc, itype, loop->step), t);
	      t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
			       fold_convert_loc (loc, itype, loop->n2));
	      t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
			       fold_convert_loc (loc, itype, loop->n1));
	      if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
				 fold_build1_loc (loc, NEGATE_EXPR, itype, t),
				 fold_build1_loc (loc, NEGATE_EXPR, itype,
					      fold_convert_loc (loc, itype,
								loop->step)));
	      else
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
				 fold_convert_loc (loc, itype, loop->step));
	      t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
	      if (count != NULL_TREE)
		count = fold_build2_loc (loc,
				     MULT_EXPR, long_long_unsigned_type_node,
				     count, t);
	      else
		count = t;
	      if (TREE_CODE (count) != INTEGER_CST)
		count = NULL_TREE;
	    }
	  else if (count && !integer_zerop (count))
	    count = NULL_TREE;
	}
    }

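  /* Settle on the final iteration variable type, preferring "long"
     when the computed count is known to fit in it.  */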
  if (count
      && !simd
      && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered))
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
	iter_type = long_long_unsigned_type_node;
      else
	iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
	*collapse_count = fold_convert_loc (loc, iter_type, count);
      else
	*collapse_count = create_tmp_var (iter_type, ".count");
    }

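  /* Describe a collapsed, tiled or ordered nest as a single logical
     loop running from 0 to the total iteration count.  */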
  if (fd->collapse > 1 || fd->tiling || (fd->ordered && loops))
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
  else if (loops)
    loops[0] = fd->loop;
}
Exemple #29
0
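/* Queue and dump the type of the tree node T under the "type" field.  */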
void
gcc_queue_and_dump_type (dump_info_p di, const_tree t)
{
  gcc_queue_and_dump_index (di, "type", TREE_TYPE (t), DUMP_NONE);
}
Exemple #30
0
/* Find memcpy, mempcpy, memmove and memset calls, perform the
   checks before the call and then call the no_chk version of the
   function.  We do this at O2 to enable inlining of these
   functions during expansion.

   Also try to find memcpy, mempcpy, memmove and memset calls
   which are known not to write pointers to memory, and use
   faster function versions for them.  */
static void
chkp_optimize_string_function_calls (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Searching for replaceable string function calls...\n");

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
	  gimple stmt = gsi_stmt (i);
	  tree fndecl;

	  if (gimple_code (stmt) != GIMPLE_CALL
	      || !gimple_call_with_bounds_p (stmt))
	    continue;

	  fndecl = gimple_call_fndecl (stmt);

	  if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
	    continue;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMCPY_CHKP
	      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHKP
	      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMMOVE_CHKP
	      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHKP)
	    {
	      tree dst = gimple_call_arg (stmt, 0);
	      tree dst_bnd = gimple_call_arg (stmt, 1);
	      bool is_memset = DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHKP;
	      tree size = gimple_call_arg (stmt, is_memset ? 3 : 4);
	      tree fndecl_nochk;
	      gimple_stmt_iterator j;
	      basic_block check_bb;
	      address_t size_val;
	      int sign;
	      bool known;

	      /* We may replace the call with the corresponding
		 __chkp_*_nobnd call when the destination's pointee
		 type is not void and contains no pointers.  */
	      if (POINTER_TYPE_P (TREE_TYPE (dst))
		  && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst)))
		  && !chkp_type_has_pointer (TREE_TYPE (TREE_TYPE (dst))))
		{
		  tree fndecl_nobnd
		    = chkp_get_nobnd_fndecl (DECL_FUNCTION_CODE (fndecl));

		  if (fndecl_nobnd)
		    fndecl = fndecl_nobnd;
		}

	      fndecl_nochk = chkp_get_nochk_fndecl (DECL_FUNCTION_CODE (fndecl));

	      if (fndecl_nochk)
		fndecl = fndecl_nochk;

	      if (fndecl != gimple_call_fndecl (stmt))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replacing call: ");
		      print_gimple_stmt (dump_file, stmt, 0,
					 TDF_VOPS|TDF_MEMSYMS);
		    }

		  gimple_call_set_fndecl (stmt, fndecl);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "With a new call: ");
		      print_gimple_stmt (dump_file, stmt, 0,
					 TDF_VOPS|TDF_MEMSYMS);
		    }
		}

	      /* If there is no nochk version of the function, do
		 nothing.  Otherwise insert checks before the call.  */
	      if (!fndecl_nochk)
		continue;

	      /* If the size passed to the call is known and greater
		 than zero, we may insert the checks unconditionally.  */
	      size_val.pol.create (0);
	      chkp_collect_value (size, size_val);
	      known = chkp_is_constant_addr (size_val, &sign);
	      size_val.pol.release ();

	      /* If we cannot prove the size is nonzero, emit a
		 runtime test and perform the checks only when the
		 size is not zero.  */
	      if (!known)
		{
		  gimple check = gimple_build_cond (NE_EXPR,
						    size,
						    size_zero_node,
						    NULL_TREE,
						    NULL_TREE);

		  /* Split block before string function call.  */
		  gsi_prev (&i);
		  check_bb = insert_cond_bb (bb, gsi_stmt (i), check);

		  /* Set position for checks.  */
		  j = gsi_last_bb (check_bb);

		  /* The block was split, so the iterator must be
		     reset to its end.  */
		  i = gsi_last_bb (bb);
		}
	      /* If the size is known to be zero, no checks should be
		 performed.  */
	      else if (!sign)
		continue;
	      else
		j = i;

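	      /* Emit checks covering [ptr, ptr + size - 1] for the
		 destination and, for copies, also for the source.  */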
	      size = size_binop (MINUS_EXPR, size, size_one_node);
	      if (!is_memset)
		{
		  tree src = gimple_call_arg (stmt, 2);
		  tree src_bnd = gimple_call_arg (stmt, 3);

		  chkp_check_mem_access (src, fold_build_pointer_plus (src, size),
					 src_bnd, j, gimple_location (stmt),
					 integer_zero_node);
		}

	      chkp_check_mem_access (dst, fold_build_pointer_plus (dst, size),
				     dst_bnd, j, gimple_location (stmt),
				     integer_one_node);
	    }
	}
    }