Example #1
static enum machine_mode
type_natural_mode (tree type)
{
  enum machine_mode mode = TYPE_MODE (type);

  if (TREE_CODE (type) == VECTOR_TYPE && !VECTOR_MODE_P (mode))
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      if ((size == 8 || size == 16)
	  /* ??? Generic code allows us to create width 1 vectors.  Ignore.  */
	  && TYPE_VECTOR_SUBPARTS (type) > 1)
	{
	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (type));

	  if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
	    mode = MIN_MODE_VECTOR_FLOAT;
	  else
	    mode = MIN_MODE_VECTOR_INT;

	  /* Get the mode which has this inner mode and number of units.  */
	  for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
	    if (GET_MODE_NUNITS (mode) == TYPE_VECTOR_SUBPARTS (type)
		&& GET_MODE_INNER (mode) == innermode)
	      return mode;

	  gcc_unreachable ();
	}
    }

  return mode;
}
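
To make the search concrete, here is a minimal standalone sketch of the same idiom: walk a table of vector modes, ordered narrowest to widest, until one matches the requested element count and element mode. The mock table and the names mock_mode and find_vector_mode are hypothetical stand-ins for GCC's machmode API, not part of it.

#include <stdio.h>

enum mock_inner { INNER_INT32, INNER_FLOAT32 };

/* One row per vector mode: element count and element mode.  */
struct mock_mode
{
  const char *name;
  unsigned nunits;
  enum mock_inner inner;
};

/* Ordered narrowest to widest, mirroring GET_MODE_WIDER_MODE traversal.  */
static const struct mock_mode mode_table[] = {
  { "V2SI", 2, INNER_INT32 },
  { "V4SI", 4, INNER_INT32 },
  { "V2SF", 2, INNER_FLOAT32 },
  { "V4SF", 4, INNER_FLOAT32 }
};

static const char *
find_vector_mode (unsigned nunits, enum mock_inner inner)
{
  for (unsigned i = 0; i < sizeof mode_table / sizeof mode_table[0]; i++)
    if (mode_table[i].nunits == nunits && mode_table[i].inner == inner)
      return mode_table[i].name;
  return NULL;  /* type_natural_mode would hit gcc_unreachable () here.  */
}

int
main (void)
{
  printf ("%s\n", find_vector_mode (4, INNER_FLOAT32));  /* prints V4SF */
  return 0;
}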
Example #2
rtx
rtx_vector_builder::find_cached_value ()
{
  if (encoded_nelts () != 1)
    return NULL_RTX;

  rtx elt = (*this)[0];

  if (GET_MODE_CLASS (m_mode) == MODE_VECTOR_BOOL)
    {
      if (elt == const1_rtx || elt == constm1_rtx)
	return CONST1_RTX (m_mode);
      else if (elt == const0_rtx)
	return CONST0_RTX (m_mode);
      else
	gcc_unreachable ();
    }

  /* We can be called before the global vector constants are set up,
     but in that case we'll just return null.  */
  scalar_mode inner_mode = GET_MODE_INNER (m_mode);
  if (elt == CONST0_RTX (inner_mode))
    return CONST0_RTX (m_mode);
  else if (elt == CONST1_RTX (inner_mode))
    return CONST1_RTX (m_mode);
  else if (elt == CONSTM1_RTX (inner_mode))
    return CONSTM1_RTX (m_mode);

  return NULL_RTX;
}
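
The function above maps a single repeated element to one of three canonical shared constants and gives up otherwise. Below is a minimal sketch of that lookup shape; the names elt_value and cached_vectors are hypothetical, standing in for the RTL constant machinery rather than reproducing it.

#include <stdio.h>

enum elt_value { ELT_ZERO, ELT_ONE, ELT_MINUS_ONE, ELT_OTHER };

/* Canonical shared objects, one per special element value.  */
static const char *const cached_vectors[] = {
  "CONST0_RTX", "CONST1_RTX", "CONSTM1_RTX"
};

static const char *
find_cached_value (enum elt_value elt)
{
  if (elt >= ELT_OTHER)
    return NULL;  /* No shared constant; the caller builds a fresh one.  */
  return cached_vectors[elt];
}

int
main (void)
{
  printf ("%s\n", find_cached_value (ELT_ONE));  /* prints CONST1_RTX */
  return 0;
}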
Example #3
rtx
rtx_vector_builder::apply_step (rtx base, unsigned int factor,
				const wide_int &step) const
{
  scalar_int_mode int_mode = as_a <scalar_int_mode> (GET_MODE_INNER (m_mode));
  return immed_wide_int_const (wi::add (rtx_mode_t (base, int_mode),
					factor * step),
			       int_mode);
}
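
apply_step computes base + factor * step, i.e. the value of one element of a stepped vector series. The same arithmetic with plain long in place of wide_int, as a sketch rather than GCC code:

#include <stdio.h>

/* Same arithmetic as apply_step, minus the wide_int plumbing.  */
static long
apply_step (long base, unsigned factor, long step)
{
  return base + (long) factor * step;
}

int
main (void)
{
  /* The series {1, 3, 5, ...} has base 1 and step 2;
     element 3 is 1 + 3 * 2 = 7.  */
  printf ("%ld\n", apply_step (1, 3, 2));
  return 0;
}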
Example #4

static inline int
zero_extended_scalar_load_operand_1 (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
#line 747 "../.././gcc/config/i386/predicates.md"
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);
  if (!op)
    return 0;
  if (GET_CODE (op) != CONST_VECTOR)
    return 0;
  n_elts =
    (GET_MODE_SIZE (GET_MODE (op)) /
     GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return 0;
    }
  return 1;
}
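
The predicate accepts a constant vector whose elements are all zero except possibly element 0, which is what a zero-extending scalar load produces. A standalone sketch of that check over a plain array (zero_extended_scalar_p is a hypothetical name):

#include <stdbool.h>
#include <stdio.h>

static bool
zero_extended_scalar_p (const int *elts, unsigned n_elts)
{
  /* Element 0 may hold any value; all the rest must be zero.  */
  for (unsigned i = 1; i < n_elts; i++)
    if (elts[i] != 0)
      return false;
  return true;
}

int
main (void)
{
  int ok[4] = { 42, 0, 0, 0 };
  int bad[4] = { 42, 0, 1, 0 };
  printf ("%d %d\n", zero_extended_scalar_p (ok, 4),
	  zero_extended_scalar_p (bad, 4));  /* prints 1 0 */
  return 0;
}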
Example #5
static void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  They can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();
 
  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index into the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	      op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	      eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
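
The builtin names are assembled with snprintf, appending the per-argument type signature collected in the loop when any argument was unsigned or polynomial. A tiny standalone sketch of that name construction; the name and signature values here are hypothetical examples:

#include <stdio.h>

int
main (void)
{
  char namebuf[60];
  const char *name = "addv";            /* Hypothetical builtin name.  */
  const char *type_signature = "uss";   /* Hypothetical per-arg signature.  */
  snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
	    name, type_signature);
  printf ("%s\n", namebuf);  /* prints __builtin_aarch64_addv_uss */
  return 0;
}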
Example #6
static bool
propagate_rtx_1 (rtx *px, rtx old_rtx, rtx new_rtx, int flags)
{
  rtx x = *px, tem = NULL_RTX, op0, op1, op2;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);
  machine_mode op_mode;
  bool can_appear = (flags & PR_CAN_APPEAR) != 0;
  bool valid_ops = true;

  if (!(flags & PR_HANDLE_MEM) && MEM_P (x) && !MEM_READONLY_P (x))
    {
      /* If unsafe, change MEMs to CLOBBERs or SCRATCHes (to preserve whether
	 they have side effects or not).  */
      *px = (side_effects_p (x)
	     ? gen_rtx_CLOBBER (GET_MODE (x), const0_rtx)
	     : gen_rtx_SCRATCH (GET_MODE (x)));
      return false;
    }

  /* If X is OLD_RTX, return NEW_RTX.  But not if replacing only within an
     address, and we are *not* inside one.  */
  if (x == old_rtx)
    {
      *px = new_rtx;
      return can_appear;
    }

  /* If this is an expression, try recursive substitution.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = XEXP (x, 0);
      op_mode = GET_MODE (op0);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0))
	return true;
      tem = simplify_gen_unary (code, mode, op0, op_mode);
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return true;
      tem = simplify_gen_binary (code, mode, op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	return true;
      tem = simplify_gen_relational (code, mode, op_mode, op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = XEXP (x, 0);
      op1 = XEXP (x, 1);
      op2 = XEXP (x, 2);
      op_mode = GET_MODE (op0);
      valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
      valid_ops &= propagate_rtx_1 (&op2, old_rtx, new_rtx, flags);
      if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
	return true;
      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op0);
      tem = simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);
      break;

    case RTX_EXTRA:
      /* The only case we try to handle is a SUBREG.  */
      if (code == SUBREG)
	{
          op0 = XEXP (x, 0);
	  valid_ops &= propagate_rtx_1 (&op0, old_rtx, new_rtx, flags);
          if (op0 == XEXP (x, 0))
	    return true;
	  tem = simplify_gen_subreg (mode, op0, GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x));
	}
      break;

    case RTX_OBJ:
      if (code == MEM && x != new_rtx)
	{
	  rtx new_op0;
	  op0 = XEXP (x, 0);

	  /* There are some addresses that we cannot work on.  */
	  if (!can_simplify_addr (op0))
	    return true;

	  op0 = new_op0 = targetm.delegitimize_address (op0);
	  valid_ops &= propagate_rtx_1 (&new_op0, old_rtx, new_rtx,
					flags | PR_CAN_APPEAR);

	  /* Dismiss transformations that we do not want to carry out.  */
	  if (!valid_ops
	      || new_op0 == op0
	      || !(GET_MODE (new_op0) == GET_MODE (op0)
		   || GET_MODE (new_op0) == VOIDmode))
	    return true;

	  canonicalize_address (new_op0);

	  /* Copy propagations are always ok.  Otherwise check the costs.  */
	  if (!(REG_P (old_rtx) && REG_P (new_rtx))
	      && !should_replace_address (op0, new_op0, GET_MODE (x),
					  MEM_ADDR_SPACE (x),
	      			 	  flags & PR_OPTIMIZE_FOR_SPEED))
	    return true;

	  tem = replace_equiv_address_nv (x, new_op0);
	}

      else if (code == LO_SUM)
	{
          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);

	  /* The only simplification we do attempts to remove references to op0
	     or make it constant -- in both cases, op0's invalidity will not
	     make the result invalid.  */
	  propagate_rtx_1 (&op0, old_rtx, new_rtx, flags | PR_CAN_APPEAR);
	  valid_ops &= propagate_rtx_1 (&op1, old_rtx, new_rtx, flags);
          if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
	    return true;

	  /* (lo_sum (high x) x) -> x  */
	  if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
	    tem = op1;
	  else
	    tem = gen_rtx_LO_SUM (mode, op0, op1);

	  /* OP1 is likely not a legitimate address, otherwise there would have
	     been no LO_SUM.  We want it to disappear if it is invalid, return
	     false in that case.  */
	  return memory_address_p (mode, tem);
	}

      else if (code == REG)
	{
	  if (rtx_equal_p (x, old_rtx))
	    {
              *px = new_rtx;
              return can_appear;
	    }
	}
      break;

    default:
      break;
    }

  /* No change, no trouble.  */
  if (tem == NULL_RTX)
    return true;

  *px = tem;

  /* Allow replacements that simplify operations on a vector or complex
     value to a component.  The most prominent case is
     (subreg ([vec_]concat ...)).   */
  if (REG_P (tem) && !HARD_REGISTER_P (tem)
      && (VECTOR_MODE_P (GET_MODE (new_rtx))
	  || COMPLEX_MODE_P (GET_MODE (new_rtx)))
      && GET_MODE (tem) == GET_MODE_INNER (GET_MODE (new_rtx)))
    return true;

  /* The replacement we made so far is valid, if all of the recursive
     replacements were valid, or we could simplify everything to
     a constant.  */
  return valid_ops || can_appear || CONSTANT_P (tem);
}
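
The recursion pattern is the heart of this function: substitute into each operand, and rebuild the node only if some operand actually changed, so unchanged subtrees stay shared. A minimal sketch of that pattern over a hypothetical two-operand expression tree, not RTL:

#include <stdio.h>
#include <stdlib.h>

struct expr
{
  char op;                /* '+' for a sum, 0 for a leaf.  */
  int leaf;               /* Leaf value when op == 0.  */
  struct expr *lhs, *rhs;
};

static struct expr *
substitute (struct expr *x, struct expr *old, struct expr *repl)
{
  if (x == old)
    return repl;
  if (x->op == 0)
    return x;
  struct expr *l = substitute (x->lhs, old, repl);
  struct expr *r = substitute (x->rhs, old, repl);
  if (l == x->lhs && r == x->rhs)
    return x;             /* No operand changed: reuse the node.  */
  struct expr *t = malloc (sizeof *t);
  t->op = x->op;
  t->leaf = 0;
  t->lhs = l;
  t->rhs = r;
  return t;
}

int
main (void)
{
  struct expr a = { 0, 1, NULL, NULL }, b = { 0, 2, NULL, NULL };
  struct expr sum = { '+', 0, &a, &b };
  struct expr c = { 0, 9, NULL, NULL };
  struct expr *res = substitute (&sum, &a, &c);
  printf ("%d + %d\n", res->lhs->leaf, res->rhs->leaf);  /* prints 9 + 2 */
  return 0;
}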
Example #7
static tree
fold_const_call_1 (built_in_function fn, tree type, tree arg0, tree arg1)
{
  machine_mode mode = TYPE_MODE (type);
  machine_mode arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
  machine_mode arg1_mode = TYPE_MODE (TREE_TYPE (arg1));

  if (arg0_mode == arg1_mode
      && real_cst_p (arg0)
      && real_cst_p (arg1))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg0_mode));
      if (mode == arg0_mode)
	{
	  /* real, real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sss (&result, fn, TREE_REAL_CST_PTR (arg0),
				   TREE_REAL_CST_PTR (arg1),
				   REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  if (real_cst_p (arg0)
      && integer_cst_p (arg1))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg0_mode));
      if (mode == arg0_mode)
	{
	  /* real, int -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sss (&result, fn, TREE_REAL_CST_PTR (arg0),
				   arg1, REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  if (integer_cst_p (arg0)
      && real_cst_p (arg1))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg1_mode));
      if (mode == arg1_mode)
	{
	  /* int, real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sss (&result, fn, arg0,
				   TREE_REAL_CST_PTR (arg1),
				   REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  if (arg0_mode == arg1_mode
      && complex_cst_p (arg0)
      && complex_cst_p (arg1))
    {
      gcc_checking_assert (COMPLEX_MODE_P (arg0_mode));
      machine_mode inner_mode = GET_MODE_INNER (arg0_mode);
      tree arg0r = TREE_REALPART (arg0);
      tree arg0i = TREE_IMAGPART (arg0);
      tree arg1r = TREE_REALPART (arg1);
      tree arg1i = TREE_IMAGPART (arg1);
      if (mode == arg0_mode
	  && real_cst_p (arg0r)
	  && real_cst_p (arg0i)
	  && real_cst_p (arg1r)
	  && real_cst_p (arg1i))
	{
	  /* complex real, complex real -> complex real.  */
	  REAL_VALUE_TYPE result_real, result_imag;
	  if (fold_const_call_ccc (&result_real, &result_imag, fn,
				   TREE_REAL_CST_PTR (arg0r),
				   TREE_REAL_CST_PTR (arg0i),
				   TREE_REAL_CST_PTR (arg1r),
				   TREE_REAL_CST_PTR (arg1i),
				   REAL_MODE_FORMAT (inner_mode)))
	    return build_complex (type,
				  build_real (TREE_TYPE (type), result_real),
				  build_real (TREE_TYPE (type), result_imag));
	}
      return NULL_TREE;
    }

  return NULL_TREE;
}
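
Each branch classifies the constant arguments and folds only when the modes and constant kinds line up. A sketch of the first shape, real x real -> real, using pow as a stand-in for whatever function fn names; the cst_kind enum and fold_const_pow are hypothetical, not GCC's API:

#include <math.h>
#include <stdbool.h>
#include <stdio.h>

enum cst_kind { CST_REAL, CST_INT, CST_OTHER };

static bool
fold_const_pow (double *result, enum cst_kind k0, double a0,
		enum cst_kind k1, double a1)
{
  if (k0 == CST_REAL && k1 == CST_REAL)  /* real, real -> real */
    {
      *result = pow (a0, a1);
      return true;
    }
  return false;  /* Any other shape: give up, like returning NULL_TREE.  */
}

int
main (void)
{
  double r;
  if (fold_const_pow (&r, CST_REAL, 2.0, CST_REAL, 10.0))
    printf ("%g\n", r);  /* prints 1024 */
  return 0;
}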
Example #8
static tree
fold_const_call_1 (built_in_function fn, tree type, tree arg)
{
  machine_mode mode = TYPE_MODE (type);
  machine_mode arg_mode = TYPE_MODE (TREE_TYPE (arg));

  if (integer_cst_p (arg))
    {
      if (SCALAR_INT_MODE_P (mode))
	{
	  wide_int result;
	  if (fold_const_call_ss (&result, fn, arg, TYPE_PRECISION (type),
				  TREE_TYPE (arg)))
	    return wide_int_to_tree (type, result);
	}
      return NULL_TREE;
    }

  if (real_cst_p (arg))
    {
      gcc_checking_assert (SCALAR_FLOAT_MODE_P (arg_mode));
      if (mode == arg_mode)
	{
	  /* real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_ss (&result, fn, TREE_REAL_CST_PTR (arg),
				  REAL_MODE_FORMAT (mode)))
	    return build_real (type, result);
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE_INNER (mode) == arg_mode)
	{
	  /* real -> complex real.  */
	  REAL_VALUE_TYPE result_real, result_imag;
	  if (fold_const_call_cs (&result_real, &result_imag, fn,
				  TREE_REAL_CST_PTR (arg),
				  REAL_MODE_FORMAT (arg_mode)))
	    return build_complex (type,
				  build_real (TREE_TYPE (type), result_real),
				  build_real (TREE_TYPE (type), result_imag));
	}
      else if (INTEGRAL_TYPE_P (type))
	{
	  /* real -> int.  */
	  wide_int result;
	  if (fold_const_call_ss (&result, fn,
				  TREE_REAL_CST_PTR (arg),
				  TYPE_PRECISION (type),
				  REAL_MODE_FORMAT (arg_mode)))
	    return wide_int_to_tree (type, result);
	}
      return NULL_TREE;
    }

  if (complex_cst_p (arg))
    {
      gcc_checking_assert (COMPLEX_MODE_P (arg_mode));
      machine_mode inner_mode = GET_MODE_INNER (arg_mode);
      tree argr = TREE_REALPART (arg);
      tree argi = TREE_IMAGPART (arg);
      if (mode == arg_mode
	  && real_cst_p (argr)
	  && real_cst_p (argi))
	{
	  /* complex real -> complex real.  */
	  REAL_VALUE_TYPE result_real, result_imag;
	  if (fold_const_call_cc (&result_real, &result_imag, fn,
				  TREE_REAL_CST_PTR (argr),
				  TREE_REAL_CST_PTR (argi),
				  REAL_MODE_FORMAT (inner_mode)))
	    return build_complex (type,
				  build_real (TREE_TYPE (type), result_real),
				  build_real (TREE_TYPE (type), result_imag));
	}
      if (mode == inner_mode
	  && real_cst_p (argr)
	  && real_cst_p (argi))
	{
	  /* complex real -> real.  */
	  REAL_VALUE_TYPE result;
	  if (fold_const_call_sc (&result, fn,
				  TREE_REAL_CST_PTR (argr),
				  TREE_REAL_CST_PTR (argi),
				  REAL_MODE_FORMAT (inner_mode)))
	    return build_real (type, result);
	}
      return NULL_TREE;
    }

  return NULL_TREE;
}
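
The last branch, complex real -> real, covers calls like cabs on a constant complex argument. A minimal worked sketch of folding such a call on the constant parts, assuming cabs as the target function and omitting the folding machinery itself:

#include <complex.h>
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double re = 3.0, im = 4.0;           /* TREE_REALPART / TREE_IMAGPART.  */
  double folded = cabs (re + im * I);  /* Would become a REAL_CST.  */
  printf ("%g\n", folded);             /* prints 5 */
  return 0;
}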