Example #1
/* Function to expand builtin function for
   '[(unspec_volatile [(reg) (imm)])]' pattern.  */
static rtx
nds32_expand_builtin_null_ftype_reg_imm (enum insn_code icode,
					 tree exp, rtx target)
{
  /* Mapping:
       ops[0] <--> value0 <--> arg0
       ops[1] <--> value1 <--> arg1 */
  struct expand_operand ops[2];
  tree arg0, arg1;
  rtx value0, value1;

  /* Grab the incoming arguments and extract their rtx values.  */
  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  value0 = expand_normal (arg0);
  value1 = expand_normal (arg1);

  /* Create operands.  */
  create_input_operand (&ops[0], value0, TYPE_MODE (TREE_TYPE (arg0)));
  create_input_operand (&ops[1], value1, TYPE_MODE (TREE_TYPE (arg1)));

  /* Emit new instruction.  */
  if (!maybe_expand_insn (icode, 2, ops))
    error ("invalid argument to built-in function");

  return target;
}
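
The pattern above generalizes to builtins that also produce a value: add a create_output_operand for operand 0 and return its value. The sketch below is an illustration only (it is not taken from the nds32 port); it assumes the standard helpers from GCC's optabs.h and an icode whose operand 0 is the result.

/* Illustrative sketch (not from the nds32 port above): expand a builtin of
   the shape 'result = insn (arg0)', assuming optabs.h helpers.  */
static rtx
expand_builtin_reg_ftype_reg_sketch (enum insn_code icode, tree exp, rtx target)
{
  struct expand_operand ops[2];
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx value0 = expand_normal (arg0);

  /* ops[0] holds the result, ops[1] the single input argument.  */
  create_output_operand (&ops[0], target, insn_data[icode].operand[0].mode);
  create_input_operand (&ops[1], value0, TYPE_MODE (TREE_TYPE (arg0)));

  if (!maybe_expand_insn (icode, 2, ops))
    error ("invalid argument to built-in function");

  return ops[0].value;
}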
Example #2
rtx
aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
{
  rtx pat;
  aarch64_crc_builtin_datum *d
    = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
  enum insn_code icode = d->icode;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
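
Two idioms recur in this and the later expanders: reuse target only if it has the right mode and satisfies the result predicate, and force each input operand through its predicate, copying it into a fresh register otherwise. A minimal sketch of the second idiom as a stand-alone helper (an assumption for illustration, not part of the aarch64 port):

/* Sketch: make OP acceptable to operand OPNO of ICODE, copying it into a
   register of the required mode if the predicate rejects it.  */
static rtx
force_insn_operand_sketch (enum insn_code icode, int opno, rtx op)
{
  machine_mode mode = insn_data[icode].operand[opno].mode;
  if (!(*insn_data[icode].operand[opno].predicate) (op, mode))
    op = copy_to_mode_reg (mode, op);
  return op;
}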
Example #3
File: builtins.c  Project: Nodplus/gcc
static tree
min_builtin (tree method_return_type, tree orig_call)
{
  /* MIN_EXPR does not handle -0.0 in the Java style.  */
  if (TREE_CODE (method_return_type) == REAL_TYPE)
    return NULL_TREE;
  return fold_build2 (MIN_EXPR, method_return_type,
		      CALL_EXPR_ARG (orig_call, 0),
		      CALL_EXPR_ARG (orig_call, 1));
}
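
The MAX counterpart has the same shape; the sketch below mirrors it with MAX_EXPR, written here as an illustration of the fold_build2 pattern rather than copied from the project above.

static tree
max_builtin_sketch (tree method_return_type, tree orig_call)
{
  /* Like MIN_EXPR, MAX_EXPR does not handle -0.0 in the Java style.  */
  if (TREE_CODE (method_return_type) == REAL_TYPE)
    return NULL_TREE;
  return fold_build2 (MAX_EXPR, method_return_type,
		      CALL_EXPR_ARG (orig_call, 0),
		      CALL_EXPR_ARG (orig_call, 1));
}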
Example #4
HOST_WIDE_INT
extract_sec_implicit_index_arg (location_t location, tree fn)
{
  tree fn_arg;
  HOST_WIDE_INT return_int = 0;

  if (TREE_CODE (fn) == CALL_EXPR)
    {
      fn_arg = CALL_EXPR_ARG (fn, 0);
      if (TREE_CODE (fn_arg) == INTEGER_CST)
	return_int = int_cst_value (fn_arg);
      else
	{
	  /* If the location is unknown, and if fn has a location, then use that
	     information so that the user has a better idea where the error
	     could be.  */
	  if (location == UNKNOWN_LOCATION && EXPR_HAS_LOCATION (fn))
	    location = EXPR_LOCATION (fn);
	  error_at (location, "__sec_implicit_index parameter must be an " 
		    "integer constant expression");
	  return -1;
	}
    }
  return return_int;
}
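
Note that int_cst_value assumes the constant fits in a HOST_WIDE_INT. A more defensive variant of the same first-argument check, using the tree_fits_shwi_p/tree_to_shwi helpers (a sketch; assumes GCC 4.9 or later):

static HOST_WIDE_INT
extract_first_int_arg_sketch (tree call, HOST_WIDE_INT fallback)
{
  if (TREE_CODE (call) != CALL_EXPR || call_expr_nargs (call) == 0)
    return fallback;
  tree arg = CALL_EXPR_ARG (call, 0);
  if (TREE_CODE (arg) == INTEGER_CST && tree_fits_shwi_p (arg))
    return tree_to_shwi (arg);
  return fallback;
}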
Example #5
File: c-ubsan.c  Project: WojciechMigda/gcc
void
ubsan_maybe_instrument_member_call (tree stmt, bool is_ctor)
{
  if (call_expr_nargs (stmt) == 0)
    return;
  tree op = CALL_EXPR_ARG (stmt, 0);
  if (op == error_mark_node
      || !POINTER_TYPE_P (TREE_TYPE (op)))
    return;
  op = ubsan_maybe_instrument_reference_or_call (EXPR_LOCATION (stmt), op,
						 TREE_TYPE (op),
						 is_ctor ? UBSAN_CTOR_CALL
						 : UBSAN_MEMBER_CALL);
  if (op)
    CALL_EXPR_ARG (stmt, 0) = op;
}
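
The interesting detail here is that CALL_EXPR_ARG is usable as an lvalue, so the instrumented pointer can be written back into the call in place. A generic sketch of that shape (an illustration, not from the ubsan sources), parameterized over the transformation applied to the first argument:

/* Sketch: rewrite the first argument of STMT through XFORM, assuming XFORM
   returns NULL_TREE when no rewriting is needed.  */
static void
rewrite_first_call_arg_sketch (tree stmt, tree (*xform) (tree))
{
  if (call_expr_nargs (stmt) == 0)
    return;
  tree op = CALL_EXPR_ARG (stmt, 0);
  if (op == error_mark_node || !POINTER_TYPE_P (TREE_TYPE (op)))
    return;
  op = xform (op);
  if (op)
    CALL_EXPR_ARG (stmt, 0) = op;
}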
Example #6
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
		&aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp, &args[1]);
}
Example #7
File: builtins.c  Project: Nodplus/gcc
static tree
java_build_function_call_expr (tree fn, tree orig_call)
{
  int nargs = call_expr_nargs (orig_call);
  switch (nargs)
    {
      /* Although we could handle the 0-3 argument cases using the general
	 logic in the default case, splitting them out permits folding to
	 be performed without constructing a temporary CALL_EXPR.  */
    case 0:
      return build_call_expr (fn, 0);
    case 1:
      return build_call_expr (fn, 1, CALL_EXPR_ARG (orig_call, 0));
    case 2:
      return build_call_expr (fn, 2,
			      CALL_EXPR_ARG (orig_call, 0),
			      CALL_EXPR_ARG (orig_call, 1));
    case 3:
      return build_call_expr (fn, 3,
			      CALL_EXPR_ARG (orig_call, 0),
			      CALL_EXPR_ARG (orig_call, 1),
			      CALL_EXPR_ARG (orig_call, 2));
    default:
      {
	tree fntype = TREE_TYPE (fn);
	fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fn);
	return fold (build_call_array (TREE_TYPE (fntype),
				       fn, nargs, CALL_EXPR_ARGP (orig_call)));
      }
    }
}
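
For the general case, CALL_EXPR_ARGP gives a pointer to the flat argument array of the original call, so all arguments can be forwarded without a per-arity switch. A sketch of just that path, using the same technique as the default case above:

static tree
forward_all_call_args_sketch (tree fn, tree orig_call)
{
  tree fntype = TREE_TYPE (fn);
  tree fnaddr = build1 (ADDR_EXPR, build_pointer_type (fntype), fn);
  return fold (build_call_array (TREE_TYPE (fntype), fnaddr,
				 call_expr_nargs (orig_call),
				 CALL_EXPR_ARGP (orig_call)));
}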
Example #8
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  */
rtx
aarch64_expand_builtin (tree exp,
		     rtx target,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);
  int icode;
  rtx pat, op0;
  tree arg0;

  switch (fcode)
    {
    case AARCH64_BUILTIN_GET_FPCR:
    case AARCH64_BUILTIN_SET_FPCR:
    case AARCH64_BUILTIN_GET_FPSR:
    case AARCH64_BUILTIN_SET_FPSR:
      if ((fcode == AARCH64_BUILTIN_GET_FPCR)
	  || (fcode == AARCH64_BUILTIN_GET_FPSR))
	{
	  icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
	    CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
	  target = gen_reg_rtx (SImode);
	  pat = GEN_FCN (icode) (target);
	}
      else
	{
	  target = NULL_RTX;
	  icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
	    CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  op0 = expand_normal (arg0);
	  pat = GEN_FCN (icode) (op0);
	}
      emit_insn (pat);
      return target;
    }

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
    return aarch64_simd_expand_builtin (fcode, exp, target);
  else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
    return aarch64_crc32_expand_builtin (fcode, exp, target);

  gcc_unreachable ();
}
Example #9
static tree
get_frame_arg (tree call)
{
  tree arg, argtype;

  gcc_assert (call_expr_nargs (call) >= 1);
    
  arg = CALL_EXPR_ARG (call, 0);
  argtype = TREE_TYPE (arg);
  gcc_assert (TREE_CODE (argtype) == POINTER_TYPE);

  argtype = TREE_TYPE (argtype);
  
  /* If it is passed in as an address, then just use the value directly 
     since the function is inlined.  */
  if (TREE_CODE (arg) == ADDR_EXPR)
    return TREE_OPERAND (arg, 0);
  return arg;
}
Example #10
void parseCallExprNode(CallExprNode *node)
{
    fillType(node);
    fillExprLocation(node);
    Log::dump(node);

// not recommended
//    fillExprOperands(node);

    if (CALL_EXPR_FN(node->gccNode) != NULL_TREE)
    {
        node->function = createParseNode(
            node,
            CALL_EXPR_FN(node->gccNode),
            "function");
    }
    else
    {
        node->functionName = internal_fn_name(CALL_EXPR_IFN (node->gccNode));
        Log::dump(node, "- function: %s", node->functionName.c_str());
    }

//    if (node->parseChilds <= 0)
//        return;

    const int argsCount = call_expr_nargs(node->gccNode);
    for (int f = 0; f < argsCount; f ++)
    {
        node->args.push_back(createParseNode(
            node,
            CALL_EXPR_ARG(node->gccNode, f),
            "arg"));
    }

    node->chain = createParseNode(
        node,
        CALL_EXPR_STATIC_CHAIN(node->gccNode),
        "chain");

    if (node->functionName == "LOAD_LANES")
        return;
}
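
As the branch above shows, a CALL_EXPR carries either a callee expression (CALL_EXPR_FN) or, for GCC internal functions, an internal function code reachable through CALL_EXPR_IFN. A compact sketch of that dispatch plus an argument walk (assumes GCC 5+ with internal-fn.h; debug_tree is GCC's tree dumper):

/* Sketch: show the callee kind of CALL and dump each argument.  */
static void
dump_call_sketch (tree call)
{
  if (CALL_EXPR_FN (call) != NULL_TREE)
    debug_tree (CALL_EXPR_FN (call));
  else
    fprintf (stderr, "internal fn: %s\n",
	     internal_fn_name (CALL_EXPR_IFN (call)));

  for (int i = 0; i < call_expr_nargs (call); i++)
    debug_tree (CALL_EXPR_ARG (call, i));
}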
Example #11
void
cp_ubsan_maybe_instrument_member_call (tree stmt)
{
  if (call_expr_nargs (stmt) == 0)
    return;
  tree *opp = &CALL_EXPR_ARG (stmt, 0);
  tree op = *opp;
  if (op == error_mark_node
      || !POINTER_TYPE_P (TREE_TYPE (op)))
    return;
  while (TREE_CODE (op) == COMPOUND_EXPR)
    {
      opp = &TREE_OPERAND (op, 1);
      op = *opp;
    }
  op = cp_ubsan_maybe_instrument_vptr (EXPR_LOCATION (stmt), op,
				       TREE_TYPE (TREE_TYPE (op)),
				       true, UBSAN_MEMBER_CALL);
  if (op)
    *opp = op;
}
Example #12
File: builtins.c  Project: Nodplus/gcc
static tree
abs_builtin (tree method_return_type, tree orig_call)
{
  return fold_build1 (ABS_EXPR, method_return_type,
		      CALL_EXPR_ARG (orig_call, 0));
}
Example #13
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args)
{
  rtx pat;
  rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand.  */
  int opc = 0;

  if (have_retval)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[opc++] = target;
    }

  for (;;)
    {
      builtin_simd_arg thisarg = args[opc - have_retval];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
	  enum machine_mode mode = insn_data[icode].operand[opc].mode;
	  op[opc] = expand_normal (arg);

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		op[opc] = convert_memory_address (Pmode, op[opc]);
	      /*gcc_assert (GET_MODE (op[opc]) == mode); */
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
		op[opc] = copy_to_mode_reg (mode, op[opc]);
	      break;

	    case SIMD_ARG_LANE_INDEX:
	      /* Must be a previous operand into which this is an index.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  aarch64_simd_lane_bounds (op[opc],
					    0, GET_MODE_NUNITS (vmode), exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[opc])));
		}
	      /* Fall through - if the lane index isn't a constant then
		 the next case will error.  */
	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
	      {
		error ("%Kargument %d must be a constant immediate",
		       exp, opc + 1 - have_retval);
		return const0_rtx;
	      }
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  opc++;
	}
    }

  switch (opc)
    {
    case 1:
      pat = GEN_FCN (icode) (op[0]);
      break;

    case 2:
      pat = GEN_FCN (icode) (op[0], op[1]);
      break;

    case 3:
      pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;

    case 4:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;

    case 5:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;

    case 6:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;

    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
Example #14
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
    {
      rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
      rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
	  && UINTVAL (elementsize) != 0
	  && UINTVAL (totalsize) != 0)
	{
	  rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
          if (CONST_INT_P (lane_idx))
	    aarch64_simd_lane_bounds (lane_idx, 0,
				      UINTVAL (totalsize)
				       / UINTVAL (elementsize),
				      exp);
          else
	    error ("%Klane index must be a constant immediate", exp);
	}
      else
	error ("%Ktotal size and element size must be a non-zero constant immediate", exp);
      /* Don't generate any RTL.  */
      return const0_rtx;
    }
  aarch64_simd_builtin_datum *d =
		&aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp, &args[1]);
}
Example #15
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args)
{
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  for (;;)
    {
      builtin_simd_arg thisarg = args[argc];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_LANE_INDEX:
	      /* Must be a previous operand into which this is an index.  */
	      gcc_assert (argc > 0);
	      if (CONST_INT_P (op[argc]))
		{
		  enum machine_mode vmode = mode[argc - 1];
		  aarch64_simd_lane_bounds (op[argc],
					    0, GET_MODE_NUNITS (vmode));
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[argc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[argc])));
		}
	      /* Fall through - if the lane index isn't a constant then
		 the next case will error.  */
	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
	      {
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
		       "expected %<const int%>", argc + 1);
		return const0_rtx;
	      }
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
Example #16
File: builtins.c  Project: Nodplus/gcc
static tree
convert_real (tree method_return_type, tree orig_call)
{
  return build1 (VIEW_CONVERT_EXPR, method_return_type,
		 CALL_EXPR_ARG (orig_call, 0));
}
Example #17
static tree
fix_builtin_array_notation_fn (tree an_builtin_fn, tree *new_var)
{
  tree new_var_type = NULL_TREE, func_parm, new_expr, new_yes_expr, new_no_expr;
  tree array_ind_value = NULL_TREE, new_no_ind, new_yes_ind, new_no_list;
  tree new_yes_list, new_cond_expr, new_var_init = NULL_TREE;
  tree new_exp_init = NULL_TREE;
  vec<tree, va_gc> *array_list = NULL, *array_operand = NULL;
  size_t list_size = 0, rank = 0, ii = 0;
  tree loop_init, array_op0;
  tree identity_value = NULL_TREE, call_fn = NULL_TREE, new_call_expr, body;
  location_t location = UNKNOWN_LOCATION;
  tree loop_with_init = alloc_stmt_list ();
  vec<vec<an_parts> > an_info = vNULL;
  vec<an_loop_parts> an_loop_info = vNULL;
  enum built_in_function an_type =
    is_cilkplus_reduce_builtin (CALL_EXPR_FN (an_builtin_fn));
  if (an_type == BUILT_IN_NONE)
    return NULL_TREE;

  /* Builtin call should contain at least one argument.  */
  if (call_expr_nargs (an_builtin_fn) == 0)
    {
      error_at (EXPR_LOCATION (an_builtin_fn), "Invalid builtin arguments");
      return error_mark_node;
    }

  if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE
      || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    {
      call_fn = CALL_EXPR_ARG (an_builtin_fn, 2);
      if (TREE_CODE (call_fn) == ADDR_EXPR)
	call_fn = TREE_OPERAND (call_fn, 0);
      identity_value = CALL_EXPR_ARG (an_builtin_fn, 0);
      func_parm = CALL_EXPR_ARG (an_builtin_fn, 1);
    }
  else
    func_parm = CALL_EXPR_ARG (an_builtin_fn, 0);
  
  /* Fully fold any EXCESSIVE_PRECISION EXPR that can occur in the function
     parameter.  */
  func_parm = c_fully_fold (func_parm, false, NULL);
  if (func_parm == error_mark_node)
    return error_mark_node;
  
  location = EXPR_LOCATION (an_builtin_fn);
  
  if (!find_rank (location, an_builtin_fn, an_builtin_fn, true, &rank))
    return error_mark_node;
 
  if (rank == 0)
    {
      error_at (location, "Invalid builtin arguments");
      return error_mark_node;
    }
  else if (rank > 1 
	   && (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	       || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND))
    {
      error_at (location, "__sec_reduce_min_ind or __sec_reduce_max_ind cannot"
		" have arrays with dimension greater than 1");
      return error_mark_node;
    }
  
  extract_array_notation_exprs (func_parm, true, &array_list);
  list_size = vec_safe_length (array_list);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      new_var_type = integer_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_var_type = integer_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      if (call_fn && identity_value) 
	new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      new_var_type = NULL_TREE;
      break;
    default:
      gcc_unreachable (); 
    }

  an_loop_info.safe_grow_cleared (rank);
  cilkplus_extract_an_triplets (array_list, list_size, rank, &an_info);
  loop_init = alloc_stmt_list ();

  for (ii = 0; ii < rank; ii++)
    {
      an_loop_info[ii].var = create_tmp_var (integer_type_node);
      an_loop_info[ii].ind_init =
	build_modify_expr (location, an_loop_info[ii].var,
			   TREE_TYPE (an_loop_info[ii].var), NOP_EXPR,
			   location,
			   build_int_cst (TREE_TYPE (an_loop_info[ii].var), 0),
			   TREE_TYPE (an_loop_info[ii].var));	
    }
  array_operand = create_array_refs (location, an_info, an_loop_info,
				     list_size, rank);
  replace_array_notations (&func_parm, true, array_list, array_operand);

  create_cmp_incr (location, &an_loop_info, rank, an_info);
  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    {
      *new_var = build_decl (location, VAR_DECL, NULL_TREE, new_var_type);
      gcc_assert (*new_var && *new_var != error_mark_node);
    }
  else
    *new_var = NULL_TREE;
  
  if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
      || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND)
    array_ind_value = build_decl (location, VAR_DECL, NULL_TREE, 
				  TREE_TYPE (func_parm));
  array_op0 = (*array_operand)[0];
  if (TREE_CODE (array_op0) == INDIRECT_REF)
    array_op0 = TREE_OPERAND (array_op0, 0);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (new_var_type), new_var_type);
      new_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), PLUS_EXPR,
	 location, func_parm, TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_one_cst (new_var_type), new_var_type);
      new_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), MULT_EXPR,
	 location, func_parm, TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_one_cst (new_var_type), new_var_type);
      /* Initially you assume everything is zero, now if we find a case where 
	 it is NOT true, then we set the result to false. Otherwise 
	 we just keep the previous value.  */
      new_yes_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (TREE_TYPE (*new_var)),
	 TREE_TYPE (*new_var));
      new_no_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_cond_expr = build2 (NE_EXPR, TREE_TYPE (func_parm), func_parm,
			      build_zero_cst (TREE_TYPE (func_parm)));
      new_expr = build_conditional_expr
	(location, new_cond_expr, false, new_yes_expr,
	 TREE_TYPE (new_yes_expr), new_no_expr, TREE_TYPE (new_no_expr));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_one_cst (new_var_type), new_var_type);
      /* Initially you assume everything is non-zero, now if we find a case
	 where it is NOT true, then we set the result to false.  Otherwise
	 we just keep the previous value.  */
      new_yes_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (TREE_TYPE (*new_var)),
	 TREE_TYPE (*new_var));
      new_no_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_cond_expr = build2 (EQ_EXPR, TREE_TYPE (func_parm), func_parm,
			      build_zero_cst (TREE_TYPE (func_parm)));
      new_expr = build_conditional_expr
	(location, new_cond_expr, false, new_yes_expr,
	 TREE_TYPE (new_yes_expr), new_no_expr, TREE_TYPE (new_no_expr));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (new_var_type), new_var_type);
      /* Initially we assume there are NO zeros in the list. When we find 
	 a non-zero, we keep the previous value.  If we find a zero, we 
	 set the value to true.  */
      new_yes_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_one_cst (new_var_type), new_var_type);
      new_no_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_cond_expr = build2 (EQ_EXPR, TREE_TYPE (func_parm), func_parm,
			      build_zero_cst (TREE_TYPE (func_parm)));
      new_expr = build_conditional_expr
	(location, new_cond_expr, false, new_yes_expr,
	 TREE_TYPE (new_yes_expr), new_no_expr, TREE_TYPE (new_no_expr));   
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (new_var_type), new_var_type);
      /* Initially we assume there are NO non-zeros in the list. When we find 
	 a zero, we keep the previous value.  If we find a non-zero, we set 
	 the value to true.  */
      new_yes_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_one_cst (new_var_type), new_var_type);
      new_no_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_cond_expr = build2 (NE_EXPR, TREE_TYPE (func_parm), func_parm,
			      build_zero_cst (TREE_TYPE (func_parm)));
      new_expr = build_conditional_expr
	(location, new_cond_expr, false, new_yes_expr,
	 TREE_TYPE (new_yes_expr), new_no_expr, TREE_TYPE (new_no_expr));   
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
      if (TYPE_MIN_VALUE (new_var_type))
	new_var_init = build_modify_expr
	  (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	   location, TYPE_MIN_VALUE (new_var_type), new_var_type);
      else
	new_var_init = build_modify_expr
	  (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	   location, func_parm, new_var_type);
      new_no_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_yes_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, func_parm, TREE_TYPE (*new_var));
      new_expr = build_conditional_expr
	(location,
	 build2 (LT_EXPR, TREE_TYPE (*new_var), *new_var, func_parm), false,
	 new_yes_expr, TREE_TYPE (*new_var), new_no_expr, TREE_TYPE (*new_var));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      if (TYPE_MAX_VALUE (new_var_type))
	new_var_init = build_modify_expr
	  (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	   location, TYPE_MAX_VALUE (new_var_type), new_var_type);
      else
	new_var_init = build_modify_expr
	  (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	   location, func_parm, new_var_type);
      new_no_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_yes_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, func_parm, TREE_TYPE (*new_var));
      new_expr = build_conditional_expr
	(location,
	 build2 (GT_EXPR, TREE_TYPE (*new_var), *new_var, func_parm), false,
	 new_yes_expr, TREE_TYPE (*new_var), new_no_expr, TREE_TYPE (*new_var));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (new_var_type), new_var_type);
      new_exp_init = build_modify_expr
	(location, array_ind_value, TREE_TYPE (array_ind_value),
	 NOP_EXPR, location, func_parm, TREE_TYPE (func_parm));
      new_no_ind = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_no_expr = build_modify_expr
	(location, array_ind_value, TREE_TYPE (array_ind_value),
	 NOP_EXPR,
	 location, array_ind_value, TREE_TYPE (array_ind_value));
      if (list_size > 1)
	{
	  new_yes_ind = build_modify_expr
	    (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	     location, an_loop_info[0].var, TREE_TYPE (an_loop_info[0].var));
	  new_yes_expr = build_modify_expr
	    (location, array_ind_value, TREE_TYPE (array_ind_value),
	     NOP_EXPR,
	     location, func_parm, TREE_TYPE ((*array_operand)[0]));
	}
      else
	{
	  new_yes_ind = build_modify_expr
	    (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	     location, TREE_OPERAND (array_op0, 1),
	     TREE_TYPE (TREE_OPERAND (array_op0, 1)));
	  new_yes_expr = build_modify_expr
	    (location, array_ind_value, TREE_TYPE (array_ind_value),
	     NOP_EXPR,
	     location, func_parm, TREE_OPERAND (array_op0, 1));
	}
      new_yes_list = alloc_stmt_list ();
      append_to_statement_list (new_yes_ind, &new_yes_list);
      append_to_statement_list (new_yes_expr, &new_yes_list);

      new_no_list = alloc_stmt_list ();
      append_to_statement_list (new_no_ind, &new_no_list);
      append_to_statement_list (new_no_expr, &new_no_list);
 
      new_expr = build_conditional_expr
	(location,
	 build2 (LE_EXPR, TREE_TYPE (array_ind_value), array_ind_value,
		 func_parm),
	 false,
	 new_yes_list, TREE_TYPE (*new_var), new_no_list, TREE_TYPE (*new_var));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, build_zero_cst (new_var_type), new_var_type);
      new_exp_init = build_modify_expr
	(location, array_ind_value, TREE_TYPE (array_ind_value),
	 NOP_EXPR, location, func_parm, TREE_TYPE (func_parm));
      new_no_ind = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, *new_var, TREE_TYPE (*new_var));
      new_no_expr = build_modify_expr
	(location, array_ind_value, TREE_TYPE (array_ind_value),
	 NOP_EXPR,
	 location, array_ind_value, TREE_TYPE (array_ind_value));
      if (list_size > 1)
	{
	  new_yes_ind = build_modify_expr
	    (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	     location, an_loop_info[0].var, TREE_TYPE (an_loop_info[0].var));
	  new_yes_expr = build_modify_expr
	    (location, array_ind_value, TREE_TYPE (array_ind_value),
	     NOP_EXPR,
	     location, func_parm, TREE_TYPE (array_op0));
	}
      else
	{
	  new_yes_ind = build_modify_expr
	    (location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	     location, TREE_OPERAND (array_op0, 1),
	     TREE_TYPE (TREE_OPERAND (array_op0, 1)));
	  new_yes_expr = build_modify_expr
	    (location, array_ind_value, TREE_TYPE (array_ind_value),
	     NOP_EXPR,
	     location, func_parm, TREE_OPERAND (array_op0, 1));
	}
      new_yes_list = alloc_stmt_list ();
      append_to_statement_list (new_yes_ind, &new_yes_list);
      append_to_statement_list (new_yes_expr, &new_yes_list);

      new_no_list = alloc_stmt_list ();
      append_to_statement_list (new_no_ind, &new_no_list);
      append_to_statement_list (new_no_expr, &new_no_list);
 
      new_expr = build_conditional_expr
	(location,
	 build2 (GE_EXPR, TREE_TYPE (array_ind_value), array_ind_value,
		 func_parm),
	 false,
	 new_yes_list, TREE_TYPE (*new_var), new_no_list, TREE_TYPE (*new_var));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      new_var_init = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, identity_value, new_var_type);
      new_call_expr = build_call_expr (call_fn, 2, *new_var, func_parm);
      new_expr = build_modify_expr
	(location, *new_var, TREE_TYPE (*new_var), NOP_EXPR,
	 location, new_call_expr, TREE_TYPE (*new_var));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      new_expr = build_call_expr (call_fn, 2, identity_value, func_parm);
      break;
    default:
      gcc_unreachable ();
      break;
    }

  for (ii = 0; ii < rank; ii++)
    append_to_statement_list (an_loop_info[ii].ind_init, &loop_init);

  if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
      || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND)
    append_to_statement_list (new_exp_init, &loop_init);
  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    append_to_statement_list (new_var_init, &loop_init);

  append_to_statement_list_force (loop_init, &loop_with_init);
  body = new_expr;
  for (ii = 0; ii < rank; ii++)
    {
      tree new_loop = push_stmt_list ();
      c_finish_loop (location, an_loop_info[ii].cmp, an_loop_info[ii].incr,
		     body, NULL_TREE, NULL_TREE, true);
      body = pop_stmt_list (new_loop);
    }
  append_to_statement_list_force (body, &loop_with_init);

  an_info.release ();
  an_loop_info.release ();
  
  return loop_with_init;
}
Example #18
static tree
expand_sec_reduce_builtin (tree an_builtin_fn, tree *new_var)
{
  tree new_var_type = NULL_TREE, func_parm, new_yes_expr, new_no_expr;
  tree array_ind_value = NULL_TREE, new_no_ind, new_yes_ind, new_no_list;
  tree new_yes_list, new_cond_expr, new_expr = NULL_TREE; 
  vec<tree, va_gc> *array_list = NULL, *array_operand = NULL;
  size_t list_size = 0, rank = 0, ii = 0;
  tree  body, an_init, loop_with_init = alloc_stmt_list ();
  tree array_op0, comp_node = NULL_TREE;
  tree call_fn = NULL_TREE, identity_value = NULL_TREE;
  tree init = NULL_TREE, cond_init = NULL_TREE;
  enum tree_code code = NOP_EXPR;
  location_t location = UNKNOWN_LOCATION;
  vec<vec<an_parts> > an_info = vNULL;
  auto_vec<an_loop_parts> an_loop_info;
  enum built_in_function an_type =
    is_cilkplus_reduce_builtin (CALL_EXPR_FN (an_builtin_fn));
  vec <tree, va_gc> *func_args;
  
  if (an_type == BUILT_IN_NONE)
    return NULL_TREE;

  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE
      && an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    func_parm = CALL_EXPR_ARG (an_builtin_fn, 0);
  else
    {
      call_fn = CALL_EXPR_ARG (an_builtin_fn, 2);

      /* We need to do this because we are "faking" the builtin function types,
	 so the compiler does a bunch of typecasts and this will get rid of
	 all that!  */
      STRIP_NOPS (call_fn);
      if (TREE_CODE (call_fn) != OVERLOAD
	  && TREE_CODE (call_fn) != FUNCTION_DECL)
	call_fn = TREE_OPERAND (call_fn, 0);
      identity_value = CALL_EXPR_ARG (an_builtin_fn, 0);
      func_parm = CALL_EXPR_ARG (an_builtin_fn, 1);
      STRIP_NOPS (identity_value);
    }
  STRIP_NOPS (func_parm);
  
  location = EXPR_LOCATION (an_builtin_fn);
  
  /* Note about using find_rank (): If find_rank returns false, then it must
     have already reported an error, thus we just return an error_mark_node
     without any doing any error emission.  */  
  if (!find_rank (location, an_builtin_fn, an_builtin_fn, true, &rank))
      return error_mark_node;
  if (rank == 0)
    {
      error_at (location, "Invalid builtin arguments");
      return error_mark_node;
    }
  else if (rank > 1 
	   && (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	       || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND))
    { 
      error_at (location, "__sec_reduce_min_ind or __sec_reduce_max_ind cannot "
		"have arrays with dimension greater than 1");
      return error_mark_node;
    }
  
  extract_array_notation_exprs (func_parm, true, &array_list);
  list_size = vec_safe_length (array_list);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      new_var_type = boolean_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_var_type = size_type_node;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      if (call_fn && identity_value)
	new_var_type = TREE_TYPE ((*array_list)[0]);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      new_var_type = NULL_TREE;
      break;
    default:
      gcc_unreachable ();
    }
    
  if (new_var_type && TREE_CODE (new_var_type) == ARRAY_TYPE)
    new_var_type = TREE_TYPE (new_var_type);
  an_loop_info.safe_grow_cleared (rank);

  an_init = push_stmt_list ();

  /* Assign the array notation components to variable so that they can satisfy
     the exec-once rule.  */
  for (ii = 0; ii < list_size; ii++)
    if (TREE_CODE ((*array_list)[ii]) == ARRAY_NOTATION_REF)
      {
	tree anode = (*array_list)[ii];
	make_triplet_val_inv (&ARRAY_NOTATION_START (anode));
	make_triplet_val_inv (&ARRAY_NOTATION_LENGTH (anode));
	make_triplet_val_inv (&ARRAY_NOTATION_STRIDE (anode));
      }
  cilkplus_extract_an_triplets (array_list, list_size, rank, &an_info);
  for (ii = 0; ii < rank; ii++)
    {
      tree typ = ptrdiff_type_node;

      /* In this place, we are using get_temp_regvar instead of 
	 create_temporary_var if an_type is SEC_REDUCE_MAX/MIN_IND because
	 the array_ind_value depends on this value being initalized to 0.  */
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
	  || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND) 
	an_loop_info[ii].var = get_temp_regvar (typ, build_zero_cst (typ));
      else
	{
	  an_loop_info[ii].var = create_temporary_var (typ);
	  add_decl_expr (an_loop_info[ii].var);
	}
      an_loop_info[ii].ind_init = 
	build_x_modify_expr (location, an_loop_info[ii].var, INIT_EXPR,
			     build_zero_cst (typ), tf_warning_or_error);
    }
  array_operand = create_array_refs (location, an_info, an_loop_info,
				      list_size, rank);
  replace_array_notations (&func_parm, true, array_list, array_operand);
  
  if (!TREE_TYPE (func_parm))      
    TREE_TYPE (func_parm) = TREE_TYPE ((*array_list)[0]);
  
  create_cmp_incr (location, &an_loop_info, rank, an_info, tf_warning_or_error);
  if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND
      || an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND) 
    array_ind_value = get_temp_regvar (TREE_TYPE (func_parm), func_parm);

  array_op0 = (*array_operand)[0];
  if (INDIRECT_REF_P (array_op0))
    array_op0 = TREE_OPERAND (array_op0, 0);
  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
      code = PLUS_EXPR;
      init = build_zero_cst (new_var_type);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:
      code = MULT_EXPR;
      init = build_one_cst (new_var_type);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      code = ((an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO) ? EQ_EXPR
	: NE_EXPR);
      init = build_zero_cst (new_var_type);
      cond_init = build_one_cst (new_var_type);
      comp_node = build_zero_cst (TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
      code = ((an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO) ? NE_EXPR
	: EQ_EXPR);
      init = build_one_cst (new_var_type);
      cond_init = build_zero_cst (new_var_type);
      comp_node = build_zero_cst (TREE_TYPE (func_parm));
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
      code = MAX_EXPR;
      init = (TYPE_MIN_VALUE (new_var_type) ? TYPE_MIN_VALUE (new_var_type)
	: func_parm);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      code = MIN_EXPR;
      init = (TYPE_MAX_VALUE (new_var_type) ? TYPE_MAX_VALUE (new_var_type)
	: func_parm);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
      code = (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND ? LE_EXPR
	: GE_EXPR);
      init = an_loop_info[0].var;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
      init = identity_value;
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      init = NULL_TREE;
      break;
    default:
      gcc_unreachable ();
    }

  if (an_type != BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING)
    *new_var = get_temp_regvar (new_var_type, init);
  else
    *new_var = NULL_TREE;

  switch (an_type)
    {
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ADD:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUL:      
      new_expr = build_x_modify_expr (location, *new_var, code, func_parm,
				      tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ALL_NONZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_ZERO:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_ANY_NONZERO:
      /* In all these cases, assume the false case is true and as soon as
	 we find a true case,  set the true flag on and latch it in.  */
      new_yes_expr = build_x_modify_expr (location, *new_var, NOP_EXPR,
					  cond_init, tf_warning_or_error);
      new_no_expr = build_x_modify_expr (location, *new_var, NOP_EXPR,
					 *new_var, tf_warning_or_error);
      new_cond_expr = build_x_binary_op
	(location, code, func_parm, TREE_CODE (func_parm), comp_node,
	 TREE_CODE (comp_node), NULL, tf_warning_or_error);
      new_expr = build_x_conditional_expr (location, new_cond_expr,
					   new_yes_expr, new_no_expr,
					   tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN:
      new_cond_expr = build_x_binary_op
	(location, code, *new_var, TREE_CODE (*new_var), func_parm,
	 TREE_CODE (func_parm), NULL, tf_warning_or_error);
      new_expr = build_x_modify_expr (location, *new_var, NOP_EXPR, func_parm,
				      tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MAX_IND:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MIN_IND:
      new_yes_expr = build_x_modify_expr (location, array_ind_value, NOP_EXPR,
					  func_parm, tf_warning_or_error);
      new_no_expr = build_x_modify_expr (location, array_ind_value, NOP_EXPR,
					 array_ind_value, tf_warning_or_error);
      if (list_size > 1)
	new_yes_ind = build_x_modify_expr (location, *new_var, NOP_EXPR,
					   an_loop_info[0].var,
					   tf_warning_or_error);
      else
	new_yes_ind = build_x_modify_expr (location, *new_var, NOP_EXPR,
					   TREE_OPERAND (array_op0, 1),
					   tf_warning_or_error);
      new_no_ind = build_x_modify_expr (location, *new_var, NOP_EXPR, *new_var,
					tf_warning_or_error);
      new_yes_list = alloc_stmt_list ();
      append_to_statement_list (new_yes_ind, &new_yes_list);
      append_to_statement_list (new_yes_expr, &new_yes_list);

      new_no_list = alloc_stmt_list ();
      append_to_statement_list (new_no_ind, &new_no_list);
      append_to_statement_list (new_no_expr, &new_no_list);

      new_cond_expr = build_x_binary_op (location, code, array_ind_value,
					 TREE_CODE (array_ind_value), func_parm,
					 TREE_CODE (func_parm), NULL,
					 tf_warning_or_error);
      new_expr = build_x_conditional_expr (location, new_cond_expr,
					   new_yes_list, new_no_list,
					   tf_warning_or_error);
      break;
    case BUILT_IN_CILKPLUS_SEC_REDUCE:
    case BUILT_IN_CILKPLUS_SEC_REDUCE_MUTATING:
      func_args = make_tree_vector ();
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE)
	vec_safe_push (func_args, *new_var);
      else
	vec_safe_push (func_args, identity_value);
      vec_safe_push (func_args, func_parm);

      new_expr = finish_call_expr (call_fn, &func_args, false, true,
				   tf_warning_or_error);
      if (an_type == BUILT_IN_CILKPLUS_SEC_REDUCE)
	new_expr = build_x_modify_expr (location, *new_var, NOP_EXPR, new_expr,
					tf_warning_or_error);
      release_tree_vector (func_args);
      break;
    default:
      gcc_unreachable ();
    }
  an_init = pop_stmt_list (an_init);
  append_to_statement_list (an_init, &loop_with_init);
  body = new_expr;

  for (ii = 0; ii < rank; ii++)
    {
      tree new_loop = push_stmt_list ();
      create_an_loop (an_loop_info[ii].ind_init, an_loop_info[ii].cmp,
		      an_loop_info[ii].incr, body);
      body = pop_stmt_list (new_loop);
    }
  append_to_statement_list (body, &loop_with_init);

  release_vec_vec (an_info);

  return loop_with_init;
}
Example #19
File: lambda.c  Project: nguyentu1602/gcc
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
    && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		tree a = make_pack_expansion (tgt);
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
Example #20
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args)
{
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  for (;;)
    {
      builtin_simd_arg thisarg = args[argc];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
	      {
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
		       "expected %<const int%>", argc + 1);
		return const0_rtx;
	      }
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}