/* Return true when all the data references in STMT have access
   functions that Graphite can represent, i.e. every DR_ACCESS_FN is
   accepted by graphite_can_represent_scev.  */

static bool
stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
			     gimple stmt)
{
  data_reference_p dr;
  unsigned i;
  int j;
  bool res = true;
  vec<data_reference_p> drs = vNULL;
  loop_p outer;

  for (outer = loop_containing_stmt (stmt); outer; outer = loop_outer (outer))
    {
      graphite_find_data_references_in_stmt (outer,
					     loop_containing_stmt (stmt),
					     stmt, &drs);

      FOR_EACH_VEC_ELT (drs, j, dr)
	for (i = 0; i < DR_NUM_DIMENSIONS (dr); i++)
	  if (!graphite_can_represent_scev (DR_ACCESS_FN (dr, i)))
	    {
	      res = false;
	      goto done;
	    }

      free_data_refs (drs);
      drs.create (0);
    }

 done:
  free_data_refs (drs);
  return res;
}
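
/* Return true when STMT produces an SSA name (the LHS of a GIMPLE_CALL
   or GIMPLE_ASSIGN, or the result of a GIMPLE_PHI) that has uses
   outside the loop containing STMT.  */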
static bool
stmt_has_scalar_dependences_outside_loop (gimple stmt)
{
  tree name;

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
    case GIMPLE_ASSIGN:
      name = gimple_get_lhs (stmt);
      break;

    case GIMPLE_PHI:
      name = gimple_phi_result (stmt);
      break;

    default:
      return false;
    }

  return (name
	  && TREE_CODE (name) == SSA_NAME
	  && ssa_name_has_uses_outside_loop_p (name,
					       loop_containing_stmt (stmt)));
}
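
/* Return true when the SSA name DEF has a use in a statement that is
   not contained in LOOP.  */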
static bool
ssa_name_has_uses_outside_loop_p (tree def, loop_p loop)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
    if (loop != loop_containing_stmt (USE_STMT (use_p)))
      return true;

  return false;
}
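
/* Variant of stmt_has_simple_data_refs_p above: return true when every
   data reference in STMT has access functions accepted by
   graphite_can_represent_scev and is built only from ARRAY_REF, MEM_REF
   and COMPONENT_REF nodes.  */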
static bool
stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
			     gimple *stmt)
{
  data_reference_p dr;
  int j;
  bool res = true;
  vec<data_reference_p> drs = vNULL;
  loop_p outer;

  for (outer = loop_containing_stmt (stmt); outer; outer = loop_outer (outer))
    {
      graphite_find_data_references_in_stmt (outer,
					     loop_containing_stmt (stmt),
					     stmt, &drs);

      FOR_EACH_VEC_ELT (drs, j, dr)
	{
	  int nb_subscripts = DR_NUM_DIMENSIONS (dr);
	  tree ref = DR_REF (dr);

	  for (int i = nb_subscripts - 1; i >= 0; i--)
	    {
	      if (!graphite_can_represent_scev (DR_ACCESS_FN (dr, i))
		  || (TREE_CODE (ref) != ARRAY_REF
		      && TREE_CODE (ref) != MEM_REF
		      && TREE_CODE (ref) != COMPONENT_REF))
		{
		  free_data_refs (drs);
		  return false;
		}

	      ref = TREE_OPERAND (ref, 0);
	    }
	}

      free_data_refs (drs);
      drs.create (0);
    }

  return res;
}
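
/* Generate a call to memset that zeroes the memory written by STMT,
   whose written reference is OP0, over NB_ITER iterations, and insert
   it after the iterator BSI.  */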
static void
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
		      gimple_stmt_iterator bsi)
{
  tree addr_base, nb_bytes;
  bool res = false;
  gimple_seq stmt_list = NULL, stmts;
  gimple fn_call;
  tree mem, fn;
  struct data_reference *dr = XCNEW (struct data_reference);
  location_t loc = gimple_location (stmt);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  res = dr_analyze_innermost (dr, loop_containing_stmt (stmt));
  gcc_assert (res && stride_of_unit_type_p (DR_STEP (dr), TREE_TYPE (op0)));

  nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
  addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
  addr_base = fold_convert_loc (loc, sizetype, addr_base);

  /* Test for a negative stride, iterating over every element.  */
  if (tree_int_cst_sgn (DR_STEP (dr)) == -1)
    {
      addr_base = size_binop_loc (loc, MINUS_EXPR, addr_base,
				  fold_convert_loc (loc, sizetype, nb_bytes));
      addr_base = size_binop_loc (loc, PLUS_EXPR, addr_base,
				  TYPE_SIZE_UNIT (TREE_TYPE (op0)));
    }

  addr_base = fold_build_pointer_plus_loc (loc,
					   DR_BASE_ADDRESS (dr), addr_base);
  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fn = build_fold_addr_expr (builtin_decl_implicit (BUILT_IN_MEMSET));
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);
  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  free_data_ref (dr);
}
Example #6
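
/* Return true if OP, used in STMT inside LOOP, will evaluate to a
   constant once LOOP is completely peeled: OP is already a gimple
   invariant, a load from a constant aggregate with analyzable indices,
   or a simple induction variable with invariant base and step.  */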
static bool
constant_after_peeling (tree op, gimple *stmt, struct loop *loop)
{
  affine_iv iv;

  if (is_gimple_min_invariant (op))
    return true;

  /* We can still fold accesses to constant arrays when index is known.  */
  if (TREE_CODE (op) != SSA_NAME)
    {
      tree base = op;

      /* First make fast look if we see constant array inside.  */
      while (handled_component_p (base))
	base = TREE_OPERAND (base, 0);
      if ((DECL_P (base)
	   && ctor_for_folding (base) != error_mark_node)
	  || CONSTANT_CLASS_P (base))
	{
	  /* If so, see if we understand all the indices.  */
	  base = op;
	  while (handled_component_p (base))
	    {
	      if (TREE_CODE (base) == ARRAY_REF
		  && !constant_after_peeling (TREE_OPERAND (base, 1), stmt, loop))
		return false;
	      base = TREE_OPERAND (base, 0);
	    }
	  return true;
	}
      return false;
    }

  /* Induction variables are constants.  */
  if (!simple_iv (loop, loop_containing_stmt (stmt), op, &iv, false))
    return false;
  if (!is_gimple_min_invariant (iv.base))
    return false;
  if (!is_gimple_min_invariant (iv.step))
    return false;
  return true;
}
Example #7
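
/* Rewrite the single bit test "(A >> B) & 1 != 0", where B is invariant
   in the loop of the statement at BSI but A is not, into
   "A & (1 << B) != 0" so that the shift becomes loop invariant.  Return
   the first newly created statement, or the original statement when the
   pattern does not match.  */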
static tree
rewrite_bittest (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
  use_operand_p use;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || TREE_CODE (use_stmt) != COND_EXPR)
    return stmt;
  t = COND_EXPR_COND (use_stmt);
  if (TREE_OPERAND (t, 0) != lhs
      || (TREE_CODE (t) != NE_EXPR
	  && TREE_CODE (t) != EQ_EXPR)
      || !integer_zerop (TREE_OPERAND (t, 1)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  t = GIMPLE_STMT_OPERAND (stmt1, 1);
  if (TREE_CODE (t) == NOP_EXPR
      || TREE_CODE (t) == CONVERT_EXPR)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
	return stmt;
      t = GIMPLE_STMT_OPERAND (stmt1, 1);
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (TREE_CODE (t) == RSHIFT_EXPR
      && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
                                        loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
                                        loop_containing_stmt (stmt1)) == NULL)
    {
      tree a = TREE_OPERAND (t, 0);
      tree b = TREE_OPERAND (t, 1);

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt1);
      GIMPLE_STMT_OPERAND (stmt1, 0) = name;

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt2);
      GIMPLE_STMT_OPERAND (stmt2, 0) = name;

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      TREE_OPERAND (COND_EXPR_COND (use_stmt), 1)
	= build_int_cst_type (TREE_TYPE (name), 0);

      bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
      bsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}
Example #8
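
/* Collect loop interchange statistics for LOOP inside the nest rooted
   at FIRST_LOOP: the sum of the absolute dependence distances carried
   by LOOP (DEPENDENCE_STEPS), the number of dependences with a zero
   distance at LOOP's depth (NB_DEPS_NOT_CARRIED_BY_LOOP), and the sum
   of the constant access strides of the data references in the inner
   loop (ACCESS_STRIDES).  */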
static void
gather_interchange_stats (varray_type dependence_relations, 
			  varray_type datarefs,
			  struct loop *loop,
			  struct loop *first_loop,
			  unsigned int *dependence_steps, 
			  unsigned int *nb_deps_not_carried_by_loop, 
			  unsigned int *access_strides)
{
  unsigned int i;

  *dependence_steps = 0;
  *nb_deps_not_carried_by_loop = 0;
  *access_strides = 0;

  for (i = 0; i < VARRAY_ACTIVE_SIZE (dependence_relations); i++)
    {
      int dist;
      struct data_dependence_relation *ddr = 
	(struct data_dependence_relation *) 
	VARRAY_GENERIC_PTR (dependence_relations, i);

      /* If we don't know anything about this dependence, or the distance
	 vector is NULL, or there is no dependence, then there is no reuse of
	 data.  */

      if (DDR_DIST_VECT (ddr) == NULL
	  || DDR_ARE_DEPENDENT (ddr) == chrec_dont_know
	  || DDR_ARE_DEPENDENT (ddr) == chrec_known)
	continue;

      dist = DDR_DIST_VECT (ddr)[loop->depth - first_loop->depth];
      if (dist == 0)
	(*nb_deps_not_carried_by_loop) += 1;
      else if (dist < 0)
	(*dependence_steps) += -dist;
      else
	(*dependence_steps) += dist;
    }

  /* Compute the access strides.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (datarefs); i++)
    {
      unsigned int it;
      struct data_reference *dr = VARRAY_GENERIC_PTR (datarefs, i);
      tree stmt = DR_STMT (dr);
      struct loop *stmt_loop = loop_containing_stmt (stmt);
      struct loop *inner_loop = first_loop->inner;
      
      if (inner_loop != stmt_loop 
	  && !flow_loop_nested_p (inner_loop, stmt_loop))
	continue;
      for (it = 0; it < DR_NUM_DIMENSIONS (dr); it++)
	{
	  tree chrec = DR_ACCESS_FN (dr, it);
	  tree tstride = evolution_part_in_loop_num 
	    (chrec, loop->num);
	  
	  if (tstride == NULL_TREE
	      || TREE_CODE (tstride) != INTEGER_CST)
	    continue;
	  
	  (*access_strides) += int_cst_value (tstride);
	}
    }
}