Example #1
tree
chrec_fold_multiply (tree type, 
		     tree op0,
		     tree op1)
{
  if (automatically_generated_chrec_p (op0)
      || automatically_generated_chrec_p (op1))
    return chrec_fold_automatically_generated_operands (op0, op1);
  
  switch (TREE_CODE (op0))
    {
    case POLYNOMIAL_CHREC:
      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  return chrec_fold_multiply_poly_poly (type, op0, op1);
	  
	default:
	  if (integer_onep (op1))
	    return op0;
	  if (integer_zerop (op1))
	    return build_int_cst (type, 0);
	  
	  return build_polynomial_chrec 
	    (CHREC_VARIABLE (op0), 
	     chrec_fold_multiply (type, CHREC_LEFT (op0), op1),
	     chrec_fold_multiply (type, CHREC_RIGHT (op0), op1));
	}
      
    default:
      if (integer_onep (op0))
	return op1;
      
      if (integer_zerop (op0))
	return build_int_cst (type, 0);
      
      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  return build_polynomial_chrec 
	    (CHREC_VARIABLE (op1), 
	     chrec_fold_multiply (type, CHREC_LEFT (op1), op0),
	     chrec_fold_multiply (type, CHREC_RIGHT (op1), op0));
	  
	default:
	  if (integer_onep (op1))
	    return op0;
	  if (integer_zerop (op1))
	    return build_int_cst (type, 0);
	  return fold_build2 (MULT_EXPR, type, op0, op1);
	}
    }
}
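The two POLYNOMIAL_CHREC branches implement the distributive law for chains of recurrences: a chrec {left, +, right} multiplied by a scalar c becomes {left * c, +, right * c}. Below is a minimal standalone C sketch of that rule on plain integers; the struct and names are illustrative, not GCC's tree representation.

#include <stdio.h>

/* A chain of recurrences {left, +, right}: its value at iteration i
   is left + i * right.  Illustrative stand-in for POLYNOMIAL_CHREC.  */
struct chrec { long left, right; };

/* The chrec-times-scalar rule used by chrec_fold_multiply.  */
static struct chrec
chrec_mul_scalar (struct chrec c, long s)
{
  struct chrec r = { c.left * s, c.right * s };
  return r;
}

int
main (void)
{
  struct chrec c = { 3, 2 };                 /* {3, +, 2} */
  struct chrec d = chrec_mul_scalar (c, 5);  /* {15, +, 10} */
  printf ("%ld %ld\n", 5 * (c.left + 4 * c.right),
	  d.left + 4 * d.right);             /* prints "55 55" */
  return 0;
}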
Example #2
tree
build_must_not_throw_expr (tree body, tree cond)
{
  tree type = body ? TREE_TYPE (body) : void_type_node;

  if (!flag_exceptions)
    return body;

  if (!cond)
    /* OK, unconditional.  */;
  else
    {
      tree conv = NULL_TREE;
      if (!type_dependent_expression_p (cond))
	conv = perform_implicit_conversion_flags (boolean_type_node, cond,
						  tf_warning_or_error,
						  LOOKUP_NORMAL);
      if (tree inst = instantiate_non_dependent_or_null (conv))
	cond = cxx_constant_value (inst);
      else
	require_constant_expression (cond);
      if (integer_zerop (cond))
	return body;
      else if (integer_onep (cond))
	cond = NULL_TREE;
    }

  return build2 (MUST_NOT_THROW_EXPR, type, body, cond);
}
Example #3
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr)
{
  if (!valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset && integer_zerop (addr->offset))
    addr->offset = NULL_TREE;

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
  if (alias_ptr_type
      && !addr->index
      && !addr->step
      && (!addr->base || POINTER_TYPE_P (TREE_TYPE (addr->base))))
    {
      tree base, offset;
      gcc_assert (!addr->symbol ^ !addr->base);
      if (addr->symbol)
	base = build_fold_addr_expr (addr->symbol);
      else
	base = addr->base;
      if (addr->offset)
	offset = fold_convert (alias_ptr_type, addr->offset);
      else
	offset = build_int_cst (alias_ptr_type, 0);
      return fold_build2 (MEM_REF, type, base, offset);
    }

  return build6 (TARGET_MEM_REF, type,
		 addr->symbol, addr->base, addr->index,
		 addr->step, addr->offset, NULL);
}
Example #4
void
gfc_trans_runtime_check (bool error, bool once, tree cond, stmtblock_t * pblock,
			 locus * where, const char * msgid, ...)
{
  va_list ap;
  stmtblock_t block;
  tree body;
  tree tmp;
  tree tmpvar = NULL;

  if (integer_zerop (cond))
    return;

  if (once)
    {
       tmpvar = gfc_create_var (boolean_type_node, "print_warning");
       TREE_STATIC (tmpvar) = 1;
       DECL_INITIAL (tmpvar) = boolean_true_node;
       gfc_add_expr_to_block (pblock, tmpvar);
    }

  gfc_start_block (&block);

  /* The code to generate the error.  */
  va_start (ap, msgid);
  gfc_add_expr_to_block (&block,
			 trans_runtime_error_vararg (error, where,
						     msgid, ap));

  if (once)
    gfc_add_modify (&block, tmpvar, boolean_false_node);

  body = gfc_finish_block (&block);

  if (integer_onep (cond))
    {
      gfc_add_expr_to_block (pblock, body);
    }
  else
    {
      /* Tell the compiler that this isn't likely.  */
      if (once)
	cond = fold_build2_loc (where->lb->location, TRUTH_AND_EXPR,
				long_integer_type_node, tmpvar, cond);
      else
	cond = fold_convert (long_integer_type_node, cond);

      tmp = build_int_cst (long_integer_type_node, 0);
      cond = build_call_expr_loc (where->lb->location,
			      built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
      cond = fold_convert (boolean_type_node, cond);

      tmp = fold_build3_loc (where->lb->location, COND_EXPR, void_type_node,
			     cond, body,
			     build_empty_stmt (where->lb->location));
      gfc_add_expr_to_block (pblock, tmp);
    }
}
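The else branch wraps the condition in BUILT_IN_EXPECT so the error body is laid out as the unlikely path. A minimal sketch of the equivalent source-level pattern; runtime_error here is a hypothetical stand-in for the library error routine.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for the runtime error call built above.  */
static void
runtime_error (const char *msg)
{
  fprintf (stderr, "%s\n", msg);
  exit (2);
}

static void
check (long cond)
{
  /* Equivalent of the generated COND_EXPR: the compiler is told
     the condition is expected to be 0 (false).  */
  if (__builtin_expect (cond, 0))
    runtime_error ("runtime check failed");
}

int
main (void)
{
  check (0);   /* likely path: nothing happens */
  check (1);   /* unlikely path: report and exit */
  return 0;
}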
Example #5
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one. In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861). */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements. 
	 This implicitly verifies that the size of the array elements
	 is constant.  */
     offset = gimple_assign_rhs1 (offset_def);
     if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
	 || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
	 || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
			       TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
	return false;

      /* The first operand to the MULT_EXPR is the desired index.  */
      index = offset;
    }

  /* Replace the pointer addition with array indexing.  */
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
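In effect the pass rewrites pointer arithmetic whose offset is the index times the element size back into array indexing. A source-level before/after sketch (array and values are hypothetical):

#include <stdio.h>

int
main (void)
{
  int a[8] = { 0, 10, 20, 30, 40, 50, 60, 70 };
  long i = 3;

  /* Before: the address is &a plus a MULT_EXPR offset i * sizeof (int),
     the form the function above recognizes.  */
  int *p = a;
  int before = *(int *) ((char *) p + i * sizeof (int));

  /* After: plain array indexing with the recovered index.  */
  int after = a[i];

  printf ("%d %d\n", before, after);   /* prints "30 30" */
  return 0;
}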
Example #6
void
gfc_trans_runtime_check (tree cond, const char * msgid, stmtblock_t * pblock,
			 locus * where)
{
  stmtblock_t block;
  tree body;
  tree tmp;
  tree args;
  char * message;
  int line;

  if (integer_zerop (cond))
    return;

  /* The code to generate the error.  */
  gfc_start_block (&block);

  if (where)
    {
#ifdef USE_MAPPED_LOCATION
      line = LOCATION_LINE (where->lb->location);
#else 
      line = where->lb->linenum;
#endif
      asprintf (&message, "%s (in file '%s', at line %d)", _(msgid),
		where->lb->file->filename, line);
    }
  else
    asprintf (&message, "%s (in file '%s', around line %d)", _(msgid),
	      gfc_source_file, input_line + 1);

  tmp = gfc_build_addr_expr (pchar_type_node, gfc_build_cstring_const(message));
  gfc_free(message);
  args = gfc_chainon_list (NULL_TREE, tmp);

  tmp = build_function_call_expr (gfor_fndecl_runtime_error, args);
  gfc_add_expr_to_block (&block, tmp);

  body = gfc_finish_block (&block);

  if (integer_onep (cond))
    {
      gfc_add_expr_to_block (pblock, body);
    }
  else
    {
      /* Tell the compiler that this isn't likely.  */
      cond = fold_convert (long_integer_type_node, cond);
      tmp = gfc_chainon_list (NULL_TREE, cond);
      tmp = gfc_chainon_list (tmp, build_int_cst (long_integer_type_node, 0));
      cond = build_function_call_expr (built_in_decls[BUILT_IN_EXPECT], tmp);
      cond = fold_convert (boolean_type_node, cond);

      tmp = build3_v (COND_EXPR, cond, body, build_empty_stmt ());
      gfc_add_expr_to_block (pblock, tmp);
    }
}
Example #7
static bool
remove_redundant_iv_tests (struct loop *loop)
{
  struct nb_iter_bound *elt;
  bool changed = false;

  if (!loop->any_upper_bound)
    return false;
  for (elt = loop->bounds; elt; elt = elt->next)
    {
      /* Exit is pointless if it won't be taken before loop reaches
	 upper bound.  */
      if (elt->is_exit && loop->any_upper_bound
          && wi::ltu_p (loop->nb_iterations_upper_bound, elt->bound))
	{
	  basic_block bb = gimple_bb (elt->stmt);
	  edge exit_edge = EDGE_SUCC (bb, 0);
	  struct tree_niter_desc niter;

	  if (!loop_exit_edge_p (loop, exit_edge))
	    exit_edge = EDGE_SUCC (bb, 1);

	  /* Only when we know the actual number of iterations, not
	     just a bound, we can remove the exit.  */
	  if (!number_of_iterations_exit (loop, exit_edge,
					  &niter, false, false)
	      || !integer_onep (niter.assumptions)
	      || !integer_zerop (niter.may_be_zero)
	      || !niter.niter
	      || TREE_CODE (niter.niter) != INTEGER_CST
	      || !wi::ltu_p (loop->nb_iterations_upper_bound,
			     wi::to_widest (niter.niter)))
	    continue;
	  
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removed pointless exit: ");
	      print_gimple_stmt (dump_file, elt->stmt, 0, 0);
	    }
	  gcond *cond_stmt = as_a <gcond *> (elt->stmt);
	  if (exit_edge->flags & EDGE_TRUE_VALUE)
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	  update_stmt (cond_stmt);
	  changed = true;
	}
    }
  return changed;
}
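The shape being removed is an exit that cannot fire before the loop's recorded upper bound. An illustrative C loop of that shape: with at most 16 iterations known, i == 100 can never hold, so the pass can rewrite the condition to constant false (gimple_cond_make_false) and let the break disappear.

#include <stdio.h>

int
main (void)
{
  int sum = 0;
  for (int i = 0; i < 16; i++)
    {
      /* Pointless exit: never taken before the bound of 16.  */
      if (i == 100)
	break;
      sum += i;
    }
  printf ("%d\n", sum);   /* 120 */
  return 0;
}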
Example #8
tree
build_must_not_throw_expr (tree body, tree cond)
{
  tree type = body ? TREE_TYPE (body) : void_type_node;

  if (cond && !value_dependent_expression_p (cond))
    {
      cond = cxx_constant_value (cond);
      if (integer_zerop (cond))
	return body;
      else if (integer_onep (cond))
	cond = NULL_TREE;
    }

  return build2 (MUST_NOT_THROW_EXPR, type, body, cond);
}
Example #9
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
Example #10
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
  if (alias_ptr_type
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
Example #11
static tree
examine_tree_for_in_charge_use (tree *tp, int *walk_subtrees, void *vdata)
{
  struct thunk_tree_walk_data *data = (struct thunk_tree_walk_data *) vdata;
  switch (TREE_CODE (*tp))
    {
      case PARM_DECL:
	if (*tp == data->in_charge_parm)
	  data->in_charge_use = NO_THUNKS;
	return NULL;
      case BIT_AND_EXPR:
	if (TREE_OPERAND (*tp, 0) == data->in_charge_parm
	    && integer_onep (TREE_OPERAND (*tp, 1)))
	  {
	    *walk_subtrees = 0;
	    if (data->in_charge_use == ALL_THUNKS
		|| data->in_charge_use == IN_CHARGE_1)
	      data->in_charge_use = IN_CHARGE_1;
	    else
	      data->in_charge_use = NO_THUNKS;
	  }
	return NULL;
      case EQ_EXPR:
      case NE_EXPR:
	if (TREE_OPERAND (*tp, 0) == data->in_charge_parm
	    && integer_zerop (TREE_OPERAND (*tp, 1)))
	  {
	    *walk_subtrees = 0;
	    if (data->in_charge_use == ALL_THUNKS
		|| data->in_charge_use == IN_CHARGE_0)
	      data->in_charge_use = IN_CHARGE_0;
	    else
	      data->in_charge_use = NO_THUNKS;
	  }
	return NULL;
      default:
	return NULL;
    }
}
Example #12
/// HandleUnion - Handle a UNION_TYPE or QUAL_UNION_TYPE tree.
void DefaultABI::HandleUnion(tree type, std::vector<const Type*> &ScalarElts) {
  if (TYPE_TRANSPARENT_UNION(type)) {
    tree Field = TYPE_FIELDS(type);
    assert(Field && "Transparent union must have some elements!");
    while (TREE_CODE(Field) != FIELD_DECL) {
      Field = TREE_CHAIN(Field);
      assert(Field && "Transparent union must have some elements!");
    }

    HandleArgument(TREE_TYPE(Field), ScalarElts);
  } else {
    // Unions pass the largest element.
    unsigned MaxSize = 0;
    tree MaxElt = 0;
    for (tree Field = TYPE_FIELDS(type); Field; Field = TREE_CHAIN(Field)) {
      if (TREE_CODE(Field) == FIELD_DECL) {
	// Skip fields that are known not to be present.
	if (TREE_CODE(type) == QUAL_UNION_TYPE &&
	    integer_zerop(DECL_QUALIFIER(Field)))
	  continue;

	tree SizeTree = TYPE_SIZE(TREE_TYPE(Field));
	unsigned Size = ((unsigned)TREE_INT_CST_LOW(SizeTree)+7)/8;
	if (Size > MaxSize) {
	  MaxSize = Size;
	  MaxElt = Field;
	}

	// Skip remaining fields if this one is known to be present.
	if (TREE_CODE(type) == QUAL_UNION_TYPE &&
	    integer_onep(DECL_QUALIFIER(Field)))
	  break;
      }
    }

    if (MaxElt)
      HandleArgument(TREE_TYPE(MaxElt), ScalarElts);
  }
}
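TYPE_TRANSPARENT_UNION corresponds to GCC's transparent_union attribute: the union is passed with the calling convention of its first member, which is why the code above forwards the first FIELD_DECL's type to HandleArgument. A minimal illustration of the extension (GNU C only; names are illustrative):

#include <stdio.h>

/* A transparent union is passed exactly like its first member
   (here int), and member-typed arguments convert implicitly.  */
typedef union
{
  int i;
  unsigned u;
} int_arg __attribute__ ((transparent_union));

static void
show (int_arg a)
{
  printf ("%d\n", a.i);
}

int
main (void)
{
  show (42);   /* an int is accepted directly */
  return 0;
}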
Example #13
/* Dump ADDR into dump_file.  */
static void
chkp_print_addr (const address_t &addr)
{
  unsigned int n = 0;
  for (n = 0; n < addr.pol.length (); n++)
    {
      if (n > 0)
	fprintf (dump_file, " + ");

      if (addr.pol[n].var == NULL_TREE)
	print_generic_expr (dump_file, addr.pol[n].cst, 0);
      else
	{
	  if (TREE_CODE (addr.pol[n].cst) != INTEGER_CST
	      || !integer_onep (addr.pol[n].cst))
	    {
	      print_generic_expr (dump_file, addr.pol[n].cst, 0);
	      fprintf (dump_file, " * ");
	    }
	  print_generic_expr (dump_file, addr.pol[n].var, 0);
	}
    }
}
Example #14
static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
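adjust_accumulator_values is part of GCC's tail-recursion elimination: M accumulates pending multiplications and A pending additions, so a call like return n * f (n - 1) can become a loop. A hand-written C sketch of the transformed shape under that reading; fact_rec, fact_loop, and m_acc are illustrative names, not the pass's output.

#include <stdio.h>

/* Not a tail call: a multiplication happens after the recursion.  */
static long
fact_rec (long n)
{
  if (n <= 1)
    return 1;
  return n * fact_rec (n - 1);
}

/* Transformed shape: m_acc plays the role of the multiplicative
   accumulator updated via update_accumulator_with_ops (MULT_EXPR).  */
static long
fact_loop (long n)
{
  long m_acc = 1;
  while (n > 1)
    {
      m_acc *= n;
      n -= 1;
    }
  return m_acc;
}

int
main (void)
{
  printf ("%ld %ld\n", fact_rec (10), fact_loop (10));  /* both 3628800 */
  return 0;
}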
Example #15
tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
Example #16
static tree
forward_propagate_into_cond_1 (tree cond, tree *test_var_p)
{
  tree new_cond = NULL_TREE;
  enum tree_code cond_code = TREE_CODE (cond);
  tree test_var = NULL_TREE;
  tree def;
  tree def_rhs;

  /* If the condition is not a lone variable or an equality test of an
     SSA_NAME against an integral constant, then we do not have an
     optimizable case.

     Note these conditions also ensure the COND_EXPR has no
     virtual operands or other side effects.  */
  if (cond_code != SSA_NAME
      && !((cond_code == EQ_EXPR || cond_code == NE_EXPR)
	   && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
	   && CONSTANT_CLASS_P (TREE_OPERAND (cond, 1))
	   && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
    return NULL_TREE;

  /* Extract the single variable used in the test into TEST_VAR.  */
  if (cond_code == SSA_NAME)
    test_var = cond;
  else
    test_var = TREE_OPERAND (cond, 0);

  /* Now get the defining statement for TEST_VAR.  Skip this case if
     it's not defined by some MODIFY_EXPR.  */
  def = SSA_NAME_DEF_STMT (test_var);
  if (TREE_CODE (def) != MODIFY_EXPR)
    return NULL_TREE;

  def_rhs = TREE_OPERAND (def, 1);

  /* If TEST_VAR is set by adding or subtracting a constant
     from an SSA_NAME, then it is interesting to us as we
     can adjust the constant in the conditional and thus
     eliminate the arithmetic operation.  */
  if (TREE_CODE (def_rhs) == PLUS_EXPR
      || TREE_CODE (def_rhs) == MINUS_EXPR)
    {
      tree op0 = TREE_OPERAND (def_rhs, 0);
      tree op1 = TREE_OPERAND (def_rhs, 1);

      /* The first operand must be an SSA_NAME and the second
	 operand must be a constant.  */
      if (TREE_CODE (op0) != SSA_NAME
	  || !CONSTANT_CLASS_P (op1)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	return NULL_TREE;

      /* Don't propagate if the first operand occurs in
	 an abnormal PHI.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
	return NULL_TREE;

      if (has_single_use (test_var))
	{
	  enum tree_code new_code;
	  tree t;

	  /* If the variable was defined via X + C, then we must
	     subtract C from the constant in the conditional.
	     Otherwise we add C to the constant in the
	     conditional.  The result must fold into a valid
	     gimple operand to be optimizable.  */
	  new_code = (TREE_CODE (def_rhs) == PLUS_EXPR
		      ? MINUS_EXPR : PLUS_EXPR);
	  t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
	  if (!is_gimple_val (t))
	    return NULL_TREE;

	  new_cond = build (cond_code, boolean_type_node, op0, t);
	}
    }

  /* These cases require comparisons of a naked SSA_NAME or
     comparison of an SSA_NAME against zero or one.  */
  else if (TREE_CODE (cond) == SSA_NAME
	   || integer_zerop (TREE_OPERAND (cond, 1))
	   || integer_onep (TREE_OPERAND (cond, 1)))
    {
      /* If TEST_VAR is set from a relational operation
	 between two SSA_NAMEs or a combination of an SSA_NAME
	 and a constant, then it is interesting.  */
      if (COMPARISON_CLASS_P (def_rhs))
	{
	  tree op0 = TREE_OPERAND (def_rhs, 0);
	  tree op1 = TREE_OPERAND (def_rhs, 1);

	  /* Both operands of DEF_RHS must be SSA_NAMEs or
	     constants.  */
	  if ((TREE_CODE (op0) != SSA_NAME
	       && !is_gimple_min_invariant (op0))
	      || (TREE_CODE (op1) != SSA_NAME
		  && !is_gimple_min_invariant (op1)))
	    return NULL_TREE;

	  /* Don't propagate if the first operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (op0) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
	    return NULL_TREE;

	  /* Don't propagate if the second operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (op1) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op1))
	    return NULL_TREE;

	  if (has_single_use (test_var))
	    {
	      /* TEST_VAR was set from a relational operator.  */
	      new_cond = build (TREE_CODE (def_rhs),
				boolean_type_node, op0, op1);

	      /* Invert the conditional if necessary.  */
	      if ((cond_code == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == NE_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		{
		  new_cond = invert_truthvalue (new_cond);

		  /* If we did not get a simple relational
		     expression or bare SSA_NAME, then we can
		     not optimize this case.  */
		  if (!COMPARISON_CLASS_P (new_cond)
		      && TREE_CODE (new_cond) != SSA_NAME)
		    new_cond = NULL_TREE;
		}
	    }
	}

      /* If TEST_VAR is set from a TRUTH_NOT_EXPR, then it
	 is interesting.  */
      else if (TREE_CODE (def_rhs) == TRUTH_NOT_EXPR)
	{
	  enum tree_code new_code;

	  def_rhs = TREE_OPERAND (def_rhs, 0);

	  /* DEF_RHS must be an SSA_NAME or constant.  */
	  if (TREE_CODE (def_rhs) != SSA_NAME
	      && !is_gimple_min_invariant (def_rhs))
	    return NULL_TREE;

	  /* Don't propagate if the operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (def_rhs) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_rhs))
	    return NULL_TREE;

	  if (cond_code == SSA_NAME
	      || (cond_code == NE_EXPR
		  && integer_zerop (TREE_OPERAND (cond, 1)))
	      || (cond_code == EQ_EXPR
		  && integer_onep (TREE_OPERAND (cond, 1))))
	    new_code = EQ_EXPR;
	  else
	    new_code = NE_EXPR;

	  new_cond = build2 (new_code, boolean_type_node, def_rhs,
			     fold_convert (TREE_TYPE (def_rhs),
					   integer_zero_node));
	}

      /* If TEST_VAR was set from a cast of an integer type
	 to a boolean type or a cast of a boolean to an
	 integral, then it is interesting.  */
      else if (TREE_CODE (def_rhs) == NOP_EXPR
	       || TREE_CODE (def_rhs) == CONVERT_EXPR)
	{
	  tree outer_type;
	  tree inner_type;

	  outer_type = TREE_TYPE (def_rhs);
	  inner_type = TREE_TYPE (TREE_OPERAND (def_rhs, 0));

	  if ((TREE_CODE (outer_type) == BOOLEAN_TYPE
	       && INTEGRAL_TYPE_P (inner_type))
	      || (TREE_CODE (inner_type) == BOOLEAN_TYPE
		  && INTEGRAL_TYPE_P (outer_type)))
	    ;
	  else if (INTEGRAL_TYPE_P (outer_type)
		   && INTEGRAL_TYPE_P (inner_type)
		   && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
		   && ssa_name_defined_by_comparison_p (TREE_OPERAND (def_rhs,
								      0)))
	    ;
	  else
	    return NULL_TREE;

	  /* Don't propagate if the operand occurs in
	     an abnormal PHI.  */
	  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND
						  (def_rhs, 0)))
	    return NULL_TREE;

	  if (has_single_use (test_var))
	    {
	      enum tree_code new_code;
	      tree new_arg;

	      if (cond_code == SSA_NAME
		  || (cond_code == NE_EXPR
		      && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == EQ_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		new_code = NE_EXPR;
	      else
		new_code = EQ_EXPR;

	      new_arg = TREE_OPERAND (def_rhs, 0);
	      new_cond = build2 (new_code, boolean_type_node, new_arg,
				 fold_convert (TREE_TYPE (new_arg),
					       integer_zero_node));
	    }
	}
    }

  *test_var_p = test_var;
  return new_cond;
}
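In the PLUS_EXPR/MINUS_EXPR case the arithmetic folds into the comparison constant: when TEST_VAR = a + C and the condition is TEST_VAR == K, the rewritten condition is a == K - C and the addition feeding it becomes dead. A trivial before/after sketch with hypothetical values:

#include <stdio.h>

int
main (void)
{
  int a = 4;

  /* Before propagation: the condition tests a temporary defined
     by a PLUS_EXPR with a constant.  */
  int b = a + 3;
  int before = (b == 7);

  /* After propagation: 7 - 3 is folded into the constant, so the
     comparison uses a directly.  */
  int after = (a == 4);

  printf ("%d %d\n", before, after);   /* prints "1 1" */
  return 0;
}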
Example #17
rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (double_int_sext (tree_to_double_int (addr->offset),
			      TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index
	  >= VEC_length (mem_addr_template, mem_addr_template_list))
	VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
			       templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
Example #18
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
	    do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
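The BIT_AND_EXPR handling rests on the identity behind fold_single_bit_test: (x >> c) & 1 and (x & (1 << c)) != 0 test the same bit, and prefer_and_bit_test decides which form the target compares more cheaply. A standalone check of the identity:

#include <stdio.h>

int
main (void)
{
  unsigned x = 0xB4;   /* arbitrary test value */
  for (unsigned c = 0; c < 8; c++)
    {
      unsigned shift_form = (x >> c) & 1u;
      unsigned mask_form = (x & (1u << c)) != 0;
      if (shift_form != mask_form)
	printf ("mismatch at bit %u\n", c);
    }
  printf ("identity holds\n");   /* no mismatches are printed */
  return 0;
}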
Example #19
static bool
recognize_single_bit_test (gimple cond, tree *name, tree *bit)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != NE_EXPR
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt)))))
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
Example #20
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);
        
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();
              
            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)        and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
 
      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
Example #21
static void restrict_range_to_consts()
{
  size_t i;
  unsigned num_vr_values = num_ssa_names;
  for (i = 0; i < num_vr_values; i++)
    if (vr_value[i])
      {
        value_range_t *vr = vr_value[i];
        tree type = TREE_TYPE (ssa_name(i));
        tree minimum = NULL;
        tree maximum = NULL;
        unsigned var_prec = TYPE_PRECISION(type);

	//fprintf(stderr, "%ld\n", i);

        if (INTEGRAL_TYPE_P(type) && vr->min && vr->max)
	  {
	    bool is_neg_inf = is_negative_overflow_infinity (vr->min)
			      || (INTEGRAL_TYPE_P (type)
				  && !TYPE_UNSIGNED (type)
				  && vrp_val_is_min (vr->min));

	    bool is_pos_inf = is_positive_overflow_infinity (vr->max)
			      || (INTEGRAL_TYPE_P (type)
				  && vrp_val_is_max (vr->max));
	    if(TREE_CODE (vr->min) != INTEGER_CST && !is_neg_inf)
	    {
	      /* Check whether the name is known to be non-negative.  */
	      bool strict_overflow_p;
	      tree val = compare_name_with_value(GE_EXPR, ssa_name(i), integer_zero_node, &strict_overflow_p);
	      if(!strict_overflow_p && val)
	      {
	        if(integer_onep (val))
	        {
	          minimum = integer_zero_node;
	        }
	        else
	        {
	          tree neg_const;
	          unsigned prec_index = 1;
	          while(prec_index < var_prec && !strict_overflow_p)
	          {
	            neg_const = build_int_cst (type, -(((unsigned HOST_WIDE_INT)1) << prec_index));
	            tree val = compare_name_with_value(GE_EXPR, ssa_name(i), neg_const, &strict_overflow_p);
	            if(val && integer_onep (val))
	            {
	              minimum = neg_const;
	              break;
	            }
	            ++prec_index;
	          }
	        }
	      }
	    } else if(is_neg_inf)
	      minimum = vr->min;
	    
	    if(TREE_CODE (vr->max) != INTEGER_CST && !is_pos_inf)
	    {
	      bool strict_overflow_p=false;
	      tree pos_const;
	      unsigned prec_index = 0;
	      while(prec_index < var_prec && !strict_overflow_p)
	      {
	        pos_const = build_int_cst (type, (((unsigned HOST_WIDE_INT)1) << prec_index));
	        tree val = compare_name_with_value(LT_EXPR, ssa_name(i), pos_const, &strict_overflow_p);
	        if(val && integer_onep (val))
	        {
	          maximum = build_int_cst (type, (((unsigned HOST_WIDE_INT)1) << prec_index)-1);
	          break;
	        }
	        ++prec_index;
	      }
	    }
	    else if(is_pos_inf)
	      maximum = vr->max;
	      
	    if(minimum)
	    {
	      vr->min = minimum;
	      vr->type = VR_RANGE;
	    }
	    if(maximum)
	    {
	      vr->max = maximum;
	      vr->type = VR_RANGE;
	    }
	  }
    }
  /* Do further restrictions by exploiting ASSERT_EXPRs.  */
  for (i = 0; i < num_vr_values; i++)
    if (vr_value[i])
    {
      tree type = TREE_TYPE (ssa_name(i));
      value_range_t *vr = vr_value[i];
      if(INTEGRAL_TYPE_P(type) && vr->type == VR_RANGE && vr->min && vr->max)
      {
        tree sa_var = ssa_name(i);
        GIMPLE_type def_stmt = SSA_NAME_DEF_STMT (sa_var);
        if(is_gimple_assign (def_stmt)
	  && gimple_assign_rhs_code (def_stmt) == ASSERT_EXPR)
	{
	  tree src_var = ASSERT_EXPR_VAR (gimple_assign_rhs1 (def_stmt));
	  value_range_t *src_vr = vr_value[SSA_NAME_VERSION(src_var)];
	  if(src_vr && src_vr->type == VR_RANGE && src_vr->min && src_vr->max)
	  {
	    bool strict_overflow_p=false;
	    tree val = compare_name_with_value(LT_EXPR, src_var, vr->max, &strict_overflow_p);
	    if(val && integer_onep (val))
	      vr->max = src_vr->max;
	    strict_overflow_p=false;
	    val = compare_name_with_value(GT_EXPR, src_var, vr->min, &strict_overflow_p);
	    if(val && integer_onep (val))
	      vr->min = src_vr->min;
	  }
	}
      }
    }
}
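Example #22
/* DEF_RHS is an ADDR_EXPR of an array.  Try to rewrite the pointer
   arithmetic in the statement at USE_STMT_GSI as array indexing: derive
   the index from OFFSET by stripping a multiplication by the element
   size, then patch the ARRAY_REF index of the use statement.  Returns
   true on success.  */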
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree tmp;

  tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one. In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861). */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
         must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant.  */
     if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
       {
	 /* The first operand to the MULT_EXPR is the desired index.  */
	 index = gimple_assign_rhs1 (offset_def);
       }
     /* If we have idx * tunit + CST * tunit re-associate that.  */
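     /* For instance, with tunit == 4, "idx * 4 + 8" is re-associated as
	"(idx + 2) * 4", making the resulting index idx + 2.  */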
     else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
	       || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
	      && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
					       gimple_assign_rhs2 (offset_def),
					       tunit)) != NULL_TREE)
       {
	 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
	 if (is_gimple_assign (offset_def2)
	     && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
	     && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
	     && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
	   {
	     index = fold_build2 (gimple_assign_rhs_code (offset_def),
				  TREE_TYPE (offset),
				  gimple_assign_rhs1 (offset_def2), tmp);
	   }
	 else
	   return false;
       }
     else
	return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
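Example #23
/* PHI at the join point has the arguments ARG0 and ARG1, known to be
   the constants 0 and 1.  Replace the GIMPLE_COND ending COND_BB (whose
   outgoing edges E0 and E1 pass through the empty MIDDLE_BB) with a
   straight-line assignment of the condition itself.  Returns true if
   the replacement was performed.  */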
static bool
conditional_replacement (basic_block cond_bb, basic_block middle_bb,
                         edge e0, edge e1, gimple phi,
                         tree arg0, tree arg1)
{
    tree result;
    gimple stmt, new_stmt;
    tree cond;
    gimple_stmt_iterator gsi;
    edge true_edge, false_edge;
    tree new_var, new_var2;

    /* FIXME: Gimplification of complex type is too hard for now.  */
    if (TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
            || TREE_CODE (TREE_TYPE (arg1)) == COMPLEX_TYPE)
        return false;

    /* If the PHI arguments are the constants 0 and 1, convert the PHI
       into the conditional; otherwise there is nothing to do.  */
    if ((integer_zerop (arg0) && integer_onep (arg1))
            || (integer_zerop (arg1) && integer_onep (arg0)))
        ;
    else
        return false;

    if (!empty_block_p (middle_bb))
        return false;

    /* At this point we know we have a GIMPLE_COND with two successors.
       One successor is BB, the other successor is an empty block which
       falls through into BB.

       There is a single PHI node at the join point (BB) and its arguments
       are constants (0, 1).

       So, given the condition COND, and the two PHI arguments, we can
       rewrite this PHI into non-branching code:

         dest = (COND) or dest = COND'

       We use the condition as-is if the argument associated with the
       true edge has the value one or the argument associated with the
       false edge has the value zero.  Note that those conditions are not
       the same since only one of the outgoing edges from the GIMPLE_COND
       will directly reach BB and thus be associated with an argument.  */
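    /* For example, given

         if (a_5 != 0) goto bb_2; else goto bb_3;
         ...
         x_1 = PHI <1 (bb_2), 0 (bb_3)>

       the PHI is replaced by the straight-line assignment
       x_1 = a_5 != 0.  */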

    stmt = last_stmt (cond_bb);
    result = PHI_RESULT (phi);

    /* To handle special cases like floating point comparison, it is easier and
       less error-prone to build a tree and gimplify it on the fly though it is
       less efficient.  */
    cond = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
                        gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));

    /* We need to know which is the true edge and which is the false
       edge so that we know when to invert the condition below.  */
    extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
    if ((e0 == true_edge && integer_zerop (arg0))
            || (e0 == false_edge && integer_onep (arg0))
            || (e1 == true_edge && integer_zerop (arg1))
            || (e1 == false_edge && integer_onep (arg1)))
        cond = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (cond), cond);

    /* Insert our new statements at the end of conditional block before the
       COND_STMT.  */
    gsi = gsi_for_stmt (stmt);
    new_var = force_gimple_operand_gsi (&gsi, cond, true, NULL, true,
                                        GSI_SAME_STMT);

    if (!useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (new_var)))
    {
        new_var2 = create_tmp_var (TREE_TYPE (result), NULL);
        add_referenced_var (new_var2);
        new_stmt = gimple_build_assign_with_ops (CONVERT_EXPR, new_var2,
                   new_var, NULL);
        new_var2 = make_ssa_name (new_var2, new_stmt);
        gimple_assign_set_lhs (new_stmt, new_var2);
        gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
        new_var = new_var2;
    }

    replace_phi_edge_with_variable (cond_bb, e1, phi, new_var);

    /* Note that we optimized this PHI.  */
    return true;
}
Example #24
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
		  gfc_omp_clauses *do_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  int simple = 0;
  bool dovar_found = false;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
  if (clauses)
    {
      gfc_namelist *n;
      for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL; n = n->next)
	if (code->ext.iterator->var->symtree->n.sym == n->sym)
	  break;
      if (n == NULL)
	for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
	  if (code->ext.iterator->var->symtree->n.sym == n->sym)
	    break;
      if (n != NULL)
	dovar_found = true;
    }

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (pblock, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);
  gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (pblock, &se.pre);
  from = gfc_evaluate_now (se.expr, pblock);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (pblock, &se.pre);
  to = gfc_evaluate_now (se.expr, pblock);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (pblock, &se.pre);
  step = gfc_evaluate_now (se.expr, pblock);

  /* Special case simple loops.  */
  if (integer_onep (step))
    simple = 1;
  else if (tree_int_cst_equal (step, integer_minus_one_node))
    simple = -1;

  /* Loop body.  */
  if (simple)
    {
      init = build2_v (MODIFY_EXPR, dovar, from);
      cond = build2 (simple > 0 ? LE_EXPR : GE_EXPR, boolean_type_node,
		     dovar, to);
      incr = fold_build2 (PLUS_EXPR, type, dovar, step);
      incr = fold_build2 (MODIFY_EXPR, type, dovar, incr);
      if (pblock != &block)
	{
	  pushlevel (0);
	  gfc_start_block (&block);
	}
      gfc_start_block (&body);
    }
  else
    {
      /* STEP is not 1 or -1.  Use:
	 for (count = 0; count < (to + step - from) / step; count++)
	   {
	     dovar = from + count * step;
	     body;
	   cycle_label:;
	   }  */
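      /* For instance, "DO dovar = 1, 10, 4" yields (10 + 4 - 1) / 4 = 3
	 iterations, with dovar taking the values 1, 5 and 9.  */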
      tmp = fold_build2 (MINUS_EXPR, type, step, from);
      tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
      tmp = gfc_evaluate_now (tmp, pblock);
      count = gfc_create_var (type, "count");
      init = build2_v (MODIFY_EXPR, count, build_int_cst (type, 0));
      cond = build2 (LT_EXPR, boolean_type_node, count, tmp);
      incr = fold_build2 (PLUS_EXPR, type, count, build_int_cst (type, 1));
      incr = fold_build2 (MODIFY_EXPR, type, count, incr);

      if (pblock != &block)
	{
	  pushlevel (0);
	  gfc_start_block (&block);
	}
      gfc_start_block (&body);

      /* Initialize DOVAR.  */
      tmp = fold_build2 (MULT_EXPR, type, count, step);
      tmp = build2 (PLUS_EXPR, type, from, tmp);
      gfc_add_modify_expr (&body, dovar, tmp);
    }

  if (!dovar_found)
    {
      tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
      OMP_CLAUSE_DECL (tmp) = dovar;
      omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
    }
  if (!simple)
    {
      tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
      OMP_CLAUSE_DECL (tmp) = count;
      omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later. We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}
Example #25
tree
gfc_conv_constant_to_tree (gfc_expr * expr)
{
  tree res;

  gcc_assert (expr->expr_type == EXPR_CONSTANT);

  /* If it has a prescribed memory representation, we build a string
     constant and VIEW_CONVERT to its type.  */
 
  switch (expr->ts.type)
    {
    case BT_INTEGER:
      if (expr->representation.string)
	return fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			 gfc_get_int_type (expr->ts.kind),
			 gfc_build_string_const (expr->representation.length,
						 expr->representation.string));
      else
	return gfc_conv_mpz_to_tree (expr->value.integer, expr->ts.kind);

    case BT_REAL:
      if (expr->representation.string)
	return fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			 gfc_get_real_type (expr->ts.kind),
			 gfc_build_string_const (expr->representation.length,
						 expr->representation.string));
      else
	return gfc_conv_mpfr_to_tree (expr->value.real, expr->ts.kind, expr->is_snan);

    case BT_LOGICAL:
      if (expr->representation.string)
	{
	  tree tmp = fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			gfc_get_int_type (expr->ts.kind),
			gfc_build_string_const (expr->representation.length,
						expr->representation.string));
	  if (!integer_zerop (tmp) && !integer_onep (tmp))
	    gfc_warning ("Assigning value other than 0 or 1 to LOGICAL"
			 " has undefined result at %L", &expr->where);
	  return fold_convert (gfc_get_logical_type (expr->ts.kind), tmp);
	}
      else
	return build_int_cst (gfc_get_logical_type (expr->ts.kind),
			      expr->value.logical);

    case BT_COMPLEX:
      if (expr->representation.string)
	return fold_build1_loc (input_location, VIEW_CONVERT_EXPR,
			 gfc_get_complex_type (expr->ts.kind),
			 gfc_build_string_const (expr->representation.length,
						 expr->representation.string));
      else
	{
	  tree real = gfc_conv_mpfr_to_tree (mpc_realref (expr->value.complex),
					  expr->ts.kind, expr->is_snan);
	  tree imag = gfc_conv_mpfr_to_tree (mpc_imagref (expr->value.complex),
					  expr->ts.kind, expr->is_snan);

	  return build_complex (gfc_typenode_for_spec (&expr->ts),
				real, imag);
	}

    case BT_CHARACTER:
      res = gfc_build_wide_string_const (expr->ts.kind,
					 expr->value.character.length,
					 expr->value.character.string);
      return res;

    case BT_HOLLERITH:
      return gfc_build_string_const (expr->representation.length,
				     expr->representation.string);

    default:
      fatal_error ("gfc_conv_constant_to_tree(): invalid type: %s",
		   gfc_typename (&expr->ts));
    }
}
Example #26
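/* Fold the internal GOMP_SIMD_* calls left over after vectorization:
   IFN_GOMP_SIMD_VF becomes the vectorization factor recorded for its
   simduid in HTAB (or 1), IFN_GOMP_SIMD_LANE becomes 0,
   IFN_GOMP_SIMD_LAST_LANE becomes its second argument, and the
   IFN_GOMP_SIMD_ORDERED_{START,END} calls are replaced by the
   corresponding GOMP builtins or removed.  */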
static void
adjust_simduid_builtins (hash_table<simduid_to_vf> *htab)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  unsigned int vf = 1;
	  enum internal_fn ifn;
	  gimple *stmt = gsi_stmt (i);
	  tree t;
	  if (!is_gimple_call (stmt)
	      || !gimple_call_internal_p (stmt))
	    {
	      gsi_next (&i);
	      continue;
	    }
	  ifn = gimple_call_internal_fn (stmt);
	  switch (ifn)
	    {
	    case IFN_GOMP_SIMD_LANE:
	    case IFN_GOMP_SIMD_VF:
	    case IFN_GOMP_SIMD_LAST_LANE:
	      break;
	    case IFN_GOMP_SIMD_ORDERED_START:
	    case IFN_GOMP_SIMD_ORDERED_END:
	      if (integer_onep (gimple_call_arg (stmt, 0)))
		{
		  enum built_in_function bcode
		    = (ifn == IFN_GOMP_SIMD_ORDERED_START
		       ? BUILT_IN_GOMP_ORDERED_START
		       : BUILT_IN_GOMP_ORDERED_END);
		  gimple *g
		    = gimple_build_call (builtin_decl_explicit (bcode), 0);
		  tree vdef = gimple_vdef (stmt);
		  gimple_set_vdef (g, vdef);
		  SSA_NAME_DEF_STMT (vdef) = g;
		  gimple_set_vuse (g, gimple_vuse (stmt));
		  gsi_replace (&i, g, true);
		  continue;
		}
	      gsi_remove (&i, true);
	      unlink_stmt_vdef (stmt);
	      continue;
	    default:
	      gsi_next (&i);
	      continue;
	    }
	  tree arg = gimple_call_arg (stmt, 0);
	  gcc_assert (arg != NULL_TREE);
	  gcc_assert (TREE_CODE (arg) == SSA_NAME);
	  simduid_to_vf *p = NULL, data;
	  data.simduid = DECL_UID (SSA_NAME_VAR (arg));
	  /* Need to nullify loop safelen field since its value is not
	     valid after transformation.  */
	  if (bb->loop_father && bb->loop_father->safelen > 0)
	    bb->loop_father->safelen = 0;
	  if (htab)
	    {
	      p = htab->find (&data);
	      if (p)
		vf = p->vf;
	    }
	  switch (ifn)
	    {
	    case IFN_GOMP_SIMD_VF:
	      t = build_int_cst (unsigned_type_node, vf);
	      break;
	    case IFN_GOMP_SIMD_LANE:
	      t = build_int_cst (unsigned_type_node, 0);
	      break;
	    case IFN_GOMP_SIMD_LAST_LANE:
	      t = gimple_call_arg (stmt, 1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  update_call_from_tree (&i, t);
	  gsi_next (&i);
	}
    }
}
Example #27
tree
chrec_fold_multiply (tree type,
		     tree op0,
		     tree op1)
{
  if (automatically_generated_chrec_p (op0)
      || automatically_generated_chrec_p (op1))
    return chrec_fold_automatically_generated_operands (op0, op1);

  switch (TREE_CODE (op0))
    {
    case POLYNOMIAL_CHREC:
      gcc_checking_assert
	(!chrec_contains_symbols_defined_in_loop (op0, CHREC_VARIABLE (op0)));
      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  gcc_checking_assert
	    (!chrec_contains_symbols_defined_in_loop (op1,
						      CHREC_VARIABLE (op1)));
	  return chrec_fold_multiply_poly_poly (type, op0, op1);

	CASE_CONVERT:
	  if (tree_contains_chrecs (op1, NULL))
	    return chrec_dont_know;

	default:
	  if (integer_onep (op1))
	    return op0;
	  if (integer_zerop (op1))
	    return build_int_cst (type, 0);

	  return build_polynomial_chrec
	    (CHREC_VARIABLE (op0),
	     chrec_fold_multiply (type, CHREC_LEFT (op0), op1),
	     chrec_fold_multiply (type, CHREC_RIGHT (op0), op1));
	}

    CASE_CONVERT:
      if (tree_contains_chrecs (op0, NULL))
	return chrec_dont_know;

    default:
      if (integer_onep (op0))
	return op1;

      if (integer_zerop (op0))
    	return build_int_cst (type, 0);

      switch (TREE_CODE (op1))
	{
	case POLYNOMIAL_CHREC:
	  gcc_checking_assert
	    (!chrec_contains_symbols_defined_in_loop (op1,
						      CHREC_VARIABLE (op1)));
	  return build_polynomial_chrec
	    (CHREC_VARIABLE (op1),
	     chrec_fold_multiply (type, CHREC_LEFT (op1), op0),
	     chrec_fold_multiply (type, CHREC_RIGHT (op1), op0));

	CASE_CONVERT:
	  if (tree_contains_chrecs (op1, NULL))
	    return chrec_dont_know;

	default:
	  if (integer_onep (op1))
	    return op0;
	  if (integer_zerop (op1))
	    return build_int_cst (type, 0);
	  return fold_build2 (MULT_EXPR, type, op0, op1);
	}
    }
}
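Example #28
/* For each SSA name on VARS_WORKLIST that is used exactly once (or is a
   boolean set via TRUTH_NOT_EXPR of another SSA_NAME), try to forward
   propagate the right-hand side of its defining statement into the
   COND_EXPRs that test it, pushing every modified conditional onto
   COND_WORKLIST and deleting the definition once all uses have been
   propagated.  */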
static void
substitute_single_use_vars (varray_type *cond_worklist,
			    varray_type vars_worklist)
{
  while (VARRAY_ACTIVE_SIZE (vars_worklist) > 0)
    {
      tree test_var = VARRAY_TOP_TREE (vars_worklist);
      tree def = SSA_NAME_DEF_STMT (test_var);
      dataflow_t df;
      int j, num_uses, propagated_uses;

      VARRAY_POP (vars_worklist);

      /* Now compute the immediate uses of TEST_VAR.  */
      df = get_immediate_uses (def);
      num_uses = num_immediate_uses (df);
      propagated_uses = 0;

      /* If TEST_VAR is used more than once and is not a boolean set
	 via TRUTH_NOT_EXPR with another SSA_NAME as its argument, then
	 we can not optimize.  */
      if (num_uses == 1
	  || (TREE_CODE (TREE_TYPE (test_var)) == BOOLEAN_TYPE
	      && TREE_CODE (TREE_OPERAND (def, 1)) == TRUTH_NOT_EXPR
	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (def, 1), 0))
		  == SSA_NAME)))
	;
      else
	continue;

      /* Walk over each use and try to forward propagate the RHS of
	 DEF into the use.  */
      for (j = 0; j < num_uses; j++)
	{
	  tree cond_stmt;
	  tree cond;
	  enum tree_code cond_code;
	  tree def_rhs;
	  enum tree_code def_rhs_code;
	  tree new_cond;

	  cond_stmt = immediate_use (df, j);

	  /* For now we can only propagate into COND_EXPRs.  */
	  if (TREE_CODE (cond_stmt) != COND_EXPR) 
	    continue;

	  cond = COND_EXPR_COND (cond_stmt);
	  cond_code = TREE_CODE (cond);
	  def_rhs = TREE_OPERAND (def, 1);
	  def_rhs_code = TREE_CODE (def_rhs);

	  /* If the definition of the single use variable was from an
	     arithmetic operation, then we just need to adjust the
	     constant in the COND_EXPR_COND and update the variable tested.  */
	  if (def_rhs_code == PLUS_EXPR || def_rhs_code == MINUS_EXPR)
	    {
	      tree op0 = TREE_OPERAND (def_rhs, 0);
	      tree op1 = TREE_OPERAND (def_rhs, 1);
	      enum tree_code new_code;
	      tree t;

	      /* If the variable was defined via X + C, then we must subtract
		 C from the constant in the conditional.  Otherwise we add
		 C to the constant in the conditional.  The result must fold
		 into a valid gimple operand to be optimizable.  */
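	      /* For example, "t = x + 3; if (t == 5)" becomes
		 "if (x == 2)".  */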
	      new_code = def_rhs_code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
	      t = int_const_binop (new_code, TREE_OPERAND (cond, 1), op1, 0);
	      if (!is_gimple_val (t))
		continue;

	      new_cond = build (cond_code, boolean_type_node, op0, t);
	    }
	  /* If the variable is defined by a conditional expression... */
	  else if (TREE_CODE_CLASS (def_rhs_code) == tcc_comparison)
	    {
	      /* TEST_VAR was set from a relational operator.  */
	      tree op0 = TREE_OPERAND (def_rhs, 0);
	      tree op1 = TREE_OPERAND (def_rhs, 1);

	      new_cond = build (def_rhs_code, boolean_type_node, op0, op1);

	      /* Invert the conditional if necessary.  */
	      if ((cond_code == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == NE_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		{
		  new_cond = invert_truthvalue (new_cond);

		  /* If we did not get a simple relational expression or
		     bare SSA_NAME, then we can not optimize this case.  */
		  if (!COMPARISON_CLASS_P (new_cond)
		      && TREE_CODE (new_cond) != SSA_NAME)
		    continue;
		}
	    }
	  else
	    {
	      bool invert = false;
	      enum tree_code new_code;
	      tree new_arg;

	      /* TEST_VAR was set from a TRUTH_NOT_EXPR or a NOP_EXPR.  */
	      if (def_rhs_code == TRUTH_NOT_EXPR)
		invert = true;
      
	      /* If we don't have <NE_EXPR/EQ_EXPR x INT_CST>, then we cannot
	         optimize this case.  */
	      if ((cond_code == NE_EXPR || cond_code == EQ_EXPR)
	          && TREE_CODE (TREE_OPERAND (cond, 1)) != INTEGER_CST)
		continue;
		
	      if (cond_code == SSA_NAME
		  || (cond_code == NE_EXPR
		      && integer_zerop (TREE_OPERAND (cond, 1)))
		  || (cond_code == EQ_EXPR
		      && integer_onep (TREE_OPERAND (cond, 1))))
		new_code = NE_EXPR;
	      else
		new_code = EQ_EXPR;

	      if (invert)
		new_code = (new_code == EQ_EXPR ? NE_EXPR  : EQ_EXPR);

	      new_arg = TREE_OPERAND (def_rhs, 0);
	      new_cond = build2 (new_code, boolean_type_node, new_arg,
				 fold_convert (TREE_TYPE (new_arg),
					       integer_zero_node));
	    }

	  /* Dump details.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Replaced '");
	      print_generic_expr (dump_file, cond, dump_flags);
	      fprintf (dump_file, "' with '");
	      print_generic_expr (dump_file, new_cond, dump_flags);
	      fprintf (dump_file, "'\n");
	    }

	  /* Replace the condition.  */
	  COND_EXPR_COND (cond_stmt) = new_cond;
	  modify_stmt (cond_stmt);
	  propagated_uses++;
	  VARRAY_PUSH_TREE (*cond_worklist, cond_stmt);
	}

      /* If we propagated into all the uses, then we can delete DEF.
	 Unfortunately, we have to find the defining statement in
	 whatever block it might be in.  */
      if (num_uses && num_uses == propagated_uses)
	{
	  block_stmt_iterator bsi = bsi_for_stmt (def);
	  bsi_remove (&bsi);
	}
    }
}
Example #29
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
	= (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;

	  /* Strip narrowing integral type conversions.  */
	  while ((TREE_CODE (exp0) == NOP_EXPR
		  || TREE_CODE (exp0) == CONVERT_EXPR
		  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
				       << TREE_INT_CST_LOW (shift);
		  tree t = build_int_2 (mask, 0);
		  TREE_TYPE (t) = argtype;
		  do_jump (build (BIT_AND_EXPR, argtype, arg, t),
			   clr_label, set_label);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
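      /* For example, "(x & 0x80) != 0" on a 32-bit X can then be done as
         a QImode comparison that tests the sign bit of the low byte.  */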

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
          != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
      {
        rtx label1 = gen_label_rtx ();
        drop_through_label = gen_label_rtx ();

        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

        start_cleanup_deferral ();
        /* Now the THEN-expression.  */
        do_jump (TREE_OPERAND (exp, 1),
                 if_false_label ? if_false_label : drop_through_label,
                 if_true_label ? if_true_label : drop_through_label);
        /* In case the do_jump just above never jumps.  */
        do_pending_stack_adjust ();
        emit_label (label1);

        /* Now the ELSE-expression.  */
        do_jump (TREE_OPERAND (exp, 2),
           if_false_label ? if_false_label : drop_through_label,
           if_true_label ? if_true_label : drop_through_label);
        end_cleanup_deferral ();
      }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                 fold (build (EQ_EXPR, TREE_TYPE (exp),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp0)),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp1)))),
                 fold (build (EQ_EXPR, TREE_TYPE (exp),
                  fold (build1 (IMAGPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp0)),
                  fold (build1 (IMAGPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                 fold (build (NE_EXPR, TREE_TYPE (exp),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp0)),
                  fold (build1 (REALPART_EXPR,
                    TREE_TYPE (inner_type),
                    exp1)))),
                 fold (build (NE_EXPR, TREE_TYPE (exp),
                    fold (build1 (IMAGPART_EXPR,
                      TREE_TYPE (inner_type),
                      exp0)),
                    fold (build1 (IMAGPART_EXPR,
                      TREE_TYPE (inner_type),
                      exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
      }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)	and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
	tree fndecl = get_callee_fndecl (exp);
	tree arglist = TREE_OPERAND (exp, 1);

	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	    && arglist != NULL_TREE
	    && TREE_CHAIN (arglist) != NULL_TREE)
	  {
	    rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						  if_true_label);

	    if (seq != NULL_RTX)
	      {
		emit_insn (seq);
		return;
	      }
	  }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	{
	  /* The RTL optimizers prefer comparisons against pseudos.  */
	  if (GET_CODE (temp) == SUBREG)
	    {
	      /* Compare promoted variables in their promoted mode.  */
	      if (SUBREG_PROMOTED_VAR_P (temp)
		  && GET_CODE (XEXP (temp, 0)) == REG)
		temp = XEXP (temp, 0);
	      else
		temp = copy_to_reg (temp);
	    }
	  do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				   NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				   GET_MODE (temp), NULL_RTX,
				   if_false_label, if_true_label);
	}
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
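Example #30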
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
		  gfc_omp_clauses *do_clauses, tree par_clauses)
{
  gfc_se se;
  tree dovar, stmt, from, to, step, type, init, cond, incr;
  tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
  stmtblock_t block;
  stmtblock_t body;
  gfc_omp_clauses *clauses = code->ext.omp_clauses;
  int i, collapse = clauses->collapse;
  tree dovar_init = NULL_TREE;

  if (collapse <= 0)
    collapse = 1;

  code = code->block->next;
  gcc_assert (code->op == EXEC_DO);

  init = make_tree_vec (collapse);
  cond = make_tree_vec (collapse);
  incr = make_tree_vec (collapse);

  if (pblock == NULL)
    {
      gfc_start_block (&block);
      pblock = &block;
    }

  omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);

  for (i = 0; i < collapse; i++)
    {
      int simple = 0;
      int dovar_found = 0;
      tree dovar_decl;

      if (clauses)
	{
	  gfc_namelist *n;
	  for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
	       n = n->next)
	    if (code->ext.iterator->var->symtree->n.sym == n->sym)
	      break;
	  if (n != NULL)
	    dovar_found = 1;
	  else if (n == NULL)
	    for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
	      if (code->ext.iterator->var->symtree->n.sym == n->sym)
		break;
	  if (n != NULL)
	    dovar_found++;
	}

      /* Evaluate all the expressions in the iterator.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->ext.iterator->var);
      gfc_add_block_to_block (pblock, &se.pre);
      dovar = se.expr;
      type = TREE_TYPE (dovar);
      gcc_assert (TREE_CODE (type) == INTEGER_TYPE);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->start);
      gfc_add_block_to_block (pblock, &se.pre);
      from = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->end);
      gfc_add_block_to_block (pblock, &se.pre);
      to = gfc_evaluate_now (se.expr, pblock);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->ext.iterator->step);
      gfc_add_block_to_block (pblock, &se.pre);
      step = gfc_evaluate_now (se.expr, pblock);
      dovar_decl = dovar;

      /* Special case simple loops.  */
      if (TREE_CODE (dovar) == VAR_DECL)
	{
	  if (integer_onep (step))
	    simple = 1;
	  else if (tree_int_cst_equal (step, integer_minus_one_node))
	    simple = -1;
	}
      else
	dovar_decl
	  = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym);

      /* Loop body.  */
      if (simple)
	{
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
	  TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
						boolean_type_node, dovar, to);
	  TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
	  TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
						TREE_VEC_ELT (incr, i));
	}
      else
	{
	  /* STEP is not 1 or -1.  Use:
	     for (count = 0; count < (to + step - from) / step; count++)
	       {
		 dovar = from + count * step;
		 body;
	       cycle_label:;
	       }  */
	  tmp = fold_build2 (MINUS_EXPR, type, step, from);
	  tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
	  tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
	  tmp = gfc_evaluate_now (tmp, pblock);
	  count = gfc_create_var (type, "count");
	  TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
					     build_int_cst (type, 0));
	  TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
						count, tmp);
	  TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
						build_int_cst (type, 1));
	  TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
						count, TREE_VEC_ELT (incr, i));

	  /* Initialize DOVAR.  */
	  tmp = fold_build2 (MULT_EXPR, type, count, step);
	  tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
	  dovar_init = tree_cons (dovar, tmp, dovar_init);
	}

      if (!dovar_found)
	{
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = dovar_decl;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}
      else if (dovar_found == 2)
	{
	  tree c = NULL;

	  tmp = NULL;
	  if (!simple)
	    {
	      /* If dovar is lastprivate, but a different counter is used,
		 dovar += step needs to be added to
		 OMP_CLAUSE_LASTPRIVATE_STMT; otherwise the copied dovar
		 will have the value on entry to the last iteration rather
		 than the value after the iterator increment.  */
	      tmp = gfc_evaluate_now (step, pblock);
	      tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
	      tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
	      for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
		    break;
		  }
	    }
	  if (c == NULL && par_clauses != NULL)
	    {
	      for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		    && OMP_CLAUSE_DECL (c) == dovar_decl)
		  {
		    tree l = build_omp_clause (input_location,
					       OMP_CLAUSE_LASTPRIVATE);
		    OMP_CLAUSE_DECL (l) = dovar_decl;
		    OMP_CLAUSE_CHAIN (l) = omp_clauses;
		    OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
		    omp_clauses = l;
		    OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
		    break;
		  }
	    }
	  gcc_assert (simple || c != NULL);
	}
      if (!simple)
	{
	  tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
	  OMP_CLAUSE_DECL (tmp) = count;
	  omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
	}

      if (i + 1 < collapse)
	code = code->block->next;
    }

  if (pblock != &block)
    {
      pushlevel (0);
      gfc_start_block (&block);
    }

  gfc_start_block (&body);

  dovar_init = nreverse (dovar_init);
  while (dovar_init)
    {
      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
			   TREE_VALUE (dovar_init));
      dovar_init = TREE_CHAIN (dovar_init);
    }

  /* Cycle statement is implemented with a goto.  Exit statement must not be
     present for this loop.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);

  /* Put these labels where they can be found later. We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_omp_code (code->block->next, true);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* End of loop body.  */
  stmt = make_node (OMP_FOR);

  TREE_TYPE (stmt) = void_type_node;
  OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
  OMP_FOR_CLAUSES (stmt) = omp_clauses;
  OMP_FOR_INIT (stmt) = init;
  OMP_FOR_COND (stmt) = cond;
  OMP_FOR_INCR (stmt) = incr;
  gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
}