Example #1
tree
build_throw (tree exp)
{
  if (exp == error_mark_node)
    return exp;

  if (processing_template_decl)
    {
      if (cfun)
	current_function_returns_abnormally = 1;
      exp = build_min (THROW_EXPR, void_type_node, exp);
      SET_EXPR_LOCATION (exp, input_location);
      return exp;
    }

  if (exp && null_node_p (exp))
    warning (0, "throwing NULL, which has integral, not pointer type");

  if (exp != NULL_TREE)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (exp, true))
	return error_mark_node;
    }

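  /* Bail out if exception handling has been disabled (e.g. with
     -fno-exceptions); doing_eh reports the problem.  */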
  if (! doing_eh ())
    return error_mark_node;

  if (exp)
    {
      tree throw_type;
      tree temp_type;
      tree cleanup;
      tree object, ptr;
      tree tmp;
      tree allocate_expr;

      /* The CLEANUP_TYPE is the internal type of a destructor.  */
      if (!cleanup_type)
	{
	  tmp = build_function_type_list (void_type_node,
					  ptr_type_node, NULL_TREE);
	  cleanup_type = build_pointer_type (tmp);
	}

      if (!throw_fn)
	{
	  tree name = get_identifier ("__cxa_throw");
	  throw_fn = get_global_binding (name);
	  if (!throw_fn)
	    {
	      /* Declare void __cxa_throw (void*, void*, void (*)(void*)).  */
	      /* ??? Second argument is supposed to be "std::type_info*".  */
	      tmp = build_function_type_list (void_type_node,
					      ptr_type_node, ptr_type_node,
					      cleanup_type, NULL_TREE);
	      throw_fn = push_throw_library_fn (name, tmp);

	      if (flag_tm)
		{
		  tree itm_name = get_identifier ("_ITM_cxa_throw");
		  tree itm_fn = get_global_binding (itm_name);
		  if (!itm_fn)
		    itm_fn = push_throw_library_fn (itm_name, tmp);
		  apply_tm_attr (itm_fn, get_identifier ("transaction_pure"));
		  record_tm_replacement (throw_fn, itm_fn);
		}
	    }
	}

      /* [except.throw]

	 A throw-expression initializes a temporary object, the type
	 of which is determined by removing any top-level
	 cv-qualifiers from the static type of the operand of throw
	 and adjusting the type from "array of T" or "function return
	 T" to "pointer to T" or "pointer to function returning T"
	 respectively.  */
      temp_type = is_bitfield_expr_with_lowered_type (exp);
      if (!temp_type)
	temp_type = cv_unqualified (type_decays_to (TREE_TYPE (exp)));

      /* OK, this is kind of wacky.  The standard says that we call
	 terminate when the exception handling mechanism, after
	 completing evaluation of the expression to be thrown but
	 before the exception is caught (_except.throw_), calls a
	 user function that exits via an uncaught exception.

	 So we have to protect the actual initialization of the
	 exception object with terminate(), but evaluate the
	 expression first.  Since there could be temps in the
	 expression, we need to handle that, too.  We also expand
	 the call to __cxa_allocate_exception first (which doesn't
	 matter, since it can't throw).  */

      /* Allocate the space for the exception.  */
      allocate_expr = do_allocate_exception (temp_type);
      allocate_expr = get_target_expr (allocate_expr);
      ptr = TARGET_EXPR_SLOT (allocate_expr);
      TARGET_EXPR_CLEANUP (allocate_expr) = do_free_exception (ptr);
      CLEANUP_EH_ONLY (allocate_expr) = 1;
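      /* The cleanup above (__cxa_free_exception on PTR) runs only on the
	 exception path, i.e. if initializing the exception object throws
	 before __cxa_throw takes ownership of it.  */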

      object = build_nop (build_pointer_type (temp_type), ptr);
      object = cp_build_fold_indirect_ref (object);

      /* And initialize the exception object.  */
      if (CLASS_TYPE_P (temp_type))
	{
	  int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
	  vec<tree, va_gc> *exp_vec;
	  bool converted = false;

	  /* Under C++0x [12.8/16 class.copy], a thrown lvalue is sometimes
	     treated as an rvalue for the purposes of overload resolution
	     to favor move constructors over copy constructors.  */
	  if (/* Must be a local, automatic variable.  */
	      VAR_P (exp)
	      && DECL_CONTEXT (exp) == current_function_decl
	      && ! TREE_STATIC (exp)
	      /* The variable must not have the `volatile' qualifier.  */
	      && !(cp_type_quals (TREE_TYPE (exp)) & TYPE_QUAL_VOLATILE))
	    {
	      tree moved = move (exp);
	      exp_vec = make_tree_vector_single (moved);
	      moved = (build_special_member_call
		       (object, complete_ctor_identifier, &exp_vec,
			TREE_TYPE (object), flags|LOOKUP_PREFER_RVALUE,
			tf_none));
	      release_tree_vector (exp_vec);
	      if (moved != error_mark_node)
		{
		  exp = moved;
		  converted = true;
		}
	    }

	  /* Call the copy constructor.  */
	  if (!converted)
	    {
	      exp_vec = make_tree_vector_single (exp);
	      exp = (build_special_member_call
		     (object, complete_ctor_identifier, &exp_vec,
		      TREE_TYPE (object), flags, tf_warning_or_error));
	      release_tree_vector (exp_vec);
	    }

	  if (exp == error_mark_node)
	    {
	      error ("  in thrown expression");
	      return error_mark_node;
	    }
	}
      else
	{
	  tmp = decay_conversion (exp, tf_warning_or_error);
	  if (tmp == error_mark_node)
	    return error_mark_node;
	  exp = build2 (INIT_EXPR, temp_type, object, tmp);
	}

      /* Mark any cleanups from the initialization as MUST_NOT_THROW, since
	 they are run after the exception object is initialized.  */
      cp_walk_tree_without_duplicates (&exp, wrap_cleanups_r, 0);

      /* Prepend the allocation.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (exp), allocate_expr, exp);

      /* Force all the cleanups to be evaluated here so that we don't have
	 to do them during unwinding.  */
      exp = build1 (CLEANUP_POINT_EXPR, void_type_node, exp);

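      /* Look up (or build) the std::type_info address for the thrown type;
	 it becomes the second argument to __cxa_throw.  */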
      throw_type = build_eh_type_type (prepare_eh_type (TREE_TYPE (object)));

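      /* Pick the cleanup __cxa_throw runs once the exception is handled:
	 the complete destructor if the type needs one, else a null
	 pointer.  */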
      cleanup = NULL_TREE;
      if (type_build_dtor_call (TREE_TYPE (object)))
	{
	  tree dtor_fn = lookup_fnfields (TYPE_BINFO (TREE_TYPE (object)),
					  complete_dtor_identifier, 0);
	  dtor_fn = BASELINK_FUNCTIONS (dtor_fn);
	  mark_used (dtor_fn);
	  if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (object)))
	    {
	      cxx_mark_addressable (dtor_fn);
	      /* Pretend it's a normal function.  */
	      cleanup = build1 (ADDR_EXPR, cleanup_type, dtor_fn);
	    }
	}
      if (cleanup == NULL_TREE)
	cleanup = build_int_cst (cleanup_type, 0);

      /* ??? Indicate that this function call throws throw_type.  */
      tmp = cp_build_function_call_nary (throw_fn, tf_warning_or_error,
					 ptr, throw_type, cleanup, NULL_TREE);

      /* Tack on the initialization stuff.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (tmp), exp, tmp);
    }
  else
    {
      /* Rethrow current exception.  */
      if (!rethrow_fn)
	{
	  tree name = get_identifier ("__cxa_rethrow");
	  rethrow_fn = get_global_binding (name);
	  if (!rethrow_fn)
	    /* Declare void __cxa_rethrow (void).  */
	    rethrow_fn = push_throw_library_fn
	      (name, build_function_type_list (void_type_node, NULL_TREE));

	  if (flag_tm)
	    apply_tm_attr (rethrow_fn, get_identifier ("transaction_pure"));
	}

      /* ??? Indicate that this function call allows exceptions of the type
	 of the enclosing catch block (if known).  */
      exp = cp_build_function_call_vec (rethrow_fn, NULL, tf_warning_or_error);
    }

  exp = build1 (THROW_EXPR, void_type_node, exp);
  SET_EXPR_LOCATION (exp, input_location);

  return exp;
}
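
For reference, the runtime entry points this function wires up come from the
Itanium C++ ABI, and the lowering can be imitated by hand.  A minimal sketch
of roughly what `throw 42;` becomes (the extern "C" declarations follow the
published ABI; this is an illustration, not GCC source):

#include <cstddef>
#include <cstdio>
#include <typeinfo>

extern "C" void *__cxa_allocate_exception (std::size_t);
extern "C" void __cxa_throw (void *, std::type_info *, void (*) (void *));

int
main ()
{
  try
    {
      /* Allocate the exception object, initialize it, then hand it to
	 the unwinder together with its type_info.  `int' is trivially
	 destructible, so the cleanup argument is null.  */
      void *p = __cxa_allocate_exception (sizeof (int));
      *static_cast<int *> (p) = 42;
      __cxa_throw (p, const_cast<std::type_info *> (&typeid (int)), 0);
    }
  catch (int e)
    {
      std::printf ("caught %d\n", e);  /* prints "caught 42" */
    }
}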
Example #2
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and chain an IMPORTED_DECL onto its BLOCK_VARS list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack, i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
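
Each example above is a walk_tree callback: cp_walk_tree hands it the address
of every subexpression, and the callback may rewrite *stmt_p in place, clear
*walk_subtrees to keep the walker out of the node's operands, or return
non-NULL to abort the walk.  A toy model of that contract (invented names,
not GCC's actual walker):

#include <cstdio>

struct node { int code; node *kids[2]; };

typedef node *(*walk_fn) (node **node_p, int *walk_subtrees, void *data);

/* Pre-order walk: call FN on *NODE_P, then descend into the children
   unless the callback cleared *walk_subtrees or returned non-null.  */
static node *
walk (node **node_p, walk_fn fn, void *data)
{
  int walk_subtrees = 1;
  node *result = fn (node_p, &walk_subtrees, data);
  if (result || !walk_subtrees)
    return result;
  for (int i = 0; i < 2; i++)
    if ((*node_p)->kids[i]
	&& (result = walk (&(*node_p)->kids[i], fn, data)))
      return result;
  return 0;
}

static node *
count_cb (node **, int *, void *data)
{
  ++*static_cast<int *> (data);	/* just count the nodes visited */
  return 0;			/* NULL: keep walking */
}

int
main ()
{
  node leaf = { 1, { 0, 0 } };
  node root = { 0, { &leaf, 0 } };
  node *rp = &root;
  int n = 0;
  walk (&rp, count_cb, &n);
  std::printf ("%d nodes\n", n);  /* prints "2 nodes" */
}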
Example #3
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, p_set, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
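
The "don't walk the same tree twice" test relies on a pointer set: every node
the callback has finished with is recorded, so shared subtrees (common in
GENERIC) are rewritten only once.  A toy stand-in for the idiom (GCC's real
implementation lives in pointer-set.c and is a hash table, not a linear
scan):

#include <cstdio>
#include <cstring>

struct toy_pointer_set { const void *slot[64]; };  /* fixed toy capacity */

static bool
toy_contains (toy_pointer_set *s, const void *p)
{
  for (int i = 0; i < 64; i++)
    if (s->slot[i] == p)
      return true;
  return false;
}

static void
toy_insert (toy_pointer_set *s, const void *p)
{
  for (int i = 0; i < 64; i++)
    if (!s->slot[i])
      {
	s->slot[i] = p;
	return;
      }
}

int
main ()
{
  toy_pointer_set s;
  std::memset (&s, 0, sizeof s);
  int shared_node;		/* stands in for a shared tree node */
  for (int visit = 1; visit <= 2; visit++)
    if (toy_contains (&s, &shared_node))
      std::printf ("visit %d: already seen, *walk_subtrees = 0\n", visit);
    else
      {
	toy_insert (&s, &shared_node);
	std::printf ("visit %d: genericize\n", visit);
      }
}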
Example #4
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the
	       var in outer contexts, avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	/* Don't dereference an invisiref in reduction clause's
	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	   still needs to be genericized.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			    cp_genericize_r, data, NULL);
	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						 : TRY_FINALLY_EXPR,
			  void_type_node,
			  CLEANUP_BODY (stmt),
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and chain an IMPORTED_DECL onto its BLOCK_VARS list.  */
      if (wtd->bind_expr_stack.exists ())
	{
	  int i;
	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
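
  /* An OpenMP parallel or task region: scan the clauses first, then walk
     the body under a fresh omp_ctx so that variable uses are attributed
     to this region and propagated outward where required.  */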
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
	   || TREE_CODE (stmt) == OMP_SIMD
	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
	*stmt_p
	  = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
					SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
	*stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      else
	*stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
	*stmt_p = size_one_node;
      return NULL;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
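
For completeness, the walk in Example #4 is kicked off by a small driver;
modeled on GCC's cp_genericize_tree from the same era (details vary between
revisions), it looks roughly like this:

static void
cp_genericize_tree (tree *t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  wtd.bind_expr_stack.release ();
}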