Example #1
static void
process_references (struct ipa_ref_list *list,
		    symtab_node *first,
		    bool before_inlining_p,
		    struct pointer_set_t *reachable)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    {
      if (is_a <cgraph_node> (ref->referred))
	{
	  struct cgraph_node *node = ipa_ref_node (ref);

	  if (node->analyzed
	      && (!DECL_EXTERNAL (node->symbol.decl)
		  || node->alias
	          || before_inlining_p))
	    pointer_set_insert (reachable, node);
	  enqueue_node ((symtab_node) node, first, reachable);
	}
      else
	{
	  struct varpool_node *node = ipa_ref_varpool_node (ref);

	  if (node->analyzed
	      && (!DECL_EXTERNAL (node->symbol.decl)
		  || node->alias
		  || before_inlining_p))
	    pointer_set_insert (reachable, node);
	  enqueue_node ((symtab_node) node, first, reachable);
	}
    }
}
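All of these snippets lean on the same pointer_set_t API from GCC's pointer-set.h (later replaced by hash_set). As a quick reference, here is a minimal sketch of the shared idiom; do_work is a hypothetical per-node action, and the key detail is that pointer_set_insert returns nonzero when the pointer was already in the set, so one call both tests and records membership:

/* Minimal sketch of the pointer-set idiom shared by these examples.
   Assumes GCC's pointer-set.h of this era; do_work is hypothetical.  */
extern void do_work (void *p);	/* hypothetical per-node action */

static void
visit_once (struct pointer_set_t *visited, void *p)
{
  /* pointer_set_insert returns nonzero if P was already present,
     so the work below runs at most once per pointer.  */
  if (!pointer_set_insert (visited, p))
    do_work (p);
}

static void
example_usage (void *a, void *b)
{
  struct pointer_set_t *visited = pointer_set_create ();
  visit_once (visited, a);
  visit_once (visited, a);	/* second call is a no-op */
  visit_once (visited, b);
  pointer_set_destroy (visited);
}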
Example #2
static void walk_functions(tree_set *visited, const struct cgraph_node *node)
{
	struct cgraph_edge *e;
	const_tree caller;

	if (!node)
		return;
	caller = NODE_DECL(node);

	if (pointer_set_insert(visited, caller))
		return;

	for (e = node->callees; e; e = e->next_callee) {
		const struct cgraph_node *next_node;
		tree callee = gimple_call_fndecl(e->call_stmt);

		if (DECL_BUILT_IN(callee))
			continue;

		print_function(caller, callee);

		next_node = cgraph_get_node(callee);
		walk_functions(visited, next_node);
	}
}
Example #3
void
gfc_resolve_omp_parallel_blocks (gfc_code *code, gfc_namespace *ns)
{
  struct omp_context ctx;
  gfc_omp_clauses *omp_clauses = code->ext.omp_clauses;
  gfc_namelist *n;
  int list;

  ctx.code = code;
  ctx.sharing_clauses = pointer_set_create ();
  ctx.private_iterators = pointer_set_create ();
  ctx.previous = omp_current_ctx;
  omp_current_ctx = &ctx;

  for (list = 0; list < OMP_LIST_NUM; list++)
    for (n = omp_clauses->lists[list]; n; n = n->next)
      pointer_set_insert (ctx.sharing_clauses, n->sym);

  if (code->op == EXEC_OMP_PARALLEL_DO)
    gfc_resolve_omp_do_blocks (code, ns);
  else
    gfc_resolve_blocks (code->block, ns);

  omp_current_ctx = ctx.previous;
  pointer_set_destroy (ctx.sharing_clauses);
  pointer_set_destroy (ctx.private_iterators);
}
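Note the save/restore discipline around the recursion: contexts are stacked through the previous field rather than a separate data structure, so nested constructs see the whole chain via omp_current_ctx. A reduced sketch of just that pattern, where resolve_body is a hypothetical stand-in for the gfc_resolve_* calls:

/* Sketch of the context push/pop used above; struct omp_context and
   omp_current_ctx follow the example, resolve_body is hypothetical.  */
extern void resolve_body (gfc_code *code);

static void
with_new_context (gfc_code *code)
{
  struct omp_context ctx;

  ctx.code = code;
  ctx.sharing_clauses = pointer_set_create ();
  ctx.previous = omp_current_ctx;	/* push */
  omp_current_ctx = &ctx;

  resolve_body (code);			/* may walk the omp_current_ctx chain */

  omp_current_ctx = ctx.previous;	/* pop */
  pointer_set_destroy (ctx.sharing_clauses);
}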
Example #4
static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  tree binfo,
			  tree otr_type,
			  tree type_binfo,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;

  gcc_checking_assert (BINFO_VTABLE (type_binfo));

  if (types_same_for_odr (type, outer_type))
    {
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      /* For types in an anonymous namespace, first check whether the
	 respective vtable is alive.  If not, we know the type can't be
	 called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  struct varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_get_node (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
	  if (target)
	    maybe_record_node (nodes, target, inserted, NULL);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual methods is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, base_binfo, otr_type,
				/* In the case of single inheritance,
				   the virtual table is shared with
				   the outer type.  */
				BINFO_VTABLE (base_binfo) ? base_binfo : type_binfo,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous);
}
Example #5
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #6
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (target
      /* Those are used to mark impossible scenarios.  */
      && (fcode = DECL_FUNCTION_CODE (target))
	  != BUILT_IN_UNREACHABLE
      && fcode != BUILT_IN_TRAP
      && !pointer_set_insert (inserted, target)
      && (target_node = cgraph_get_node (target)) != NULL
      && (TREE_PUBLIC (target)
	  || target_node->symbol.definition)
      && symtab_real_symbol_p ((symtab_node)target_node))
    {
      pointer_set_insert (cached_polymorphic_call_targets,
			  target_node);
      nodes.safe_push (target_node);
    }
}
Example #7
static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool *completep)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (!target
      /* Those are used to mark impossible scenarios.  */
      || (fcode = DECL_FUNCTION_CODE (target))
	  == BUILT_IN_UNREACHABLE
      || fcode == BUILT_IN_TRAP)
    return;

  target_node = cgraph_get_node (target);

  if (target_node != NULL
      && (TREE_PUBLIC (target)
	  || target_node->definition)
      && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && !type_in_anonymous_namespace_p
		 (method_class_type (TREE_TYPE (target))))
    *completep = true;
}
Example #8
/* We see the expression EXP in basic block BB.  If it's an interesting
   expression (an INDIRECT_REF through an SSA_NAME) possibly insert the
   expression into the set NONTRAP or the hash table of seen expressions.
   STORE is true if this expression is on the LHS, otherwise it's on
   the RHS.  */
static void
add_or_mark_expr (basic_block bb, tree exp,
                  struct pointer_set_t *nontrap, bool store)
{
    if (INDIRECT_REF_P (exp)
            && TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME)
    {
        tree name = TREE_OPERAND (exp, 0);
        struct name_to_bb map;
        void **slot;
        struct name_to_bb *n2bb;
        basic_block found_bb = 0;

        /* Try to find the last seen INDIRECT_REF through the same
           SSA_NAME, which can trap.  */
        map.ssa_name = name;
        map.bb = 0;
        map.store = store;
        slot = htab_find_slot (seen_ssa_names, &map, INSERT);
        n2bb = (struct name_to_bb *) *slot;
        if (n2bb)
            found_bb = n2bb->bb;

        /* If we've found a trapping INDIRECT_REF, _and_ it dominates EXP
           (it's in a basic block on the path from us to the dominator root)
           then we can't trap.  */
        if (found_bb && found_bb->aux == (void *)1)
        {
            pointer_set_insert (nontrap, exp);
        }
        else
        {
            /* EXP might trap, so insert it into the hash table.  */
            if (n2bb)
            {
                n2bb->bb = bb;
            }
            else
            {
                n2bb = XNEW (struct name_to_bb);
                n2bb->ssa_name = name;
                n2bb->bb = bb;
                n2bb->store = store;
                *slot = n2bb;
            }
        }
    }
}
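Alongside the pointer set, add_or_mark_expr relies on libiberty's hash table and its find-or-insert idiom: htab_find_slot with INSERT returns a slot that either holds the existing entry or is NULL and ready to be filled. A sketch of just that idiom, assuming the same seen_ssa_names table and struct name_to_bb (with its hash/equality callbacks) as in the surrounding pass:

/* Sketch of the htab_find_slot INSERT idiom from add_or_mark_expr.
   seen_ssa_names and struct name_to_bb are assumed to be set up as
   in the surrounding pass.  */
static struct name_to_bb *
find_or_add_name (tree name, basic_block bb, bool store)
{
  struct name_to_bb map, *n2bb;
  void **slot;

  map.ssa_name = name;
  map.bb = 0;
  map.store = store;
  slot = htab_find_slot (seen_ssa_names, &map, INSERT);
  n2bb = (struct name_to_bb *) *slot;
  if (!n2bb)
    {
      /* Empty slot: allocate and publish a fresh entry.  */
      n2bb = XNEW (struct name_to_bb);
      n2bb->ssa_name = name;
      n2bb->bb = bb;
      n2bb->store = store;
      *slot = n2bb;
    }
  return n2bb;
}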
Example #9
void
gfc_resolve_do_iterator (gfc_code *code, gfc_symbol *sym)
{
  int i = omp_current_do_collapse;
  gfc_code *c = omp_current_do_code;

  if (sym->attr.threadprivate)
    return;

  /* !$omp do and !$omp parallel do iteration variable is predetermined
     private just in the !$omp do resp. !$omp parallel do construct,
     with no implications for the outer parallel constructs.  */

  while (i-- >= 1)
    {
      if (code == c)
	return;

      c = c->block->next;
    }

  if (omp_current_ctx == NULL)
    return;

  if (pointer_set_contains (omp_current_ctx->sharing_clauses, sym))
    return;

  if (! pointer_set_insert (omp_current_ctx->private_iterators, sym))
    {
      gfc_omp_clauses *omp_clauses = omp_current_ctx->code->ext.omp_clauses;
      gfc_namelist *p;

      p = gfc_get_namelist ();
      p->sym = sym;
      p->next = omp_clauses->lists[OMP_LIST_PRIVATE];
      omp_clauses->lists[OMP_LIST_PRIVATE] = p;
    }
}
Example #10
static struct pointer_set_t *
suggest_attribute (int option, tree decl, bool known_finite,
		   struct pointer_set_t *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = pointer_set_create (); 
  if (pointer_set_contains (warned_about, decl))
    return warned_about;
  pointer_set_insert (warned_about, decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? _("function might be candidate for attribute %<%s%>")
	      : _("function might be candidate for attribute %<%s%>"
		  " if it is known to return normally"), attrib_name);
  return warned_about;
}
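The warned_about set is created lazily on the first diagnostic and handed back to the caller, so one set deduplicates warnings across a whole pass without a global table. A hypothetical caller would thread it through like this (using the real -Wsuggest-attribute=pure option code):

/* Hypothetical caller showing how the lazily-created set is threaded
   through repeated calls during a pass.  */
static struct pointer_set_t *warned_about_pure;

static void
maybe_suggest_pure (tree decl, bool known_finite)
{
  warned_about_pure
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about_pure, "pure");
}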
Example #11
File: varpool.c  Project: palves/gcc
static void
varpool_remove_unreferenced_decls (void)
{
  varpool_node *next, *node;
  varpool_node *first = (varpool_node *)(void *)1;
  int i;
  struct ipa_ref *ref;
  struct pointer_set_t *referenced = pointer_set_create ();

  if (seen_error ())
    return;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Trivially needed variables:");
  FOR_EACH_DEFINED_VARIABLE (node)
    {
      if (node->analyzed
	  && (!varpool_can_remove_if_no_refs (node)
	      /* We just expanded all function bodies.  See if any of
		 them needed the variable.  */
	      || DECL_RTL_SET_P (node->decl)))
	{
	  enqueue_node (node, &first);
          if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", node->asm_name ());
	}
    }
  while (first != (varpool_node *)(void *)1)
    {
      node = first;
      first = (varpool_node *)first->aux;

      if (node->same_comdat_group)
	{
	  symtab_node *next;
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    {
	      varpool_node *vnext = dyn_cast <varpool_node *> (next);
	      if (vnext && vnext->analyzed && !symtab_comdat_local_p (next))
		enqueue_node (vnext, &first);
	    }
	}
      for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	{
	  varpool_node *vnode = dyn_cast <varpool_node *> (ref->referred);
	  if (vnode
	      && !vnode->in_other_partition
	      && (!DECL_EXTERNAL (ref->referred->decl)
		  || vnode->alias)
	      && vnode->analyzed)
	    enqueue_node (vnode, &first);
	  else
	    pointer_set_insert (referenced, node);
	}
    }
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nRemoving variables:");
  for (node = varpool_first_defined_variable (); node; node = next)
    {
      next = varpool_next_defined_variable (node);
      if (!node->aux)
	{
          if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", node->asm_name ());
	  if (pointer_set_contains (referenced, node))
	    varpool_remove_initializer (node);
	  else
	    varpool_remove_node (node);
	}
    }
  pointer_set_destroy (referenced);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\n");
}
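The enqueue_node helper is not shown here; in varpool.c it has the shape of an intrusive worklist threaded through each node's aux pointer, with (void *)1 as the list terminator, so a non-NULL aux doubles as the "already queued" bit that the removal loop tests at the end. A sketch under that assumption:

/* Sketch of the aux-pointer worklist this example assumes.  *FIRST
   starts as (varpool_node *)(void *)1, the list terminator, so aux
   is non-NULL for every node that has ever been queued.  */
static void
enqueue_node (varpool_node *node, varpool_node **first)
{
  if (node->aux)
    return;			/* already queued or processed */
  gcc_checking_assert (*first);
  node->aux = *first;		/* link ahead of the current head */
  *first = node;
}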
Example #12
static void
add_type_duplicate (odr_type val, tree type)
{
  if (!val->types_set)
    val->types_set = pointer_set_create ();

  /* See if this duplicate is new.  */
  if (!pointer_set_insert (val->types_set, type))
    {
      bool merge = true;
      bool base_mismatch = false;
      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);
      unsigned int i,j;

      /* First we compare memory layout.  */
      if (!types_compatible_p (val->type, type))
	{
	  merge = false;
	  if (BINFO_VTABLE (TYPE_BINFO (val->type))
	      && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
			     "type %qD violates one definition rule  ",
			     type))
	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
		    "a type with the same name but different layout is "
		    "defined in another translation unit");
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
	    
	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Next sanity check that bases are the same.  If not, we will end
	 up producing wrong answers.  */
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    if (val->bases.length () <= j || val->bases[j] != base)
	      base_mismatch = true;
	    j++;
	  }
      if (base_mismatch)
	{
	  merge = false;

	  if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
			  "type %qD violates one definition rule  ",
			  type))
	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
		    "a type with the same name but different bases is "
		    "defined in another translation unit");
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
	    
	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Regularize things a little.  During LTO same types may come with
	 different BINFOs.  Either because their virtual table was
	 not merged by tree merging and only later at decl merging or
	 because one type comes with external vtable, while other
	 with internal.  We want to merge equivalent binfos to conserve
	 memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace by internal variants. Not doing so will lead
         to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge)
	{
	  tree master_binfo = TYPE_BINFO (val->type);
	  tree v1 = BINFO_VTABLE (master_binfo);
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	    {
	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
			  && operand_equal_p (TREE_OPERAND (v1, 1),
					      TREE_OPERAND (v2, 1), 0));
	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	    }
	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
		      == DECL_ASSEMBLER_NAME (v2));

	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	    {
	      unsigned int i;

	      TYPE_BINFO (val->type) = TYPE_BINFO (type);
	      for (i = 0; i < val->types->length (); i++)
		{
		  if (TYPE_BINFO ((*val->types)[i])
		      == master_binfo)
		    TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
		}
	    }
	  else
	    TYPE_BINFO (type) = master_binfo;
	}
    }
}
Example #13
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, p_set, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #14
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS chained
	 list.  */
      if (wtd->bind_expr_stack)
	{
	  int i;
	  for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
						     wtd->bind_expr_stack, i))))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #15
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      case OMP_CLAUSE_PRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	else if (wtd->omp_ctx != NULL)
	  {
	    /* Private clause doesn't cause any references to the
	       var in outer contexts, avoid calling
	       omp_cxx_notice_variable for it.  */
	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	    wtd->omp_ctx = NULL;
	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			  data, NULL);
	    wtd->omp_ctx = old;
	    *walk_subtrees = 0;
	  }
	break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	/* Don't dereference an invisiref in reduction clause's
	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	   still needs to be genericized.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  {
	    *walk_subtrees = 0;
	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			    cp_genericize_r, data, NULL);
	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			    cp_genericize_r, data, NULL);
	  }
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						 : TRY_FINALLY_EXPR,
			  void_type_node,
			  CLEANUP_BODY (stmt),
			  CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
	= (TREE_OPERAND (stmt, 1)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	   : NULL_TREE);
      tree type_right
	= (TREE_OPERAND (stmt, 2)
	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	   : NULL_TREE);
      if (type_left
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
	{
	  TREE_OPERAND (stmt, 1)
	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_left));
	}
      if (type_right
	  && !useless_type_conversion_p (TREE_TYPE (stmt),
					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
	{
	  TREE_OPERAND (stmt, 2)
	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						 type_right));
	}
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	 BLOCK, and append an IMPORTED_DECL to its BLOCK_VARS chained
	 list.  */
      if (wtd->bind_expr_stack.exists ())
	{
	  int i;
	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
	      break;
	}
      if (block)
	{
	  tree using_directive;
	  gcc_assert (TREE_OPERAND (stmt, 0));

	  using_directive = make_node (IMPORTED_DECL);
	  TREE_TYPE (using_directive) = void_type_node;

	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
	    = TREE_OPERAND (stmt, 0);
	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
	  BLOCK_VARS (block) = using_directive;
	}
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_SHARED:
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	  case OMP_CLAUSE_LASTPRIVATE:
	    decl = OMP_CLAUSE_DECL (c);
	    if (decl == error_mark_node || !omp_var_to_track (decl))
	      break;
	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	    if (n != NULL)
	      break;
	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			       ? OMP_CLAUSE_DEFAULT_SHARED
			       : OMP_CLAUSE_DEFAULT_PRIVATE);
	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
		&& omp_ctx.outer)
	      omp_cxx_notice_variable (omp_ctx.outer, decl);
	    break;
	  case OMP_CLAUSE_DEFAULT:
	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
	      omp_ctx.default_shared = true;
	  default:
	    break;
	  }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
	   || TREE_CODE (stmt) == OMP_SIMD
	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
	*stmt_p
	  = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
					SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
	*stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      else
	*stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
					      SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
	*stmt_p = size_one_node;
      return NULL;
    }    

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
Example #16
static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through. */
      && !(DECL_THUNK_P (current_function_decl)
	   && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
	  htab_find_with_hash (cp_function_chain->extern_decl_map,
			       &in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
	   && TREE_OPERAND (stmt, 0)
	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_PRIVATE:
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_LASTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
	/* Don't dereference an invisiref in OpenMP clauses.  */
	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	  *walk_subtrees = 0;
	break;
      case OMP_CLAUSE_REDUCTION:
	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
	break;
      default:
	break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
					     : TRY_FINALLY_EXPR,
		      void_type_node,
		      CLEANUP_BODY (stmt),
		      CLEANUP_EXPR (stmt));

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}