void
varpool_analyze_node (struct varpool_node *node)
{
  tree decl = node->symbol.decl;

  /* When reading back varpool at LTO time, we re-construct the queue in
     order to have the "needed" list right by inserting all needed nodes
     into varpool.  We however don't want to re-analyze already analyzed
     nodes.  */
  if (!node->analyzed)
    {
      gcc_assert (!in_lto_p || cgraph_function_flags_ready);
      /* Compute the alignment early so function body expanders are
	 already informed about increased alignment.  */
      align_variable (decl, 0);
    }
  if (node->alias && node->alias_of)
    {
      struct varpool_node *tgt = varpool_node_for_decl (node->alias_of);
      struct varpool_node *n;

      for (n = tgt; n && n->alias;
	   n = n->analyzed ? varpool_alias_aliased_node (n) : NULL)
	if (n == node)
	  {
	    error ("variable %q+D part of alias cycle", node->symbol.decl);
	    node->alias = false;
	    continue;
	  }
      if (!vec_safe_length (node->symbol.ref_list.references))
	ipa_record_reference ((symtab_node) node, (symtab_node) tgt,
			      IPA_REF_ALIAS, NULL);
      /* C++ FE sometimes changes linkage flags after producing same body
	 aliases.  */
      if (node->extra_name_alias)
	{
	  DECL_WEAK (node->symbol.decl) = DECL_WEAK (node->alias_of);
	  DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (node->alias_of);
	  DECL_VISIBILITY (node->symbol.decl)
	    = DECL_VISIBILITY (node->alias_of);
	  fixup_same_cpp_alias_visibility ((symtab_node) node,
					   (symtab_node) tgt,
					   node->alias_of);
	}
    }
  else if (DECL_INITIAL (decl))
    record_references_in_initializer (decl, node->analyzed);
  node->analyzed = true;
}
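/* Aside (not from the original source): a minimal, hypothetical test case
   of the kind the alias-cycle walk above is meant to catch.  Each variable
   names the other as its alias target, so following the target chain from
   TGT can arrive back at NODE and trigger the "part of alias cycle" error
   (whether this exact diagnostic fires can depend on analysis order and
   front-end checks that run earlier):

     int a __attribute__ ((alias ("b")));
     int b __attribute__ ((alias ("a")));

   Such a translation unit is expected to be rejected rather than
   compiled.  */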
/* Walk the decls we marked as necessary and see if they reference new
   variables or functions, and add them to the worklists.  */
bool
varpool_analyze_pending_decls (void)
{
  bool changed = false;

  timevar_push (TV_VARPOOL);
  while (varpool_first_unanalyzed_node)
    {
      struct varpool_node *node = varpool_first_unanalyzed_node, *next;
      tree decl = node->decl;
      bool analyzed = node->analyzed;

      varpool_first_unanalyzed_node->analyzed = true;
      varpool_first_unanalyzed_node
	= varpool_first_unanalyzed_node->next_needed;

      /* When reading back varpool at LTO time, we re-construct the queue in
	 order to have the "needed" list right by inserting all needed nodes
	 into varpool.  We however don't want to re-analyze already analyzed
	 nodes.  */
      if (!analyzed)
	{
	  gcc_assert (!in_lto_p || cgraph_function_flags_ready);
	  /* Compute the alignment early so function body expanders are
	     already informed about increased alignment.  */
	  align_variable (decl, 0);
	}
      if (node->alias && node->alias_of)
	{
	  struct varpool_node *tgt = varpool_node (node->alias_of);
	  struct varpool_node *n;

	  for (n = tgt; n && n->alias;
	       n = n->analyzed ? varpool_alias_aliased_node (n) : NULL)
	    if (n == node)
	      {
		error ("variable %q+D part of alias cycle", node->decl);
		node->alias = false;
		continue;
	      }
	  if (!VEC_length (ipa_ref_t, node->ref_list.references))
	    ipa_record_reference (NULL, node, NULL, tgt, IPA_REF_ALIAS, NULL);
	  /* C++ FE sometimes changes linkage flags after producing same body
	     aliases.  */
	  if (node->extra_name_alias)
	    {
	      DECL_WEAK (node->decl) = DECL_WEAK (node->alias_of);
	      TREE_PUBLIC (node->decl) = TREE_PUBLIC (node->alias_of);
	      DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->alias_of);
	      DECL_VISIBILITY (node->decl) = DECL_VISIBILITY (node->alias_of);
	      if (TREE_PUBLIC (node->decl))
		{
		  DECL_COMDAT (node->decl) = DECL_COMDAT (node->alias_of);
		  DECL_COMDAT_GROUP (node->decl)
		    = DECL_COMDAT_GROUP (node->alias_of);
		  if (DECL_ONE_ONLY (node->alias_of)
		      && !node->same_comdat_group)
		    {
		      node->same_comdat_group = tgt;
		      if (!tgt->same_comdat_group)
			tgt->same_comdat_group = node;
		      else
			{
			  struct varpool_node *n;
			  for (n = tgt->same_comdat_group;
			       n->same_comdat_group != tgt;
			       n = n->same_comdat_group)
			    ;
			  n->same_comdat_group = node;
			}
		    }
		}
	    }
	}
      else if (DECL_INITIAL (decl))
	record_references_in_initializer (decl, analyzed);
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    varpool_mark_needed_node (next);
	}
      changed = true;
    }
  timevar_pop (TV_VARPOOL);
  return changed;
}
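/* Aside (not from the original source): the same_comdat_group links above
   form a circular singly linked list.  Below is a standalone sketch of the
   insertion step, using illustrative names ("vnode" and
   "comdat_group_insert" are not GCC identifiers).  */

struct vnode
{
  /* Next node in the circular comdat group, or NULL when not grouped.  */
  struct vnode *same_comdat_group;
};

static void
comdat_group_insert (struct vnode *node, struct vnode *tgt)
{
  /* NODE now points at TGT ...  */
  node->same_comdat_group = tgt;
  if (!tgt->same_comdat_group)
    /* ... and a group of one becomes a two-element ring.  */
    tgt->same_comdat_group = node;
  else
    {
      /* Otherwise walk the ring to TGT's predecessor and splice NODE in
	 just before TGT.  */
      struct vnode *n;
      for (n = tgt->same_comdat_group;
	   n->same_comdat_group != tgt;
	   n = n->same_comdat_group)
	;
      n->same_comdat_group = node;
    }
}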