/* Tail of the GPLv3 license header (the opening of this comment is cut
   off in this excerpt):
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-gimple.h"
#include "tree-iterator.h"
#include "ggc.h"

/* This is a cache of STATEMENT_LIST nodes.  We create and destroy them
   fairly often during gimplification.  The GTY "deletable" marker tells
   the garbage collector it may simply drop the whole cache at
   collection time instead of treating it as a root.  */

static GTY ((deletable (""))) tree stmt_list_cache;

/* Return a fresh STATEMENT_LIST node, reusing one from the cache when
   possible.  The cache is a singly linked list threaded through
   TREE_CHAIN.

   NOTE(review): this excerpt is truncated -- the function body below is
   cut off before its return statement and closing brace.  */

tree
alloc_stmt_list (void)
{
  tree list = stmt_list_cache;
  if (list)
    {
      /* Pop the head of the free list.  */
      stmt_list_cache = TREE_CHAIN (list);
      /* A node must never appear on the cache twice; that would make
         the chain cyclic.  */
      gcc_assert (stmt_list_cache != list);
      /* Scrub the reused node's common fields, then re-tag it.  */
      memset (list, 0, sizeof(struct tree_common));
      TREE_SET_CODE (list, STATEMENT_LIST);
    }
  else
    list = make_node (STATEMENT_LIST);
  TREE_TYPE (list) = void_type_node;
/* (Tail of a larger comment; its opening is cut off in this excerpt.)
   ...node array and see if there are any elements with an exact match.
   If so, then we are done.  Otherwise, we test the next larger size up
   and continue until we are in the last array element.

   We do not actually walk members of the last array element.  While it
   might allow us to pick up a few reusable PHI nodes, it could
   potentially be very expensive if the program has released a bunch of
   large PHI nodes, but keeps asking for even larger PHI nodes.
   Experiments have shown that walking the elements of the last array
   entry would result in finding less than .1% additional reusable PHI
   nodes.

   Note that we can never have less than two PHI argument slots.  Thus,
   the -2 on all the calculations below.  */

#define NUM_BUCKETS 10

/* Per-size-class free lists of released PHI nodes; the GTY "deletable"
   marker lets the garbage collector discard the caches wholesale.  */
static GTY ((deletable (""))) VEC(gimple,gc) *free_phinodes[NUM_BUCKETS - 2];

/* Number of PHI nodes currently sitting on the free lists above.  */
static unsigned long free_phinode_count;

static int ideal_phi_node_len (int);

#ifdef GATHER_STATISTICS
/* Allocation/reuse counters, kept only in statistics-gathering builds.  */
unsigned int phi_nodes_reused;
unsigned int phi_nodes_created;
#endif

/* Initialize management of PHIs.

   NOTE(review): this excerpt is truncated -- the function body is cut
   off immediately after the declaration below.  */

void
init_phinodes (void)
{
  int i;
/* Obstack to allocate bitmap elements from.  */
static struct obstack bitmap_obstack;
/* Nonzero once BITMAP_OBSTACK has been initialized.  */
static int bitmap_obstack_init = FALSE;

/* Portable inline marker: empty on non-GCC compilers.  */
#ifndef INLINE
#ifndef __GNUC__
#define INLINE
#else
#define INLINE __inline__
#endif
#endif

/* Global data */
bitmap_element bitmap_zero_bits; /* An element of all zero bits. */
static bitmap_element *bitmap_free; /* Freelist of bitmap elements. */
/* Freelist for GC-allocated elements; "deletable" lets the collector
   simply drop it at collection time.  */
static GTY((deletable (""))) bitmap_element *bitmap_ggc_free;

/* Forward declarations (pre-C99 PARAMS style).  */
static void bitmap_elem_to_freelist PARAMS ((bitmap, bitmap_element *));
static void bitmap_element_free PARAMS ((bitmap, bitmap_element *));
static bitmap_element *bitmap_element_allocate PARAMS ((bitmap));
static int bitmap_element_zerop PARAMS ((bitmap_element *));
static void bitmap_element_link PARAMS ((bitmap, bitmap_element *));
static bitmap_element *bitmap_find_bit PARAMS ((bitmap, unsigned int));

/* Add ELEM to the appropriate freelist -- the obstack freelist or the
   GC freelist, depending on how HEAD allocates its elements.

   NOTE(review): this excerpt is truncated -- the function body is cut
   off after the first condition below.  */

static INLINE void
bitmap_elem_to_freelist (head, elt)
     bitmap head;
     bitmap_element *elt;
{
  if (head->using_obstack)
/* (Tail of a larger comment; its opening is cut off in this excerpt.)
   ...node array and see if there are any elements with an exact match.
   If so, then we are done.  Otherwise, we test the next larger size up
   and continue until we are in the last array element.

   We do not actually walk members of the last array element.  While it
   might allow us to pick up a few reusable PHI nodes, it could
   potentially be very expensive if the program has released a bunch of
   large PHI nodes, but keeps asking for even larger PHI nodes.
   Experiments have shown that walking the elements of the last array
   entry would result in finding less than .1% additional reusable PHI
   nodes.

   Note that we can never have less than two PHI argument slots.  Thus,
   the -2 on all the calculations below.  */

#define NUM_BUCKETS 10

/* Per-size-class free lists of released PHI nodes (tree-based variant);
   GTY "deletable" lets the garbage collector drop the caches wholesale.  */
static GTY ((deletable (""))) tree free_phinodes[NUM_BUCKETS - 2];

/* Number of PHI nodes currently sitting on the free lists above.  */
static unsigned long free_phinode_count;

static int ideal_phi_node_len (int);
static void resize_phi_node (tree *, int);

#ifdef GATHER_STATISTICS
/* Allocation/reuse counters, kept only in statistics-gathering builds.  */
unsigned int phi_nodes_reused;
unsigned int phi_nodes_created;
#endif

/* Initialize management of PHIs.

   NOTE(review): this excerpt is truncated -- the function body is cut
   off at its opening brace.  */

void
init_phinodes (void)
{
node array and see if there are any elements with an exact match. If so, then we are done. Otherwise, we test the next larger size up and continue until we are in the last array element. We do not actually walk members of the last array element. While it might allow us to pick up a few reusable PHI nodes, it could potentially be very expensive if the program has released a bunch of large PHI nodes, but keeps asking for even larger PHI nodes. Experiments have shown that walking the elements of the last array entry would result in finding less than .1% additional reusable PHI nodes. Note that we can never have less than two PHI argument slots. Thus, the -2 on all the calculations below. */ #define NUM_BUCKETS 10 static GTY ((deletable (""))) vec<gimple, va_gc> *free_phinodes[NUM_BUCKETS - 2]; static unsigned long free_phinode_count; static int ideal_phi_node_len (int); unsigned int phi_nodes_reused; unsigned int phi_nodes_created; /* Dump some simple statistics regarding the re-use of PHI nodes. */ void phinodes_print_statistics (void) { fprintf (stderr, "PHI nodes allocated: %u\n", phi_nodes_created); fprintf (stderr, "PHI nodes reused: %u\n", phi_nodes_reused); }
/* Delete this item, but only when deletion is currently permitted.
   NOTE(review): looks like a Qt slot wired to a UI action -- confirm
   against the class declaration.  */
void ItemXML::slotDelete()
{
  if (!deletable())
    return;

  deleteItem();
}
/* Tail of the GPLv3 license header (the opening of this comment is cut
   off in this excerpt):
   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-iterator.h"
#include "ggc.h"

/* This is a cache of STATEMENT_LIST nodes.  We create and destroy them
   fairly often during gimplification.  The GTY "deletable" marker lets
   the garbage collector drop the whole cache at collection time.  */

static GTY ((deletable (""))) vec<tree, va_gc> *stmt_list_cache;

/* Return a STATEMENT_LIST node, popping one off the cache when it is
   non-empty, otherwise allocating a fresh node.

   NOTE(review): this excerpt is truncated -- the function below is cut
   off before its closing brace.  */

tree
alloc_stmt_list (void)
{
  tree list;
  if (!vec_safe_is_empty (stmt_list_cache))
    {
      list = stmt_list_cache->pop ();
      /* Scrub the reused node's base fields, then re-tag it.  */
      memset (list, 0, sizeof (struct tree_base));
      TREE_SET_CODE (list, STATEMENT_LIST);
    }
  else
    list = make_node (STATEMENT_LIST);
  TREE_TYPE (list) = void_type_node;
  return list;
/* Tail of the GPLv2 license header (the opening of this comment is cut
   off in this excerpt):
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "ggc.h"

static void free_list PARAMS ((rtx *, rtx *));

/* Functions for maintaining cache-able lists of EXPR_LIST and
   INSN_LISTs.  */

/* An INSN_LIST containing all INSN_LISTs allocated but currently unused.
   "deletable" lets the garbage collector simply drop the cache.  */
static GTY ((deletable (""))) rtx unused_insn_list;

/* An EXPR_LIST containing all EXPR_LISTs allocated but currently unused.  */
static GTY ((deletable (""))) rtx unused_expr_list;

/* This function will free an entire list of either EXPR_LIST or
   INSN_LIST nodes.  This is to be used only on lists that consist
   exclusively of nodes of one type only.  This is only called by
   free_EXPR_LIST_list and free_INSN_LIST_list.

   NOTE(review): this excerpt is truncated -- the function body is cut
   off after the local declarations below.  */

static void
free_list (listp, unused_listp)
     rtx *listp, *unused_listp;
{
  rtx link, prev_link;