static unsigned int tracer (void) { bool changed; if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1) return 0; mark_dfs_back_edges (); if (dump_file) brief_dump_cfg (dump_file, dump_flags); /* Trace formation is done on the fly inside tail_duplicate */ changed = tail_duplicate (); if (changed) { free_dominance_info (CDI_DOMINATORS); /* If we changed the CFG schedule loops for fixup by cleanup_cfg. */ if (current_loops) loops_state_set (LOOPS_NEED_FIXUP); } if (dump_file) brief_dump_cfg (dump_file, dump_flags); return changed ? TODO_cleanup_cfg : 0; }
static unsigned int tracer (void) { bool changed; if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1) return 0; mark_dfs_back_edges (); if (dump_file) brief_dump_cfg (dump_file, dump_flags); /* Trace formation is done on the fly inside tail_duplicate */ changed = tail_duplicate (); if (changed) { free_dominance_info (CDI_DOMINATORS); calculate_dominance_info (CDI_DOMINATORS); if (current_loops) fix_loop_structure (NULL); } if (dump_file) brief_dump_cfg (dump_file, dump_flags); return changed ? TODO_cleanup_cfg : 0; }
void reorder_basic_blocks (void) { int n_traces; int i; struct trace *traces; if (n_basic_blocks <= 1) return; if ((* targetm.cannot_modify_jumps_p) ()) return; timevar_push (TV_REORDER_BLOCKS); cfg_layout_initialize (); set_edge_can_fallthru_flag (); mark_dfs_back_edges (); /* We are estimating the length of uncond jump insn only once since the code for getting the insn length always returns the minimal length now. */ if (uncond_jump_length == 0) uncond_jump_length = get_uncond_jump_length (); /* We need to know some information for each basic block. */ array_size = GET_ARRAY_SIZE (last_basic_block); bbd = xmalloc (array_size * sizeof (bbro_basic_block_data)); for (i = 0; i < array_size; i++) { bbd[i].start_of_trace = -1; bbd[i].end_of_trace = -1; bbd[i].heap = NULL; bbd[i].node = NULL; } traces = xmalloc (n_basic_blocks * sizeof (struct trace)); n_traces = 0; find_traces (&n_traces, traces); connect_traces (n_traces, traces); FREE (traces); FREE (bbd); if (rtl_dump_file) dump_flow_info (rtl_dump_file); cfg_layout_finalize (); timevar_pop (TV_REORDER_BLOCKS); }
/* Entry point of the path-splitting pass.  Returns TODO_cleanup_cfg
   when the CFG was changed, zero otherwise.  */
static unsigned int
execute_split_paths ()
{
  bool cfg_changed;

  /* If we don't have at least 2 real blocks and backedges in the
     CFG, then there's no point in trying to perform path splitting.  */
  if (n_basic_blocks_for_fn (cfun) <= NUM_FIXED_BLOCKS + 1
      || !mark_dfs_back_edges ())
    return 0;

  cfg_changed = split_paths ();
  if (!cfg_changed)
    return 0;

  /* Splitting invalidated the dominator information.  */
  free_dominance_info (CDI_DOMINATORS);
  return TODO_cleanup_cfg;
}
/* Entry point of the tracer pass.  FLAGS are forwarded to
   cfg_layout_initialize.  Duplicates tails to form traces, lays out
   the superblocks, and finally runs cleanup_cfg to merge the
   duplicated blocks.  */
void
tracer (unsigned int flags)
{
  /* A function with at most one real block has no traces to form.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
    return;

  cfg_layout_initialize (flags);
  mark_dfs_back_edges ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Trace formation happens inside tail_duplicate; the resulting
     traces are then placed by layout_superblocks.  */
  tail_duplicate ();
  layout_superblocks ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  cfg_layout_finalize ();

  /* Merge basic blocks in duplicated traces.  */
  cleanup_cfg (CLEANUP_EXPENSIVE);
}
/* Main entry point of the tracer pass (GIMPLE-only variant).
   Duplicates tails to form traces and discards the now-stale
   dominator information.  Always returns 0.  */
static unsigned int
tracer (void)
{
  /* This variant must only run on the GIMPLE representation.  */
  gcc_assert (current_ir_type () == IR_GIMPLE);

  /* A function with at most one real block has no traces to form.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
    return 0;

  mark_dfs_back_edges ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Trace formation is done on the fly inside tail_duplicate */
  tail_duplicate ();

  /* FIXME: We really only need to do this when we know tail
     duplication has altered the CFG. */
  free_dominance_info (CDI_DOMINATORS);

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  return 0;
}
/* Entry point of the tracer pass (old RTL variant).  Duplicates
   tails to form traces, lays out the superblocks, and runs
   cleanup_cfg to merge the duplicated blocks, all timed under
   TV_TRACER.  */
void
tracer (void)
{
  /* Nothing to do in a function with at most one basic block.  */
  if (n_basic_blocks <= 1)
    return;

  timevar_push (TV_TRACER);

  cfg_layout_initialize ();
  mark_dfs_back_edges ();

  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  /* Trace formation happens inside tail_duplicate; the resulting
     traces are then placed by layout_superblocks.  */
  tail_duplicate ();
  layout_superblocks ();

  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  cfg_layout_finalize ();

  /* Merge basic blocks in duplicated traces.  */
  cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_TRACER);
}