static bool forward_propagate_addr_expr (tree name, tree rhs) { int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth; imm_use_iterator iter; gimple use_stmt; bool all = true; bool single_use_p = has_single_use (name); FOR_EACH_IMM_USE_STMT (use_stmt, iter, name) { bool result; tree use_rhs; /* If the use is not in a simple assignment statement, then there is nothing we can do. */ if (gimple_code (use_stmt) != GIMPLE_ASSIGN) { if (!is_gimple_debug (use_stmt)) all = false; continue; } /* If the use is in a deeper loop nest, then we do not want to propagate the ADDR_EXPR into the loop as that is likely adding expression evaluations into the loop. */ if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth) { all = false; continue; } { gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt); result = forward_propagate_addr_expr_1 (name, rhs, &gsi, single_use_p); /* If the use has moved to a different statement adjust the update machinery for the old statement too. */ if (use_stmt != gsi_stmt (gsi)) { update_stmt (use_stmt); use_stmt = gsi_stmt (gsi); } update_stmt (use_stmt); } all &= result; /* Remove intermediate now unused copy and conversion chains. */ use_rhs = gimple_assign_rhs1 (use_stmt); if (result && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME && TREE_CODE (use_rhs) == SSA_NAME && has_zero_uses (gimple_assign_lhs (use_stmt))) { gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt); release_defs (use_stmt); gsi_remove (&gsi, true); } } return all; }
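A minimal sketch (not taken from the function above) of the immediate-use walk that drives it, written against the same era of the API: early exits from FOR_EACH_IMM_USE_STMT must go through BREAK_FROM_IMM_USE_STMT so the iterator unlinks itself from NAME's use list; name_used_only_in_assigns is a hypothetical helper name.

static bool
name_used_only_in_assigns (tree name)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool ok = true;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    if (!is_gimple_debug (use_stmt) && !is_gimple_assign (use_stmt))
      {
        ok = false;
        /* Leaving the loop any other way would corrupt the use list.  */
        BREAK_FROM_IMM_USE_STMT (iter);
      }
  return ok;
}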
void ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi) { gimple stmt = gsi_stmt (*gsi); location_t loc = gimple_location (stmt); gcc_assert (gimple_call_num_args (stmt) == 3); /* Pick up the arguments of the UBSAN_BOUNDS call. */ tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0))); tree index = gimple_call_arg (stmt, 1); tree orig_index_type = TREE_TYPE (index); tree bound = gimple_call_arg (stmt, 2); gimple_stmt_iterator gsi_orig = *gsi; /* Create condition "if (index > bound)". */ basic_block then_bb, fallthru_bb; gimple_stmt_iterator cond_insert_point = create_cond_insert_point (gsi, 0/*before_p*/, false, true, &then_bb, &fallthru_bb); index = fold_convert (TREE_TYPE (bound), index); index = force_gimple_operand_gsi (&cond_insert_point, index, true/*simple_p*/, NULL_TREE, false/*before*/, GSI_NEW_STMT); gimple g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE); gimple_set_location (g, loc); gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT); /* Generate __ubsan_handle_out_of_bounds call. */ *gsi = gsi_after_labels (then_bb); if (flag_sanitize_undefined_trap_on_error) g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0); else { tree data = ubsan_create_data ("__ubsan_out_of_bounds_data", &loc, NULL, ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY), ubsan_type_descriptor (orig_index_type), NULL_TREE); data = build_fold_addr_expr_loc (loc, data); enum built_in_function bcode = flag_sanitize_recover ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT; tree fn = builtin_decl_explicit (bcode); tree val = force_gimple_operand_gsi (gsi, ubsan_encode_value (index), true, NULL_TREE, true, GSI_SAME_STMT); g = gimple_build_call (fn, 2, data, val); } gimple_set_location (g, loc); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Get rid of the UBSAN_BOUNDS call from the IR. */ unlink_stmt_vdef (stmt); gsi_remove (&gsi_orig, true); /* Point GSI to next logical statement. */ *gsi = gsi_start_bb (fallthru_bb); }
static void remove_ctrl_stmt_and_useless_edges (basic_block bb, basic_block dest_bb) { gimple_stmt_iterator gsi; edge e; edge_iterator ei; gsi = gsi_last_bb (bb); /* If the duplicate ends with a control statement, then remove it. Note that if we are duplicating the template block rather than the original basic block, then the duplicate might not have any real statements in it. */ if (!gsi_end_p (gsi) && gsi_stmt (gsi) && (gimple_code (gsi_stmt (gsi)) == GIMPLE_COND || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO || gimple_code (gsi_stmt (gsi)) == GIMPLE_SWITCH)) gsi_remove (&gsi, true); for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) { if (e->dest != dest_bb) remove_edge (e); else ei_next (&ei); } }
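The edge loop above relies on a small but easy-to-miss idiom; a distilled sketch, with the helper name keep_only_edge_to invented for illustration:

/* Remove every successor edge of BB except the one to DEST_BB.
   ei_safe_edge re-reads the successor vector on each iteration, so the
   iterator is advanced only when the current edge is kept: remove_edge
   compacts bb->succs and the next edge slides into the current slot.  */

static void
keep_only_edge_to (basic_block bb, basic_block dest_bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    if (e->dest != dest_bb)
      remove_edge (e);
    else
      ei_next (&ei);
}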
void gsi_replace_with_seq (gimple_stmt_iterator *gsi, gimple_seq seq, bool update_eh_info) { gimple_stmt_iterator seqi; gimple *last; if (gimple_seq_empty_p (seq)) { gsi_remove (gsi, true); return; } seqi = gsi_last (seq); last = gsi_stmt (seqi); gsi_remove (&seqi, false); gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT); gsi_replace (gsi, last, update_eh_info); }
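A usage sketch for gsi_replace_with_seq, assuming the newer gimple_build_assign overloads; lhs, a and b are illustrative SSA names, not taken from the code above:

static void
replace_with_negated_sum (gimple_stmt_iterator *gsi, tree lhs, tree a, tree b)
{
  gimple_seq seq = NULL;
  tree tmp = make_ssa_name (TREE_TYPE (a));

  /* Build tmp = a + b; lhs = -tmp.  The last statement of SEQ takes
     over the EH information of the statement being replaced.  */
  gimple_seq_add_stmt (&seq, gimple_build_assign (tmp, PLUS_EXPR, a, b));
  gimple_seq_add_stmt (&seq, gimple_build_assign (lhs, NEGATE_EXPR, tmp));
  gsi_replace_with_seq (gsi, seq, true);
}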
void gsi_move_after (gimple_stmt_iterator *from, gimple_stmt_iterator *to) { gimple *stmt = gsi_stmt (*from); gsi_remove (from, false); /* We must have GSI_NEW_STMT here, as gsi_move_after is sometimes used to move statements to an empty block. */ gsi_insert_after (to, stmt, GSI_NEW_STMT); }
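A usage sketch of gsi_move_after; hoist_stmt_to_end_of is a hypothetical helper, and it assumes DEST has no control statement at its end. Because the insertion uses GSI_NEW_STMT, TO may legitimately start out as the end iterator of an empty block:

static void
hoist_stmt_to_end_of (gimple *stmt, basic_block dest)
{
  gimple_stmt_iterator from = gsi_for_stmt (stmt);
  gimple_stmt_iterator to = gsi_last_bb (dest); /* End iterator if DEST is empty.  */

  gsi_move_after (&from, &to); /* TO now points at STMT in DEST.  */
}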
static bool generate_loops_for_partition (struct loop *loop, bitmap partition, bool copy_p) { unsigned i, x; gimple_stmt_iterator bsi; basic_block *bbs; if (copy_p) { loop = copy_loop_before (loop); create_preheader (loop, CP_SIMPLE_PREHEADERS); create_bb_after_loop (loop); } if (loop == NULL) return false; /* Remove stmts not in the PARTITION bitmap. The order in which we visit the phi nodes and the statements is exactly as in stmts_from_loop. */ bbs = get_loop_body_in_dom_order (loop); for (x = 0, i = 0; i < loop->num_nodes; i++) { basic_block bb = bbs[i]; for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi);) if (!bitmap_bit_p (partition, x++)) { gimple phi = gsi_stmt (bsi); if (!is_gimple_reg (gimple_phi_result (phi))) mark_virtual_phi_result_for_renaming (phi); remove_phi_node (&bsi, true); } else gsi_next (&bsi); for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi);) { gimple stmt = gsi_stmt (bsi); if (gimple_code (gsi_stmt (bsi)) != GIMPLE_LABEL && !bitmap_bit_p (partition, x++)) { unlink_stmt_vdef (stmt); gsi_remove (&bsi, true); release_defs (stmt); } else gsi_next (&bsi); } } free (bbs); return true; }
void gsi_move_before (gimple_stmt_iterator *from, gimple_stmt_iterator *to) { gimple *stmt = gsi_stmt (*from); gsi_remove (from, false); /* For consistency with gsi_move_after, it might be better to have GSI_NEW_STMT here; however, that breaks several places that expect that TO does not change. */ gsi_insert_before (to, stmt, GSI_SAME_STMT); }
static void insert_trap_and_remove_trailing_statements (gimple_stmt_iterator *si_p, tree op) { /* We want the NULL pointer dereference to actually occur so that code that wishes to catch the signal can do so. If the dereference is a load, then there's nothing to do as the LHS will be a throw-away SSA_NAME and the RHS is the NULL dereference. If the dereference is a store and we can easily transform the RHS, then simplify the RHS to enable more DCE. Note that we require the statement to be a GIMPLE_ASSIGN which filters out calls on the RHS. */ gimple stmt = gsi_stmt (*si_p); if (walk_stmt_load_store_ops (stmt, (void *)op, NULL, check_loadstore) && is_gimple_assign (stmt) && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))) { /* We just need to turn the RHS into zero converted to the proper type. */ tree type = TREE_TYPE (gimple_assign_lhs (stmt)); gimple_assign_set_rhs_code (stmt, INTEGER_CST); gimple_assign_set_rhs1 (stmt, fold_convert (type, integer_zero_node)); update_stmt (stmt); } gimple new_stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0); gimple_seq seq = NULL; gimple_seq_add_stmt (&seq, new_stmt); /* If we had a NULL pointer dereference, then we want to insert the __builtin_trap after the statement, for the other cases we want to insert before the statement. */ if (walk_stmt_load_store_ops (stmt, (void *)op, check_loadstore, check_loadstore)) gsi_insert_after (si_p, seq, GSI_NEW_STMT); else gsi_insert_before (si_p, seq, GSI_NEW_STMT); /* We must remove statements from the end of the block so that we never reference a released SSA_NAME. */ basic_block bb = gimple_bb (gsi_stmt (*si_p)); for (gimple_stmt_iterator si = gsi_last_bb (bb); gsi_stmt (si) != gsi_stmt (*si_p); si = gsi_last_bb (bb)) { stmt = gsi_stmt (si); unlink_stmt_vdef (stmt); gsi_remove (&si, true); release_defs (stmt); } }
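The trailing-statement removal at the end of the function above, distilled into a sketch (truncate_block_after is an invented name): deleting from the end of the block backwards guarantees that no surviving statement can reference an SSA_NAME that release_defs has already returned to the free list.

static void
truncate_block_after (gimple_stmt_iterator *si_p)
{
  basic_block bb = gimple_bb (gsi_stmt (*si_p));

  for (gimple_stmt_iterator si = gsi_last_bb (bb);
       gsi_stmt (si) != gsi_stmt (*si_p);
       si = gsi_last_bb (bb))
    {
      gimple *stmt = gsi_stmt (si);
      unlink_stmt_vdef (stmt);  /* Rewire virtual operand chains past STMT.  */
      gsi_remove (&si, true);   /* Unlink STMT from the block.  */
      release_defs (stmt);      /* Free STMT's SSA_NAMEs for reuse.  */
    }
}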
void remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p) { gimple phi = gsi_stmt (*gsi); gsi_remove (gsi, false); /* If we are deleting the PHI node, then we should release the SSA_NAME node so that it can be reused. */ release_phi_node (phi); if (release_lhs_p) release_ssa_name (gimple_phi_result (phi)); }
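A usage sketch for remove_phi_node, written against the newer gphi API; remove_dead_phis_in_bb is a hypothetical helper. As in generate_loops_for_partition above, the iterator is advanced only when the PHI survives:

static void
remove_dead_phis_in_bb (basic_block bb)
{
  for (gphi_iterator psi = gsi_start_phis (bb); !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      tree res = gimple_phi_result (phi);

      if (!virtual_operand_p (res) && has_zero_uses (res))
        remove_phi_node (&psi, true); /* TRUE also releases RES.  */
      else
        gsi_next (&psi);
    }
}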
static void remove_unused_var (tree var) { gimple *stmt = SSA_NAME_DEF_STMT (var); if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "Deleting "); print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); } gimple_stmt_iterator gsi = gsi_for_stmt (stmt); gsi_remove (&gsi, true); release_defs (stmt); }
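A store-aware variant of the deletion idiom above, sketched under the assumption that STMT's real defs are all dead; remove_dead_stmt is an invented name. The extra unlink_stmt_vdef call matters when the statement may carry a virtual definition:

static void
remove_dead_stmt (gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);

  unlink_stmt_vdef (stmt);  /* Make uses of STMT's VDEF refer to its VUSE.  */
  gsi_remove (&gsi, true);  /* Take STMT out of its basic block.  */
  release_defs (stmt);      /* Release the dead SSA_NAMEs.  */
}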
static void lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data) { tree old_block = data->block; gimple stmt = gsi_stmt (*gsi); tree new_block = gimple_bind_block (stmt); if (new_block) { if (new_block == old_block) { /* The outermost block of the original function may not be the outermost statement chain of the gimplified function. So we may see the outermost block just inside the function. */ gcc_assert (new_block == DECL_INITIAL (current_function_decl)); new_block = NULL; } else { /* We do not expect to handle duplicate blocks. */ gcc_assert (!TREE_ASM_WRITTEN (new_block)); TREE_ASM_WRITTEN (new_block) = 1; /* Block tree may get clobbered by inlining. Normally this would be fixed in rest_of_decl_compilation using block notes, but since we are not going to emit them, it is up to us. */ BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block); BLOCK_SUBBLOCKS (old_block) = new_block; BLOCK_SUBBLOCKS (new_block) = NULL_TREE; BLOCK_SUPERCONTEXT (new_block) = old_block; data->block = new_block; } } record_vars (gimple_bind_vars (stmt)); lower_sequence (gimple_bind_body_ptr (stmt), data); if (new_block) { gcc_assert (data->block == new_block); BLOCK_SUBBLOCKS (new_block) = blocks_nreverse (BLOCK_SUBBLOCKS (new_block)); data->block = old_block; } /* The GIMPLE_BIND no longer carries any useful information -- kill it. */ gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT); gsi_remove (gsi, false); }
static void replace_phi_edge_with_variable (basic_block cond_block, edge e, gimple phi, tree new_tree) { basic_block bb = gimple_bb (phi); basic_block block_to_remove; gimple_stmt_iterator gsi; /* Change the PHI argument to new. */ SET_USE (PHI_ARG_DEF_PTR (phi, e->dest_idx), new_tree); /* Remove the empty basic block. */ if (EDGE_SUCC (cond_block, 0)->dest == bb) { EDGE_SUCC (cond_block, 0)->flags |= EDGE_FALLTHRU; EDGE_SUCC (cond_block, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE); EDGE_SUCC (cond_block, 0)->probability = REG_BR_PROB_BASE; EDGE_SUCC (cond_block, 0)->count += EDGE_SUCC (cond_block, 1)->count; block_to_remove = EDGE_SUCC (cond_block, 1)->dest; } else { EDGE_SUCC (cond_block, 1)->flags |= EDGE_FALLTHRU; EDGE_SUCC (cond_block, 1)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE); EDGE_SUCC (cond_block, 1)->probability = REG_BR_PROB_BASE; EDGE_SUCC (cond_block, 1)->count += EDGE_SUCC (cond_block, 0)->count; block_to_remove = EDGE_SUCC (cond_block, 0)->dest; } delete_basic_block (block_to_remove); /* Eliminate the COND_EXPR at the end of COND_BLOCK. */ gsi = gsi_last_bb (cond_block); gsi_remove (&gsi, true); if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, "COND_EXPR in block %d and PHI in block %d converted to straightline code.\n", cond_block->index, bb->index); }
static void lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data) { gimple stmt = gsi_stmt (*gsi); gimple t; int i; return_statements_t tmp_rs; /* Match this up with an existing return statement that's been created. */ for (i = data->return_statements.length () - 1; i >= 0; i--) { tmp_rs = data->return_statements[i]; if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt)) { /* Remove the line number from the representative return statement. It now fills in for many such returns. Failure to remove this will result in incorrect results for coverage analysis. */ gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION); goto found; } } /* Not found. Create a new label and record the return statement. */ tmp_rs.label = create_artificial_label (cfun->function_end_locus); tmp_rs.stmt = stmt; data->return_statements.safe_push (tmp_rs); /* Generate a goto statement and remove the return statement. */ found: /* When not optimizing, make sure user returns are preserved. */ if (!optimize && gimple_has_location (stmt)) DECL_ARTIFICIAL (tmp_rs.label) = 0; t = gimple_build_goto (tmp_rs.label); gimple_set_location (t, gimple_location (stmt)); gimple_set_block (t, gimple_block (stmt)); gsi_insert_before (gsi, t, GSI_SAME_STMT); gsi_remove (gsi, false); }
/* Try to compare the bounds value and the address value used in the check CI. If we can prove that the check always passes, remove it. */ static void chkp_remove_check_if_pass (struct check_info *ci) { int result = 0; if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "Trying to remove check: "); print_gimple_stmt (dump_file, ci->stmt, 0, 0); } result = chkp_get_check_result (ci, ci->bounds); if (result == 1) { gimple_stmt_iterator i = gsi_for_stmt (ci->stmt); if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, " action: delete check (always pass)\n"); gsi_remove (&i, true); unlink_stmt_vdef (ci->stmt); release_defs (ci->stmt); ci->stmt = NULL; } else if (result == -1) { if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, " action: keep check (always fail)\n"); warning_at (gimple_location (ci->stmt), OPT_Wchkp, "memory access check always fail"); } else if (result == 0) { if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, " action: keep check (cannot compute result)\n"); } }
static bool remove_prop_source_from_use (tree name, gimple up_to_stmt) { gimple_stmt_iterator gsi; gimple stmt; do { if (!has_zero_uses (name)) return false; stmt = SSA_NAME_DEF_STMT (name); if (stmt == up_to_stmt) return true; gsi = gsi_for_stmt (stmt); release_defs (stmt); gsi_remove (&gsi, true); name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL; } while (name && TREE_CODE (name) == SSA_NAME); return false; }
static void lower_builtin_setjmp (gimple_stmt_iterator *gsi) { gimple stmt = gsi_stmt (*gsi); location_t loc = gimple_location (stmt); tree cont_label = create_artificial_label (loc); tree next_label = create_artificial_label (loc); tree dest, t, arg; gimple g; /* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */ FORCED_LABEL (next_label) = 1; dest = gimple_call_lhs (stmt); /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */ arg = build_addr (next_label, current_function_decl); t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP); g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build 'DEST = 0' and insert. */ if (dest) { g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest))); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); } /* Build 'goto CONT_LABEL' and insert. */ g = gimple_build_goto (cont_label); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build 'NEXT_LABEL:' and insert. */ g = gimple_build_label (next_label); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */ arg = build_addr (next_label, current_function_decl); t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER); g = gimple_build_call (t, 1, arg); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build 'DEST = 1' and insert. */ if (dest) { g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest), integer_one_node)); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); } /* Build 'CONT_LABEL:' and insert. */ g = gimple_build_label (cont_label); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Remove the call to __builtin_setjmp. */ gsi_remove (gsi, false); }
static void lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data) { tree old_block = data->block; gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi)); tree new_block = gimple_bind_block (stmt); if (new_block) { if (new_block == old_block) { /* The outermost block of the original function may not be the outermost statement chain of the gimplified function. So we may see the outermost block just inside the function. */ gcc_assert (new_block == DECL_INITIAL (current_function_decl)); new_block = NULL; } else { /* We do not expect to handle duplicate blocks. */ gcc_assert (!TREE_ASM_WRITTEN (new_block)); TREE_ASM_WRITTEN (new_block) = 1; /* Block tree may get clobbered by inlining. Normally this would be fixed in rest_of_decl_compilation using block notes, but since we are not going to emit them, it is up to us. */ BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block); BLOCK_SUBBLOCKS (old_block) = new_block; BLOCK_SUBBLOCKS (new_block) = NULL_TREE; BLOCK_SUPERCONTEXT (new_block) = old_block; data->block = new_block; } } record_vars (gimple_bind_vars (stmt)); /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer need gimple_bind_vars. */ tree next; /* BLOCK_VARS and gimple_bind_vars share a common sub-chain. Find it by marking all BLOCK_VARS. */ if (gimple_bind_block (stmt)) for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t)) TREE_VISITED (t) = 1; for (tree var = gimple_bind_vars (stmt); var && ! TREE_VISITED (var); var = next) { next = DECL_CHAIN (var); DECL_CHAIN (var) = NULL_TREE; } /* Unmark BLOCK_VARS. */ if (gimple_bind_block (stmt)) for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t)) TREE_VISITED (t) = 0; lower_sequence (gimple_bind_body_ptr (stmt), data); if (new_block) { gcc_assert (data->block == new_block); BLOCK_SUBBLOCKS (new_block) = blocks_nreverse (BLOCK_SUBBLOCKS (new_block)); data->block = old_block; } /* The GIMPLE_BIND no longer carries any useful information -- kill it. */ gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT); gsi_remove (gsi, false); }
static void lower_builtin_setjmp (gimple_stmt_iterator *gsi) { gimple *stmt = gsi_stmt (*gsi); location_t loc = gimple_location (stmt); tree cont_label = create_artificial_label (loc); tree next_label = create_artificial_label (loc); tree dest, t, arg; gimple *g; /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL these builtins are modelled as non-local label jumps to the label that is passed to these two builtins, so pretend we have a non-local label during GIMPLE passes too. See PR60003. */ cfun->has_nonlocal_label = 1; /* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */ FORCED_LABEL (next_label) = 1; tree orig_dest = dest = gimple_call_lhs (stmt); if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME) dest = create_tmp_reg (TREE_TYPE (orig_dest)); /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */ arg = build_addr (next_label); t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP); g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build 'DEST = 0' and insert. */ if (dest) { g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest))); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); } /* Build 'goto CONT_LABEL' and insert. */ g = gimple_build_goto (cont_label); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build 'NEXT_LABEL:' and insert. */ g = gimple_build_label (next_label); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */ arg = build_addr (next_label); t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER); g = gimple_build_call (t, 1, arg); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build 'DEST = 1' and insert. */ if (dest) { g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest), integer_one_node)); gimple_set_location (g, loc); gimple_set_block (g, gimple_block (stmt)); gsi_insert_before (gsi, g, GSI_SAME_STMT); } /* Build 'CONT_LABEL:' and insert. */ g = gimple_build_label (cont_label); gsi_insert_before (gsi, g, GSI_SAME_STMT); /* Build orig_dest = dest if necessary. */ if (dest != orig_dest) { g = gimple_build_assign (orig_dest, dest); gsi_insert_before (gsi, g, GSI_SAME_STMT); } /* Remove the call to __builtin_setjmp. */ gsi_remove (gsi, false); }
static bool factor_out_conditional_conversion (edge e0, edge e1, gphi *phi, tree arg0, tree arg1) { gimple arg0_def_stmt = NULL, arg1_def_stmt = NULL, new_stmt; tree new_arg0 = NULL_TREE, new_arg1 = NULL_TREE; tree temp, result; gphi *newphi; gimple_stmt_iterator gsi, gsi_for_def; source_location locus = gimple_location (phi); enum tree_code convert_code; /* Handle only PHI statements with two arguments. TODO: If all other arguments to PHI are INTEGER_CST or if their defining statements have the same unary operation, we can handle more than two arguments too. */ if (gimple_phi_num_args (phi) != 2) return false; /* First canonicalize to simplify tests. */ if (TREE_CODE (arg0) != SSA_NAME) { std::swap (arg0, arg1); std::swap (e0, e1); } if (TREE_CODE (arg0) != SSA_NAME || (TREE_CODE (arg1) != SSA_NAME && TREE_CODE (arg1) != INTEGER_CST)) return false; /* Check if arg0 is an SSA_NAME and the stmt which defines arg0 is a conversion. */ arg0_def_stmt = SSA_NAME_DEF_STMT (arg0); if (!is_gimple_assign (arg0_def_stmt) || !gimple_assign_cast_p (arg0_def_stmt)) return false; /* Use the RHS as new_arg0. */ convert_code = gimple_assign_rhs_code (arg0_def_stmt); new_arg0 = gimple_assign_rhs1 (arg0_def_stmt); if (convert_code == VIEW_CONVERT_EXPR) new_arg0 = TREE_OPERAND (new_arg0, 0); if (TREE_CODE (arg1) == SSA_NAME) { /* Check if arg1 is an SSA_NAME and the stmt which defines arg1 is a conversion. */ arg1_def_stmt = SSA_NAME_DEF_STMT (arg1); if (!is_gimple_assign (arg1_def_stmt) || gimple_assign_rhs_code (arg1_def_stmt) != convert_code) return false; /* Use the RHS as new_arg1. */ new_arg1 = gimple_assign_rhs1 (arg1_def_stmt); if (convert_code == VIEW_CONVERT_EXPR) new_arg1 = TREE_OPERAND (new_arg1, 0); } else { /* If arg1 is an INTEGER_CST, fold it to new type. */ if (INTEGRAL_TYPE_P (TREE_TYPE (new_arg0)) && int_fits_type_p (arg1, TREE_TYPE (new_arg0))) { if (gimple_assign_cast_p (arg0_def_stmt)) new_arg1 = fold_convert (TREE_TYPE (new_arg0), arg1); else return false; } else return false; } /* If arg0/arg1 have > 1 use, then this transformation actually increases the number of expressions evaluated at runtime. */ if (!has_single_use (arg0) || (arg1_def_stmt && !has_single_use (arg1))) return false; /* If the types of new_arg0 and new_arg1 are different, bail out. */ if (!types_compatible_p (TREE_TYPE (new_arg0), TREE_TYPE (new_arg1))) return false; /* Create a new PHI stmt. */ result = PHI_RESULT (phi); temp = make_ssa_name (TREE_TYPE (new_arg0), NULL); newphi = create_phi_node (temp, gimple_bb (phi)); if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "PHI "); print_generic_expr (dump_file, gimple_phi_result (phi), 0); fprintf (dump_file, " changed to factor conversion out from COND_EXPR.\n"); fprintf (dump_file, "New stmt with CAST that defines "); print_generic_expr (dump_file, result, 0); fprintf (dump_file, ".\n"); } /* Remove the old cast(s) that have a single use. */ gsi_for_def = gsi_for_stmt (arg0_def_stmt); gsi_remove (&gsi_for_def, true); if (arg1_def_stmt) { gsi_for_def = gsi_for_stmt (arg1_def_stmt); gsi_remove (&gsi_for_def, true); } add_phi_arg (newphi, new_arg0, e0, locus); add_phi_arg (newphi, new_arg1, e1, locus); /* Create the conversion stmt and insert it. */ if (convert_code == VIEW_CONVERT_EXPR) { temp = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (result), temp); new_stmt = gimple_build_assign (result, temp); } else new_stmt = gimple_build_assign (result, convert_code, temp); gsi = gsi_after_labels (gimple_bb (phi)); gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT); /* Remove the original PHI stmt. */ gsi = gsi_for_stmt (phi); gsi_remove (&gsi, true); return true; }
static void adjust_simduid_builtins (hash_table<simduid_to_vf> *htab) { basic_block bb; FOR_EACH_BB_FN (bb, cfun) { gimple_stmt_iterator i; for (i = gsi_start_bb (bb); !gsi_end_p (i); ) { unsigned int vf = 1; enum internal_fn ifn; gimple *stmt = gsi_stmt (i); tree t; if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt)) { gsi_next (&i); continue; } ifn = gimple_call_internal_fn (stmt); switch (ifn) { case IFN_GOMP_SIMD_LANE: case IFN_GOMP_SIMD_VF: case IFN_GOMP_SIMD_LAST_LANE: break; case IFN_GOMP_SIMD_ORDERED_START: case IFN_GOMP_SIMD_ORDERED_END: gsi_remove (&i, true); unlink_stmt_vdef (stmt); continue; default: gsi_next (&i); continue; } tree arg = gimple_call_arg (stmt, 0); gcc_assert (arg != NULL_TREE); gcc_assert (TREE_CODE (arg) == SSA_NAME); simduid_to_vf *p = NULL, data; data.simduid = DECL_UID (SSA_NAME_VAR (arg)); if (htab) { p = htab->find (&data); if (p) vf = p->vf; } switch (ifn) { case IFN_GOMP_SIMD_VF: t = build_int_cst (unsigned_type_node, vf); break; case IFN_GOMP_SIMD_LANE: t = build_int_cst (unsigned_type_node, 0); break; case IFN_GOMP_SIMD_LAST_LANE: t = gimple_call_arg (stmt, 1); break; default: gcc_unreachable (); } update_call_from_tree (&i, t); gsi_next (&i); } } }
static void adjust_simduid_builtins (hash_table<simduid_to_vf> *htab) { basic_block bb; FOR_EACH_BB_FN (bb, cfun) { gimple_stmt_iterator i; for (i = gsi_start_bb (bb); !gsi_end_p (i); ) { unsigned int vf = 1; enum internal_fn ifn; gimple *stmt = gsi_stmt (i); tree t; if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt)) { gsi_next (&i); continue; } ifn = gimple_call_internal_fn (stmt); switch (ifn) { case IFN_GOMP_SIMD_LANE: case IFN_GOMP_SIMD_VF: case IFN_GOMP_SIMD_LAST_LANE: break; case IFN_GOMP_SIMD_ORDERED_START: case IFN_GOMP_SIMD_ORDERED_END: if (integer_onep (gimple_call_arg (stmt, 0))) { enum built_in_function bcode = (ifn == IFN_GOMP_SIMD_ORDERED_START ? BUILT_IN_GOMP_ORDERED_START : BUILT_IN_GOMP_ORDERED_END); gimple *g = gimple_build_call (builtin_decl_explicit (bcode), 0); tree vdef = gimple_vdef (stmt); gimple_set_vdef (g, vdef); SSA_NAME_DEF_STMT (vdef) = g; gimple_set_vuse (g, gimple_vuse (stmt)); gsi_replace (&i, g, true); continue; } gsi_remove (&i, true); unlink_stmt_vdef (stmt); continue; default: gsi_next (&i); continue; } tree arg = gimple_call_arg (stmt, 0); gcc_assert (arg != NULL_TREE); gcc_assert (TREE_CODE (arg) == SSA_NAME); simduid_to_vf *p = NULL, data; data.simduid = DECL_UID (SSA_NAME_VAR (arg)); /* Need to nullify the loop safelen field since its value is not valid after the transformation. */ if (bb->loop_father && bb->loop_father->safelen > 0) bb->loop_father->safelen = 0; if (htab) { p = htab->find (&data); if (p) vf = p->vf; } switch (ifn) { case IFN_GOMP_SIMD_VF: t = build_int_cst (unsigned_type_node, vf); break; case IFN_GOMP_SIMD_LANE: t = build_int_cst (unsigned_type_node, 0); break; case IFN_GOMP_SIMD_LAST_LANE: t = gimple_call_arg (stmt, 1); break; default: gcc_unreachable (); } update_call_from_tree (&i, t); gsi_next (&i); } } }
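A usage sketch of the replacement step shared by both versions above: update_call_from_tree rewrites the call at *GSI into an assignment from T when the call's result is used, or removes the call otherwise. The helper name and the constant chosen here are illustrative, not part of the sources above.

static void
fold_simd_vf_to_one (gimple_stmt_iterator *gsi)
{
  tree t = build_int_cst (unsigned_type_node, 1);

  update_call_from_tree (gsi, t); /* Call becomes lhs = 1, or goes away.  */
  gsi_next (gsi);
}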
static void eliminate_tail_call (struct tailcall *t) { tree param, rslt; gimple stmt, call; tree arg; size_t idx; basic_block bb, first; edge e; gimple phi; gimple_stmt_iterator gsi; gimple orig_stmt; stmt = orig_stmt = gsi_stmt (t->call_gsi); bb = gsi_bb (t->call_gsi); if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "Eliminated tail recursion in bb %d : ", bb->index); print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); fprintf (dump_file, "\n"); } gcc_assert (is_gimple_call (stmt)); first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)); /* Remove the code after call_gsi that will become unreachable. The possibly unreachable code in other blocks is removed later in cfg cleanup. */ gsi = t->call_gsi; gsi_next (&gsi); while (!gsi_end_p (gsi)) { gimple t = gsi_stmt (gsi); /* Do not remove the return statement, so that redirect_edge_and_branch sees how the block ends. */ if (gimple_code (t) == GIMPLE_RETURN) break; gsi_remove (&gsi, true); release_defs (t); } /* Number of executions of function has reduced by the tailcall. */ e = single_succ_edge (gsi_bb (t->call_gsi)); decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e)); decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e)); if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e)); /* Replace the call by a jump to the start of function. */ e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)), first); gcc_assert (e); PENDING_STMT (e) = NULL; /* Add phi node entries for arguments. The ordering of the phi nodes should be the same as the ordering of the arguments. */ for (param = DECL_ARGUMENTS (current_function_decl), idx = 0, gsi = gsi_start_phis (first); param; param = DECL_CHAIN (param), idx++) { if (!arg_needs_copy_p (param)) continue; arg = gimple_call_arg (stmt, idx); phi = gsi_stmt (gsi); gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi))); add_phi_arg (phi, arg, e, gimple_location (stmt)); gsi_next (&gsi); } /* Update the values of accumulators. */ adjust_accumulator_values (t->call_gsi, t->mult, t->add, e); call = gsi_stmt (t->call_gsi); rslt = gimple_call_lhs (call); if (rslt != NULL_TREE) { /* Result of the call will no longer be defined. So adjust the SSA_NAME_DEF_STMT accordingly. */ SSA_NAME_DEF_STMT (rslt) = gimple_build_nop (); } gsi_remove (&t->call_gsi, true); release_defs (call); }
static void lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data) { gimple *stmt = gsi_stmt (*gsi); gimple_set_block (stmt, data->block); switch (gimple_code (stmt)) { case GIMPLE_BIND: lower_gimple_bind (gsi, data); /* Propagate fallthruness. */ return; case GIMPLE_COND: case GIMPLE_GOTO: case GIMPLE_SWITCH: data->cannot_fallthru = true; gsi_next (gsi); return; case GIMPLE_RETURN: if (data->cannot_fallthru) { gsi_remove (gsi, false); /* Propagate fallthruness. */ } else { lower_gimple_return (gsi, data); data->cannot_fallthru = true; } return; case GIMPLE_TRY: if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH) lower_try_catch (gsi, data); else { /* It must be a GIMPLE_TRY_FINALLY. */ bool cannot_fallthru; lower_sequence (gimple_try_eval_ptr (stmt), data); cannot_fallthru = data->cannot_fallthru; /* The finally clause is always executed after the try clause, so if it does not fall through, then the try-finally will not fall through. Otherwise, if the try clause does not fall through, then when the finally clause falls through it will resume execution wherever the try clause was going. So the whole try-finally will only fall through if both the try clause and the finally clause fall through. */ data->cannot_fallthru = false; lower_sequence (gimple_try_cleanup_ptr (stmt), data); data->cannot_fallthru |= cannot_fallthru; gsi_next (gsi); } return; case GIMPLE_EH_ELSE: { geh_else *eh_else_stmt = as_a <geh_else *> (stmt); lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data); lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data); } break; case GIMPLE_DEBUG: gcc_checking_assert (cfun->debug_nonbind_markers); /* We can't possibly have debug bind stmts before lowering, we first emit them when entering SSA. */ gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt)); /* Propagate fallthruness. */ /* If the function (e.g. from PCH) had debug stmts, but they're disabled for this compilation, remove them. */ if (!MAY_HAVE_DEBUG_MARKER_STMTS) gsi_remove (gsi, true); else gsi_next (gsi); return; case GIMPLE_NOP: case GIMPLE_ASM: case GIMPLE_ASSIGN: case GIMPLE_PREDICT: case GIMPLE_LABEL: case GIMPLE_EH_MUST_NOT_THROW: case GIMPLE_OMP_FOR: case GIMPLE_OMP_SECTIONS: case GIMPLE_OMP_SECTIONS_SWITCH: case GIMPLE_OMP_SECTION: case GIMPLE_OMP_SINGLE: case GIMPLE_OMP_MASTER: case GIMPLE_OMP_TASKGROUP: case GIMPLE_OMP_ORDERED: case GIMPLE_OMP_CRITICAL: case GIMPLE_OMP_RETURN: case GIMPLE_OMP_ATOMIC_LOAD: case GIMPLE_OMP_ATOMIC_STORE: case GIMPLE_OMP_CONTINUE: break; case GIMPLE_CALL: { tree decl = gimple_call_fndecl (stmt); unsigned i; for (i = 0; i < gimple_call_num_args (stmt); i++) { tree arg = gimple_call_arg (stmt, i); if (EXPR_P (arg)) TREE_SET_BLOCK (arg, data->block); } if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL) { if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP) { lower_builtin_setjmp (gsi); data->cannot_fallthru = false; return; } else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN && flag_tree_bit_ccp && gimple_builtin_call_types_compatible_p (stmt, decl)) { lower_builtin_posix_memalign (gsi); return; } } if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN)) { data->cannot_fallthru = true; gsi_next (gsi); return; } } break; case GIMPLE_OMP_PARALLEL: case GIMPLE_OMP_TASK: case GIMPLE_OMP_TARGET: case GIMPLE_OMP_TEAMS: case GIMPLE_OMP_GRID_BODY: data->cannot_fallthru = false; lower_omp_directive (gsi, data); data->cannot_fallthru = false; return; case GIMPLE_TRANSACTION: lower_sequence (gimple_transaction_body_ptr ( as_a <gtransaction *> (stmt)), data); break; default: gcc_unreachable (); } data->cannot_fallthru = false; gsi_next (gsi); }
static void lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data) { gimple stmt = gsi_stmt (*gsi); gimple_set_block (stmt, data->block); switch (gimple_code (stmt)) { case GIMPLE_BIND: lower_gimple_bind (gsi, data); /* Propagate fallthruness. */ return; case GIMPLE_COND: case GIMPLE_GOTO: case GIMPLE_SWITCH: data->cannot_fallthru = true; gsi_next (gsi); return; case GIMPLE_RETURN: if (data->cannot_fallthru) { gsi_remove (gsi, false); /* Propagate fallthruness. */ } else { lower_gimple_return (gsi, data); data->cannot_fallthru = true; } return; case GIMPLE_TRY: if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH) lower_try_catch (gsi, data); else { /* It must be a GIMPLE_TRY_FINALLY. */ bool cannot_fallthru; lower_sequence (gimple_try_eval_ptr (stmt), data); cannot_fallthru = data->cannot_fallthru; /* The finally clause is always executed after the try clause, so if it does not fall through, then the try-finally will not fall through. Otherwise, if the try clause does not fall through, then when the finally clause falls through it will resume execution wherever the try clause was going. So the whole try-finally will only fall through if both the try clause and the finally clause fall through. */ data->cannot_fallthru = false; lower_sequence (gimple_try_cleanup_ptr (stmt), data); data->cannot_fallthru |= cannot_fallthru; gsi_next (gsi); } return; case GIMPLE_EH_ELSE: lower_sequence (gimple_eh_else_n_body_ptr (stmt), data); lower_sequence (gimple_eh_else_e_body_ptr (stmt), data); break; case GIMPLE_NOP: case GIMPLE_ASM: case GIMPLE_ASSIGN: case GIMPLE_PREDICT: case GIMPLE_LABEL: case GIMPLE_EH_MUST_NOT_THROW: case GIMPLE_OMP_FOR: case GIMPLE_OMP_SECTIONS: case GIMPLE_OMP_SECTIONS_SWITCH: case GIMPLE_OMP_SECTION: case GIMPLE_OMP_SINGLE: case GIMPLE_OMP_MASTER: case GIMPLE_OMP_ORDERED: case GIMPLE_OMP_CRITICAL: case GIMPLE_OMP_RETURN: case GIMPLE_OMP_ATOMIC_LOAD: case GIMPLE_OMP_ATOMIC_STORE: case GIMPLE_OMP_CONTINUE: break; case GIMPLE_CALL: { tree decl = gimple_call_fndecl (stmt); unsigned i; for (i = 0; i < gimple_call_num_args (stmt); i++) { tree arg = gimple_call_arg (stmt, i); if (EXPR_P (arg)) TREE_SET_BLOCK (arg, data->block); } if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP) { lower_builtin_setjmp (gsi); data->cannot_fallthru = false; data->calls_builtin_setjmp = true; return; } if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN)) { data->cannot_fallthru = true; gsi_next (gsi); return; } } break; case GIMPLE_OMP_PARALLEL: case GIMPLE_OMP_TASK: data->cannot_fallthru = false; lower_omp_directive (gsi, data); data->cannot_fallthru = false; return; case GIMPLE_TRANSACTION: lower_sequence (gimple_transaction_body_ptr (stmt), data); break; default: gcc_unreachable (); } data->cannot_fallthru = false; gsi_next (gsi); }
static unsigned int tree_nrv (void) { tree result = DECL_RESULT (current_function_decl); tree result_type = TREE_TYPE (result); tree found = NULL; basic_block bb; gimple_stmt_iterator gsi; struct nrv_data data; /* If this function does not return an aggregate type in memory, then there is nothing to do. */ if (!aggregate_value_p (result, current_function_decl)) return 0; /* If a GIMPLE type is returned in memory, finalize_nrv_r might create non-GIMPLE. */ if (is_gimple_reg_type (result_type)) return 0; /* Look through each block for assignments to the RESULT_DECL. */ FOR_EACH_BB (bb) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gimple stmt = gsi_stmt (gsi); tree ret_val; if (gimple_code (stmt) == GIMPLE_RETURN) { /* In a function with an aggregate return value, the gimplifier has changed all non-empty RETURN_EXPRs to return the RESULT_DECL. */ ret_val = gimple_return_retval (stmt); if (ret_val) gcc_assert (ret_val == result); } else if (is_gimple_assign (stmt) && gimple_assign_lhs (stmt) == result) { tree rhs; if (!gimple_assign_copy_p (stmt)) return 0; rhs = gimple_assign_rhs1 (stmt); /* Now verify that this return statement uses the same value as any previously encountered return statement. */ if (found != NULL) { /* If we found a return statement using a different variable than previous return statements, then we can not perform NRV optimizations. */ if (found != rhs) return 0; } else found = rhs; /* The returned value must be a local automatic variable of the same type and alignment as the function's result. */ if (TREE_CODE (found) != VAR_DECL || TREE_THIS_VOLATILE (found) || DECL_CONTEXT (found) != current_function_decl || TREE_STATIC (found) || TREE_ADDRESSABLE (found) || DECL_ALIGN (found) > DECL_ALIGN (result) || !useless_type_conversion_p (result_type, TREE_TYPE (found))) return 0; } else if (is_gimple_assign (stmt)) { tree addr = get_base_address (gimple_assign_lhs (stmt)); /* If there's any MODIFY of component of RESULT, then bail out. */ if (addr && addr == result) return 0; } } } if (!found) return 0; /* If dumping details, then note once and only the NRV replacement. */ if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "NRV Replaced: "); print_generic_expr (dump_file, found, dump_flags); fprintf (dump_file, " with: "); print_generic_expr (dump_file, result, dump_flags); fprintf (dump_file, "\n"); } /* At this point we know that all the return statements return the same local which has suitable attributes for NRV. Copy debugging information from FOUND to RESULT. */ DECL_NAME (result) = DECL_NAME (found); DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found); DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found); TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found); /* Now walk through the function changing all references to VAR to be RESULT. */ data.var = found; data.result = result; FOR_EACH_BB (bb) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); ) { gimple stmt = gsi_stmt (gsi); /* If this is a copy from VAR to RESULT, remove it. */ if (gimple_assign_copy_p (stmt) && gimple_assign_lhs (stmt) == result && gimple_assign_rhs1 (stmt) == found) gsi_remove (&gsi, true); else { struct walk_stmt_info wi; memset (&wi, 0, sizeof (wi)); wi.info = &data; walk_gimple_op (stmt, finalize_nrv_r, &wi); gsi_next (&gsi); } } } /* FOUND is no longer used. Ensure it gets removed. */ var_ann (found)->used = 0; return 0; }