/* Lower the GIMPLE_TRY_CATCH at *GSI and record whether the whole
   construct can fall through in DATA->cannot_fallthru.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
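
/* A source-level illustration (a hypothetical example, not from GCC) of
   the fall-through rule above: the GIMPLE_TRY_CATCH lowered from the
   function below may fall through, because even though the first handler
   returns, the second handler's body falls through.  may_throw and
   log_failure are made-up helpers.  */

#include <new>

void may_throw (int);		/* hypothetical: may raise an exception */
void log_failure ();		/* hypothetical: reports and returns */

int
f (int n)
{
  try
    {
      may_throw (n);		/* try body falls through if nothing throws */
    }
  catch (const std::bad_alloc &)
    {
      return -1;		/* this handler cannot fall through */
    }
  catch (...)
    {
      log_failure ();		/* this handler falls through...  */
    }
  return 0;			/* ...so the whole try/catch reaches here */
}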

/* Return true if the GIMPLE_TRY_CATCH statement STMT may fall through.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
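
/* For context, a trimmed sketch of a caller, modeled on gimple.c's
   gimple_stmt_may_fallthru.  Treat the body as an assumption rather than
   the verbatim GCC function: the TRY_CATCH case dispatches to the helper
   above, while a TRY_FINALLY falls through only if both of its
   sub-sequences do.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* An explicit control transfer cannot fall through.  */
      return false;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* GIMPLE_TRY_FINALLY: the cleanup always runs after the try
	 body, so both must fall through for the whole to do so.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    default:
      /* Most other statements may fall through.  */
      return true;
    }
}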

/* Walk the statement at *GSI.  CALLBACK_STMT is invoked on the statement
   itself, CALLBACK_OP on each of its operands, and any statement
   sequences nested inside it are walked recursively.  WI carries the
   walker's state and is passed through to both callbacks.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple *ret;
  tree tree_ret;
  gimple *stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;

      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
				   as_a <gcatch *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
	ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
				   as_a <gtransaction *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
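
/* A minimal usage sketch (an assumption-laden example, not GCC code):
   count the GIMPLE_CALL statements in a sequence by walking it with a
   statement callback.  count_calls and count_calls_in_seq are
   hypothetical names; walk_gimple_seq, walk_stmt_info and is_gimple_call
   are assumed to be in scope via the usual GCC-internal headers.  */

static tree
count_calls (gimple_stmt_iterator *gsi, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  if (is_gimple_call (gsi_stmt (*gsi)))
    ++*(unsigned *) wi->info;

  /* Let walk_gimple_stmt visit operands and nested sequences itself.  */
  *handled_ops_p = false;
  return NULL_TREE;
}

static unsigned
count_calls_in_seq (gimple_seq seq)
{
  unsigned n_calls = 0;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = &n_calls;		/* user data threaded to the callback */
  walk_gimple_seq (seq, count_calls, NULL, &wi);
  return n_calls;
}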

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
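
/* An illustration (hypothetical) of the try-finally rule in the comment
   above: a local with a nontrivial destructor gimplifies to a
   GIMPLE_TRY_FINALLY whose cleanup runs the destructor.  Here the try
   body cannot fall through (it returns), so the whole construct cannot
   fall through either, regardless of the cleanup.  Guard is a made-up
   type.  */

struct Guard
{
  ~Guard ();			/* hypothetical nontrivial cleanup */
};

int
h ()
{
  Guard obj;			/* the code below it is wrapped in a
				   try-finally that runs ~Guard */
  return 42;			/* try body cannot fall through, so
				   neither can the try-finally */
}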

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering; we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
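
/* For context, a minimal sketch of the loop that drives lower_stmt,
   modeled on gimple-low.c's lower_sequence (simplified; treat the body
   as an assumption rather than the verbatim function).  Note that the
   loop does not advance GSI itself: every path through lower_stmt
   consumes the statement at GSI, either by removing it or by calling
   gsi_next past it (directly or in a helper).  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}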