/*
 * act_set_ast:
 *
 * Set the given AST bits on a thread and make sure they are
 * noticed promptly: propagate locally for the current thread,
 * or interrupt the target's processor if it is on-core.
 */
static void
act_set_ast(
	thread_t	thread,
	ast_t		ast)
{
	spl_t	s = splsched();

	if (thread != current_thread()) {
		processor_t	last_proc;

		/* Not us: take the thread lock before touching AST state. */
		thread_lock(thread);
		thread_ast_set(thread, ast);

		/*
		 * If the target is actively running somewhere, interrupt
		 * that processor so it re-examines its pending ASTs.
		 */
		last_proc = thread->last_processor;
		if (last_proc != PROCESSOR_NULL &&
		    last_proc->state == PROCESSOR_RUNNING &&
		    last_proc->active_thread == thread)
			cause_ast_check(last_proc);

		thread_unlock(thread);
	}
	else {
		/* Setting on ourselves: set and fold into the pending set. */
		thread_ast_set(thread, ast);
		ast_propagate(thread->ast);
	}

	splx(s);
}
/*
 * install_special_handler_locked:
 *
 * Do the work of installing the special_handler.
 *
 * Called with the thread mutex and scheduling lock held.
 */
void
install_special_handler_locked(
	thread_t		thread)
{
	/*
	 * Temporarily undepress, so target has
	 * a chance to do locking required to
	 * block itself in special_handler().
	 */
	if (thread->sched_flags & TH_SFLAG_DEPRESSED_MASK)
		thread_recompute_sched_pri(thread, TRUE);

	thread_ast_set(thread, AST_APC);

	if (thread == current_thread()) {
		/* Target is the caller: fold AST_APC into the pending set now. */
		ast_propagate(thread->ast);
	}
	else {
		processor_t	running_on = thread->last_processor;

		/* If the target is on-core elsewhere, interrupt that
		   processor so it notices the newly set AST. */
		if (running_on != PROCESSOR_NULL &&
		    running_on->state == PROCESSOR_RUNNING &&
		    running_on->active_thread == thread)
			cause_ast_check(running_on);
	}
}
/*
 * install_special_handler_locked:
 *
 * Do the work of installing the special_handler.
 *
 * Called with the thread mutex and scheduling lock held.
 */
void
install_special_handler_locked(
	thread_t		thread)
{
	ReturnHandler	**rh;

	/* The work handler must always be the last ReturnHandler on
	   the list, because it can do tricky things like detach
	   the thr_act. */
	for (rh = &thread->handlers; *rh; rh = &(*rh)->next)
		continue;

	/* Append special_handler unless it is already the list tail:
	   if the walk ended at special_handler's own next field, the
	   handler is already installed as the last entry. */
	if (rh != &thread->special_handler.next)
		*rh = &thread->special_handler;

	/*
	 * Temporarily undepress, so target has
	 * a chance to do locking required to
	 * block itself in special_handler().
	 */
	if (thread->sched_flags & TH_SFLAG_DEPRESSED_MASK)
		SCHED(compute_priority)(thread, TRUE);

	/* Mark the APC AST pending so the target runs the handler. */
	thread_ast_set(thread, AST_APC);

	if (thread == current_thread())
		/* Target is the caller: propagate into the pending set now. */
		ast_propagate(thread->ast);
	else {
		processor_t		processor = thread->last_processor;

		/* If the target is on-core on another processor,
		   interrupt it so the new AST is noticed promptly. */
		if (	processor != PROCESSOR_NULL			&&
				processor->state == PROCESSOR_RUNNING	&&
				processor->active_thread == thread		)
			cause_ast_check(processor);
	}
}
/*
 * thread_set_apc_ast_locked:
 *
 * Do the work of registering for the AST_APC callback.
 *
 * Called with the thread mutex and scheduling lock held.
 */
static void
thread_set_apc_ast_locked(thread_t thread)
{
	/*
	 * Temporarily undepress, so target has
	 * a chance to do locking required to
	 * block itself in thread_suspended.
	 *
	 * Leaves the depress flag set so we can reinstate when it's blocked.
	 */
	if (thread->sched_flags & TH_SFLAG_DEPRESSED_MASK) {
		thread_recompute_sched_pri(thread, TRUE);
	}

	thread_ast_set(thread, AST_APC);

	if (thread != current_thread()) {
		processor_t	last_proc = thread->last_processor;

		/* Target is on-core on another processor: poke it so
		   the pending AST is noticed. */
		if (last_proc != PROCESSOR_NULL &&
		    last_proc->state == PROCESSOR_RUNNING &&
		    last_proc->active_thread == thread) {
			cause_ast_check(last_proc);
		}
	} else {
		/* We are the target; fold AST_APC into the pending set. */
		ast_propagate(thread);
	}
}