/* select the next thread in the ready list with top_prio */ static void select_next_thread(int top_prio) { list_node_t *pnode; /* otherwise, threads in a ready state, then run the highest * priority one. */ pnode = delete_front_list(&ready_list[top_prio]); if (is_empty_list(&ready_list[top_prio])) prio_exist_flag[top_prio] = false; next_thread = entry_list(pnode, thread_struct, node); next_thread->state = RUNNING; next_thread->time_quantum = TIME_QUANTUM; }
void start_curt() { cpu_sr_t cpu_sr; list_node_t *pnode; int top_prio; cpu_sr = save_cpu_sr(); is_start_os = true; /* examine the highest priority thread executed */ top_prio = get_top_prio(); pnode = delete_front_list(&ready_list[top_prio]); if (is_empty_list(&ready_list[top_prio])) prio_exist_flag[top_prio] = false; current_thread = entry_list(pnode, thread_struct, node); current_thread->state = RUNNING; restore_cpu_sr(cpu_sr); restore_context(); }
/*
 * Release the semaphore @sem.
 *
 * If a thread is blocked on the semaphore, the token is handed directly
 * to the longest waiter: it is made READY, queued at the back of its
 * priority's ready list, and a reschedule is requested.  sem->value is
 * NOT incremented in that case (direct handoff).  With no waiters the
 * token is simply banked in sem->value.
 *
 * Interrupts are masked around all scheduler-state updates.
 */
void sem_post(sem_struct *sem)
{
	thread_struct *waiter;
	cpu_sr_t cpu_sr = save_cpu_sr();

	if (is_empty_list(&sem->wait_list)) {
		/* Nobody waiting: just record the post. */
		sem->value++;
		restore_cpu_sr(cpu_sr);
		return;
	}

	/* Wake the oldest waiter; the token passes straight to it. */
	waiter = entry_list(delete_front_list(&sem->wait_list),
			    thread_struct, node);
	waiter->state = READY;
	insert_back_list(&ready_list[waiter->prio], &waiter->node);
	prio_exist_flag[waiter->prio] = true;

	restore_cpu_sr(cpu_sr);
	schedule(SCHED_THREAD_REQUEST);
}
/** * @brief rountine for idle thread * @param data */ void idle_thread_func(void *data) { cpu_sr_t cpu_sr; thread_struct *pthread; list_node_t *pnode; while (1) { cpu_sr = save_cpu_sr(); /* check if there is any terminated thread to be recycled */ if (!is_empty_list(&termination_wait_list)) { pnode = delete_front_list(&termination_wait_list); pthread = entry_list(pnode, thread_struct, node); thread_table[pthread->tid] = NULL; total_thread_cnt--; } restore_cpu_sr(cpu_sr); /* always does this */ thread_yield(); } }