int pc_export_switch_2(void) { BPLONG sw,sw_ins_ids,sw_ins_id; SW_INS_PTR *curr_ins_ptr; sw = bpx_get_integer(bpx_get_call_arg(1,2)); sw_ins_ids = bpx_get_call_arg(2,2); if (sw >= max_sw_tab_size) expand_switch_table(sw + 1); if (sw >= sw_tab_size) sw_tab_size = sw + 1; curr_ins_ptr = &switches[sw]; while (bpx_is_list(sw_ins_ids)) { sw_ins_id = bpx_get_integer(bpx_get_car(sw_ins_ids)); sw_ins_ids = bpx_get_cdr(sw_ins_ids); if (sw_ins_id >= max_sw_ins_tab_size) expand_switch_instance_table(sw_ins_id + 1); if (sw_ins_id >= sw_ins_tab_size) sw_ins_tab_size = sw_ins_id + 1; switch_instances[sw_ins_id] = alloc_switch_instance(); switch_instances[sw_ins_id]->id = sw_ins_id; *curr_ins_ptr = switch_instances[sw_ins_id]; curr_ins_ptr = &switch_instances[sw_ins_id]->next; } *curr_ins_ptr = NULL; return BP_TRUE; }
int pc_crf_prepare_4(void) { TERM p_fact_list; int size; p_fact_list = bpx_get_call_arg(1,4); size = bpx_get_integer(bpx_get_call_arg(2,4)); num_goals = bpx_get_integer(bpx_get_call_arg(3,4)); failure_root_index = bpx_get_integer(bpx_get_call_arg(4,4)); failure_observed = (failure_root_index != -1); if (failure_root_index != -1) { failure_subgoal_id = prism_goal_id_get(failure_atom); if (failure_subgoal_id == -1) { emit_internal_error("no subgoal ID allocated to `failure'"); RET_INTERNAL_ERR; } } initialize_egraph_index(); alloc_sorted_egraph(size); RET_ON_ERR(sort_crf_egraphs(p_fact_list)); #ifndef MPI if (verb_graph) { print_egraph(0, PRINT_NEUTRAL); } #endif /* !(MPI) */ alloc_occ_switches(); alloc_num_sw_vals(); return BP_TRUE; }
int pc_compute_inside_2(void) { int gid; double prob; EG_NODE_PTR eg_ptr; gid = bpx_get_integer(bpx_get_call_arg(1,2)); initialize_egraph_index(); alloc_sorted_egraph(1); RET_ON_ERR(sort_one_egraph(gid, 0, 1)); if (verb_graph) { print_egraph(0, PRINT_NEUTRAL); } eg_ptr = expl_graph[gid]; if (log_scale) { RET_ON_ERR(compute_inside_scaling_log_exp()); prob = eg_ptr->inside; } else { RET_ON_ERR(compute_inside_scaling_none()); prob = eg_ptr->inside; } return bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(prob)); }
/* * Note: parameters are always refreshed in advance by $pc_export_sw_info/1, * so it causes no problem to overwrite them temporarily */ int pc_compute_n_viterbi_rerank_4(void) { TERM p_n_viterbi_list; int n,l,goal_id; n = bpx_get_integer(bpx_get_call_arg(1,4)); l = bpx_get_integer(bpx_get_call_arg(2,4)); goal_id = bpx_get_integer(bpx_get_call_arg(3,4)); initialize_egraph_index(); alloc_sorted_egraph(1); /* INIT_MIN_MAX_NODE_NOS; */ RET_ON_ERR(sort_one_egraph(goal_id,0,1)); if (verb_graph) print_egraph(0,PRINT_NEUTRAL); alloc_occ_switches(); transfer_hyperparams_prolog(); get_param_means(); compute_n_max(l); get_n_most_likely_path_rerank(n,l,goal_id,&p_n_viterbi_list); release_occ_switches(); return bpx_unify(bpx_get_call_arg(4,4),p_n_viterbi_list); }
/* [Note] node copying is not required here even in computation without * inter-goal sharing, but we need to declare it explicitly. */ int pc_compute_viterbi_5(void) { TERM p_goal_path,p_subpath_goal,p_subpath_sw; int goal_id; double viterbi_prob; goal_id = bpx_get_integer(bpx_get_call_arg(1,5)); initialize_egraph_index(); alloc_sorted_egraph(1); /* INIT_MIN_MAX_NODE_NOS; */ RET_ON_ERR(sort_one_egraph(goal_id,0,1)); if (verb_graph) print_egraph(0,PRINT_NEUTRAL); compute_max(); if (debug_level) print_egraph(1,PRINT_VITERBI); get_most_likely_path(goal_id,&p_goal_path,&p_subpath_goal, &p_subpath_sw,&viterbi_prob); return bpx_unify(bpx_get_call_arg(2,5), p_goal_path) && bpx_unify(bpx_get_call_arg(3,5), p_subpath_goal) && bpx_unify(bpx_get_call_arg(4,5), p_subpath_sw) && bpx_unify(bpx_get_call_arg(5,5), bpx_build_float(viterbi_prob)); }
/*
 * Fetch the feature value (inside_h) and lambda (inside) of switch
 * instance <arg1>; unify them with args 2 and 3.
 */
int pc_get_snode_feature_3(void)
{
    int sw_ins_id;
    double feature, lambda;

    sw_ins_id = bpx_get_integer(bpx_get_call_arg(1,3));
    feature   = switch_instances[sw_ins_id]->inside_h;
    lambda    = switch_instances[sw_ins_id]->inside;

    return bpx_unify(bpx_get_call_arg(2,3), bpx_build_float(feature))
        && bpx_unify(bpx_get_call_arg(3,3), bpx_build_float(lambda));
}
/*
 * Fetch the inside probability of switch instance <arg1>, converting it
 * to log scale when log_scale is set, and unify it with <arg2>.
 */
int pc_get_snode_inside_2(void)
{
    int sw_ins_id = bpx_get_integer(bpx_get_call_arg(1,2));
    double value  = switch_instances[sw_ins_id]->inside;

    if (log_scale) {
        value = log(value);
    }

    return bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(value));
}
int pc_prism_grd_2(void) { struct CRF_Engine crf_eng; RET_ON_ERR(run_grd(&crf_eng)); return bpx_unify(bpx_get_call_arg(1,2), bpx_build_integer(crf_eng.iterate)) && bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(crf_eng.likelihood)); }
int pc_prism_vbvt_2(void) { struct VBVT_Engine vbvt_eng; RET_ON_ERR(check_smooth_vb()); RET_ON_ERR(run_vbvt(&vbvt_eng)); return bpx_unify(bpx_get_call_arg(1,2), bpx_build_integer(vbvt_eng.iterate)) && bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(vbvt_eng.free_energy)); }
int pc_prism_vbem_2(void) { struct VBEM_Engine vb_eng; RET_ON_ERR(check_smooth_vb()); RET_ON_ERR(run_vbem(&vb_eng)); release_num_sw_vals(); return bpx_unify(bpx_get_call_arg(1,2), bpx_build_integer(vb_eng.iterate)) && bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(vb_eng.free_energy)); }
int pc_prism_vt_4(void) { struct VT_Engine vt_eng; RET_ON_ERR(check_smooth(&vt_eng.smooth)); RET_ON_ERR(run_vt(&vt_eng)); return bpx_unify(bpx_get_call_arg(1,4), bpx_build_integer(vt_eng.iterate )) && bpx_unify(bpx_get_call_arg(2,4), bpx_build_float (vt_eng.lambda )) && bpx_unify(bpx_get_call_arg(3,4), bpx_build_float (vt_eng.likelihood)) && bpx_unify(bpx_get_call_arg(4,4), bpx_build_integer(vt_eng.smooth )) ; }
/*
 * Export explanation-graph statistics to Prolog.
 * stats[] is filled by graph_stats(); the average sharing is stats[3]
 * divided by stats[0].
 * NOTE(review): exact meanings of stats[0..3] are defined by graph_stats()
 * elsewhere in this file — confirm against its definition.
 */
int pc_import_graph_stats_4(void)
{
    int stats[4];
    double avg_shared;

    graph_stats(stats);

    /* Guard against an empty graph: stats[0]==0 would otherwise produce
     * an IEEE inf/NaN which gets exported to Prolog as a float. */
    avg_shared = (stats[0] != 0) ? (double)stats[3] / stats[0] : 0.0;

    return bpx_unify(bpx_get_call_arg(1,4), bpx_build_integer(stats[0]))
        && bpx_unify(bpx_get_call_arg(2,4), bpx_build_integer(stats[1]))
        && bpx_unify(bpx_get_call_arg(3,4), bpx_build_integer(stats[2]))
        && bpx_unify(bpx_get_call_arg(4,4), bpx_build_float(avg_shared));
}
/*
 * Export probabilities of switches from Prolog to C. Switches is
 * a list of switches, each of which takes the form:
 *
 *   sw(Id,InstanceIds,Probs,SmoothCs,Fixed,FixedH),
 *
 * where
 *   Id:          identifier of the switch
 *   InstanceIds: list of ids of the instances of the switch
 *   Probs:       current probabilities assigned to the instance switches
 *   SmoothCs:    current pseudo counts assigned to the instance switches
 *   Fixed:       probabilities fixed?
 *   FixedH:      pseudo counts fixed?
 *
 * The structures for switch instances have been allocated. This
 * function only fills out the initial probabilities.
 */
int pc_export_sw_info_1(void)
{
    int sw_id,instance_id,fixed,fixed_h;
    double prob,smooth;
    TERM p_switches, p_switch;
    TERM p_instance_list,p_prob_list,p_smooth_list;
    TERM p_prob,p_smooth;

    p_switches = bpx_get_call_arg(1,1);

    /* outer loop: one iteration per sw/6 term in the switch list */
    while (bpx_is_list(p_switches)) {
        /* p_switch: sw(Id,InstList,ProbList,SmoothCList,FixedP,FixedH) */
        p_switch = bpx_get_car(p_switches);

        /* sw_id is extracted but not used below; only the per-instance
         * fields are written into switch_instances[] */
        sw_id = bpx_get_integer(bpx_get_arg(1,p_switch));
        p_instance_list = bpx_get_arg(2,p_switch);
        p_prob_list = bpx_get_arg(3,p_switch);
        p_smooth_list = bpx_get_arg(4,p_switch);
        fixed = bpx_get_integer(bpx_get_arg(5,p_switch));
        fixed_h = bpx_get_integer(bpx_get_arg(6,p_switch));

        /* inner loop: walk InstanceIds, Probs and SmoothCs in lockstep;
         * the three lists are assumed to have equal length — enforced by
         * the Prolog caller, not checked here */
        while (bpx_is_list(p_instance_list)) {
            instance_id = bpx_get_integer(bpx_get_car(p_instance_list));
            p_prob = bpx_get_car(p_prob_list);
            p_smooth = bpx_get_car(p_smooth_list);

            /* probabilities may arrive as Prolog integers or floats */
            if (bpx_is_integer(p_prob)) {
                prob = (double)bpx_get_integer(p_prob);
            }
            else if (bpx_is_float(p_prob)) {
                prob = bpx_get_float(p_prob);
            }
            else {
                RET_ERR(illegal_arguments);
            }

            /* same leniency for pseudo counts */
            if (bpx_is_integer(p_smooth)) {
                smooth = (double)bpx_get_integer(p_smooth);
            }
            else if (bpx_is_float(p_smooth)) {
                smooth = bpx_get_float(p_smooth);
            }
            else {
                RET_ERR(illegal_arguments);
            }

            switch_instances[instance_id]->inside = prob;
            switch_instances[instance_id]->fixed = fixed;
            switch_instances[instance_id]->fixed_h = fixed_h;
            switch_instances[instance_id]->smooth_prolog = smooth;

            p_instance_list = bpx_get_cdr(p_instance_list);
            p_prob_list = bpx_get_cdr(p_prob_list);
            p_smooth_list = bpx_get_cdr(p_smooth_list);
        }

        p_switches = bpx_get_cdr(p_switches);
    }

    return BP_TRUE;
}
int pc_compute_fprobf_1(void) { int prmode; prmode = bpx_get_integer(bpx_get_call_arg(1,1)); failure_root_index = -1; initialize_weights(); /* [31 Mar 2008, by yuizumi] * compute_outside_scaling_*() needs to be called because * eg_ptr->outside computed by compute_expectation_scaling_*() * is different from the outside probability. */ if (log_scale) { RET_ON_ERR(compute_feature_scaling_log_exp()); if (prmode != 1) { RET_ON_ERR(compute_expectation_scaling_log_exp()); RET_ON_ERR(compute_outside_scaling_log_exp()); } } else { RET_ON_ERR(compute_feature_scaling_none()); if (prmode != 1) { RET_ON_ERR(compute_expectation_scaling_none()); RET_ON_ERR(compute_outside_scaling_none()); } } return BP_TRUE; }
int pc_add_egraph_path_3(void) { TERM p_node_id,p_children,p_sws; int node_id; /* children_prolog and sws_prolog must be in the table area */ p_node_id = bpx_get_call_arg(1,3); p_children = bpx_get_call_arg(2,3); p_sws = bpx_get_call_arg(3,3); if (!bpx_is_integer(p_node_id)) RET_ERR(err_invalid_goal_id); node_id = bpx_get_integer(p_node_id); XDEREF(p_children); XDEREF(p_sws); RET_ON_ERR(add_egraph_path(node_id,p_children,p_sws)); return BP_TRUE; }
/*
 * Sort the explanation graph such that no node sorted_expl_graph[i] calls
 * node sorted_expl_graph[j] if i < j.
 *
 * This function is used only for probf/1-2, so we don't have to consider
 * scaling here.
 */
int pc_alloc_sort_egraph_1(void)
{
    int root_id = bpx_get_integer(bpx_get_call_arg(1,1));

    index_to_sort = 0;
    alloc_sorted_egraph(1);
    RET_ON_ERR(sort_one_egraph(root_id, 0, 1));

    return BP_TRUE;
}
int pc_compute_n_viterbi_3(void) { TERM p_n_viterbi_list; int n,goal_id; n = bpx_get_integer(bpx_get_call_arg(1,3)); goal_id = bpx_get_integer(bpx_get_call_arg(2,3)); initialize_egraph_index(); alloc_sorted_egraph(1); /* INIT_MIN_MAX_NODE_NOS; */ RET_ON_ERR(sort_one_egraph(goal_id,0,1)); if (verb_graph) print_egraph(0,PRINT_NEUTRAL); compute_n_max(n); if (debug_level) print_egraph(1,PRINT_VITERBI); get_n_most_likely_path(n,goal_id,&p_n_viterbi_list); return bpx_unify(bpx_get_call_arg(3,3),p_n_viterbi_list); }
int pc_prism_em_6(void) { struct EM_Engine em_eng; RET_ON_ERR(check_smooth(&em_eng.smooth)); RET_ON_ERR(run_em(&em_eng)); release_num_sw_vals(); return bpx_unify(bpx_get_call_arg(1,6), bpx_build_integer(em_eng.iterate )) && bpx_unify(bpx_get_call_arg(2,6), bpx_build_float (em_eng.lambda )) && bpx_unify(bpx_get_call_arg(3,6), bpx_build_float (em_eng.likelihood)) && bpx_unify(bpx_get_call_arg(4,6), bpx_build_float (em_eng.bic )) && bpx_unify(bpx_get_call_arg(5,6), bpx_build_float (em_eng.cs )) && bpx_unify(bpx_get_call_arg(6,6), bpx_build_integer(em_eng.smooth )) ; }
int pc_compute_probf_1(void) { EG_NODE_PTR eg_ptr; int prmode; prmode = bpx_get_integer(bpx_get_call_arg(1,1)); if (prmode == 3) { compute_max(); return BP_TRUE; } eg_ptr = expl_graph[roots[0]->id]; failure_root_index = -1; /* [31 Mar 2008, by yuizumi] * compute_outside_scaling_*() is needed to be called because * eg_ptr->outside computed by compute_expectation_scaling_*() * is different from the outside probability. */ if (log_scale) { RET_ON_ERR(compute_inside_scaling_log_exp()); if (prmode != 1) { RET_ON_ERR(compute_expectation_scaling_log_exp()); RET_ON_ERR(compute_outside_scaling_log_exp()); } } else { RET_ON_ERR(compute_inside_scaling_none()); if (prmode != 1) { RET_ON_ERR(compute_expectation_scaling_none()); RET_ON_ERR(compute_outside_scaling_none()); } } return BP_TRUE; }
/*
 * Export the occurring switches to Prolog as a list of
 * sw(Id, [sw_ins(InsId, Inside, Smooth, TotalExpect), ...]) terms,
 * unified with arg 1; args 2 and 3 receive the switch and switch-instance
 * counts.  Both lists are built back-to-front by consing onto nil.
 *
 * On YAP, term construction may exhaust the heap; the loop then resets
 * the heap pointer to the snapshot taken at `restart', runs the garbage
 * collector, and rebuilds everything from scratch — the statement order
 * around hstart/H is therefore load-bearing.
 */
int pc_import_occ_switches_3(void)
{
    CACHE_REGS
    TERM p_sw_list,p_sw_list0,p_sw_list1;
    TERM p_sw_ins_list0,p_sw_ins_list1,sw,sw_ins;
    TERM p_num_sw, p_num_sw_ins;
    int i;
    int num_sw_ins;
    void release_occ_switches();
#ifdef __YAP_PROLOG__
    TERM *hstart;
restart:
    /* snapshot of the heap top; restored if we run out of space below */
    hstart = heap_top;
#endif
    p_sw_list = bpx_get_call_arg(1,3);
    p_num_sw = bpx_get_call_arg(2,3);
    p_num_sw_ins = bpx_get_call_arg(3,3);

    p_sw_list0 = bpx_build_nil();
    num_sw_ins = 0;
    for (i = 0; i < occ_switch_tab_size; i++) {
        SW_INS_PTR ptr;
#ifdef __YAP_PROLOG__
        /* keep a 64K safety margin between heap and local stack; on
         * overflow, roll back to the snapshot, GC, and start over */
        if ( heap_top + 64*1024 >= local_top ) {
            H = hstart; /* running out of stack */
            extern int Yap_gcl(UInt gc_lim, Int predarity, CELL *current_env, yamop *nextop);
            Yap_gcl(4*64*1024, 3, ENV, CP);
            goto restart;
        }
#endif
        sw = bpx_build_structure("sw",2);
        bpx_unify(bpx_get_arg(1,sw), bpx_build_integer(i));

        /* build the instance list for switch i, front-to-back reversed */
        p_sw_ins_list0 = bpx_build_nil();
        ptr = occ_switches[i];
        while (ptr != NULL) {
            num_sw_ins++;
            if (ptr->inside <= 0.0) ptr->inside = 0.0; /* FIXME: quick hack */
            sw_ins = bpx_build_structure("sw_ins",4);
            bpx_unify(bpx_get_arg(1,sw_ins),bpx_build_integer(ptr->id));
            bpx_unify(bpx_get_arg(2,sw_ins),bpx_build_float(ptr->inside));
            bpx_unify(bpx_get_arg(3,sw_ins),bpx_build_float(ptr->smooth));
            bpx_unify(bpx_get_arg(4,sw_ins),bpx_build_float(ptr->total_expect));
            p_sw_ins_list1 = bpx_build_list();
            bpx_unify(bpx_get_car(p_sw_ins_list1),sw_ins);
            bpx_unify(bpx_get_cdr(p_sw_ins_list1),p_sw_ins_list0);
            p_sw_ins_list0 = p_sw_ins_list1;
            ptr = ptr->next;
        }
        bpx_unify(bpx_get_arg(2,sw),p_sw_ins_list0);

        /* cons sw/2 onto the result list */
        p_sw_list1 = bpx_build_list();
        bpx_unify(bpx_get_car(p_sw_list1),sw);
        bpx_unify(bpx_get_cdr(p_sw_list1),p_sw_list0);
        p_sw_list0 = p_sw_list1;
    }

    release_occ_switches();

    return bpx_unify(p_sw_list, p_sw_list0)
        && bpx_unify(p_num_sw, bpx_build_integer(occ_switch_tab_size))
        && bpx_unify(p_num_sw_ins, bpx_build_integer(num_sw_ins));
}
/* Unify <arg2> with the total expectation of switch instance <arg1>. */
int pc_get_snode_expectation_2(void)
{
    int sw_ins_id = bpx_get_integer(bpx_get_call_arg(1,2));
    double expect = switch_instances[sw_ins_id]->total_expect;

    return bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(expect));
}
/* Unify <arg2> with the Viterbi (max) value of egraph node <arg1>. */
int pc_get_gnode_viterbi_2(void)
{
    int gid        = bpx_get_integer(bpx_get_call_arg(1,2));
    double max_val = expl_graph[gid]->max;

    return bpx_unify(bpx_get_call_arg(2,2), bpx_build_float(max_val));
}
/*
 * Export all explanation paths of sorted node <arg1> as a Prolog list of
 * [GoalIds, SwitchIds] pairs, unified with <arg2>.  A node with no paths
 * is represented as [[[],[]]] when explicit_empty_expls is set, or []
 * otherwise.  List cells are allocated with bpx_build_list() and filled
 * via unification in a fixed order — do not reorder these calls.
 */
int pc_import_sorted_graph_paths_2(void)
{
    TERM paths0,paths1,glist,slist,t0,t1,p_tmp;
    EG_PATH_PTR path_ptr;
    EG_NODE_PTR *children;
    SW_INS_PTR *sws;
    int node_id,k,len;

    node_id = bpx_get_integer(bpx_get_call_arg(1,2));
    path_ptr = sorted_expl_graph[node_id]->path_ptr;

    if (path_ptr == NULL) {
        /* no paths: emit the singleton [[[],[]]] or just [] */
        if (explicit_empty_expls) {
            t0 = bpx_build_list();
            t1 = bpx_build_list();
            bpx_unify(bpx_get_car(t0),bpx_build_nil());
            bpx_unify(bpx_get_cdr(t0),t1);
            bpx_unify(bpx_get_car(t1),bpx_build_nil());
            bpx_unify(bpx_get_cdr(t1),bpx_build_nil());
            paths0 = bpx_build_list();
            bpx_unify(bpx_get_car(paths0),t0);
            bpx_unify(bpx_get_cdr(paths0),bpx_build_nil());
        }
        else paths0 = bpx_build_nil();
    }
    else {
        paths0 = bpx_build_nil();
        while (path_ptr != NULL) {
            /* glist := list of child goal ids on this path */
            len = path_ptr->children_len;
            children = path_ptr->children;
            if (len > 0) {
                glist = bpx_build_list();
                p_tmp = glist;
                for (k = 0; k < len; k++) {
                    bpx_unify(bpx_get_car(p_tmp),
                              bpx_build_integer(children[k]->id));
                    if (k == len - 1) {
                        bpx_unify(bpx_get_cdr(p_tmp),bpx_build_nil());
                    }
                    else {
                        bpx_unify(bpx_get_cdr(p_tmp),bpx_build_list());
                        p_tmp = bpx_get_cdr(p_tmp);
                    }
                }
            }
            else glist = bpx_build_nil();

            /* slist := list of switch instance ids on this path */
            len = path_ptr->sws_len;
            sws = path_ptr->sws;
            if (len > 0) {
                slist = bpx_build_list();
                p_tmp = slist;
                for (k = 0; k < len; k++) {
                    bpx_unify(bpx_get_car(p_tmp),bpx_build_integer(sws[k]->id));
                    if (k == len - 1) {
                        bpx_unify(bpx_get_cdr(p_tmp),bpx_build_nil());
                    }
                    else {
                        bpx_unify(bpx_get_cdr(p_tmp),bpx_build_list());
                        p_tmp = bpx_get_cdr(p_tmp);
                    }
                }
            }
            else slist = bpx_build_nil();

            /* cons [glist,slist] onto the result unless it is an empty
             * explanation being suppressed */
            if (explicit_empty_expls || !bpx_is_nil(glist) || !bpx_is_nil(slist)) {
                t0 = bpx_build_list();
                t1 = bpx_build_list();
                bpx_unify(bpx_get_car(t0),glist);
                bpx_unify(bpx_get_cdr(t0),t1);
                bpx_unify(bpx_get_car(t1),slist);
                bpx_unify(bpx_get_cdr(t1),bpx_build_nil());
                paths1 = bpx_build_list();
                bpx_unify(bpx_get_car(paths1),t0);
                bpx_unify(bpx_get_cdr(paths1),paths0);
                paths0 = paths1;
            }
            path_ptr = path_ptr->next;
        }
    }

    return bpx_unify(bpx_get_call_arg(2,2),paths0);
}
/* Unify <arg2> with the goal id of sorted egraph entry <arg1>. */
int pc_import_sorted_graph_gid_2(void)
{
    int sorted_idx = bpx_get_integer(bpx_get_call_arg(1,2));
    int gid        = sorted_expl_graph[sorted_idx]->id;

    return bpx_unify(bpx_get_call_arg(2,2), bpx_build_integer(gid));
}
int pc_import_sorted_graph_size_1(void) { return bpx_unify(bpx_get_call_arg(1,1), bpx_build_integer(sorted_egraph_size)); }
int pc_set_log_scale_1(void) { log_scale = bpx_get_integer(bpx_get_call_arg(1,1)); return BP_TRUE; }
int pc_set_explicit_empty_expls_1(void) { explicit_empty_expls = bpx_get_integer(bpx_get_call_arg(1,1)); return BP_TRUE; }
int pc_set_fix_init_order_1(void) { fix_init_order = bpx_get_integer(bpx_get_call_arg(1,1)); return BP_TRUE; }
int pc_set_init_method_1(void) { init_method = bpx_get_integer(bpx_get_call_arg(1,1)); return BP_TRUE; }
int pc_set_itemp_rate_1(void) { itemp_rate = bpx_get_float(bpx_get_call_arg(1,1)); return BP_TRUE; }