// Recursively copy an AST node, substituting static parameters and
// re-preparing nested lambdas.
//   expr  - node to copy (symbol, lambda_info, cell array, expr, or leaf)
//   sp    - flat tuple of alternating (typevar, value) static-parameter pairs
//   do_sp - nonzero to substitute integer-valued static parameters for
//           symbols in this context (disabled inside lambda arg/capture lists)
// Returns a fresh tree; leaves that need no copying are returned as-is.
static jl_value_t *copy_ast(jl_value_t *expr, jl_tuple_t *sp, int do_sp)
{
    if (jl_is_symbol(expr)) {
        if (!do_sp) return expr;
        // pre-evaluate certain static parameters to help type inference
        for(int i=0; i < jl_tuple_len(sp); i+=2) {
            assert(jl_is_typevar(jl_tupleref(sp,i)));
            if ((jl_sym_t*)expr == ((jl_tvar_t*)jl_tupleref(sp,i))->name) {
                jl_value_t *spval = jl_tupleref(sp,i+1);
                // only integer (long) values are substituted here; other
                // static-parameter values are left symbolic
                if (jl_is_long(spval))
                    return spval;
            }
        }
    }
    else if (jl_is_lambda_info(expr)) {
        jl_lambda_info_t *li = (jl_lambda_info_t*)expr;
        /*
        if (sp == jl_null && li->ast &&
            jl_lam_capt((jl_expr_t*)li->ast)->length == 0)
            return expr;
        */
        // TODO: avoid if above condition is true and decls have already
        // been evaluated.
        // root li across the allocating calls below
        JL_GC_PUSH(&li);
        li = jl_add_static_parameters(li, sp);
        // recursively prepare the inner lambda's AST with its own sparams
        li->ast = jl_prepare_ast(li, li->sparams);
        JL_GC_POP();
        return (jl_value_t*)li;
    }
    else if (jl_typeis(expr,jl_array_any_type)) {
        // element-wise copy of a cell array, propagating do_sp unchanged
        jl_array_t *a = (jl_array_t*)expr;
        jl_array_t *na = jl_alloc_cell_1d(jl_array_len(a));
        JL_GC_PUSH(&na);
        size_t i;
        for(i=0; i < jl_array_len(a); i++)
            jl_cellset(na, i, copy_ast(jl_cellref(a,i), sp, do_sp));
        JL_GC_POP();
        return (jl_value_t*)na;
    }
    else if (jl_is_expr(expr)) {
        jl_expr_t *e = (jl_expr_t*)expr;
        jl_expr_t *ne = jl_exprn(e->head, jl_array_len(e->args));
        JL_GC_PUSH(&ne);
        size_t i;
        if (e->head == lambda_sym) {
            // first two lambda args copied with do_sp=0, third with do_sp=1
            jl_exprarg(ne, 0) = copy_ast(jl_exprarg(e,0), sp, 0);
            jl_exprarg(ne, 1) = copy_ast(jl_exprarg(e,1), sp, 0);
            jl_exprarg(ne, 2) = copy_ast(jl_exprarg(e,2), sp, 1);
        }
        else {
            for(i=0; i < jl_array_len(e->args); i++)
                jl_exprarg(ne, i) = copy_ast(jl_exprarg(e,i), sp, 1);
        }
        JL_GC_POP();
        return (jl_value_t*)ne;
    }
    // other leaf values (numbers, etc.) are immutable for our purposes
    return expr;
}
// given a new lambda_info with static parameter values, make a copy // of the tree with declared types evaluated and static parameters passed // on to all enclosed functions. // this tree can then be further mutated by optimization passes. DLLEXPORT jl_value_t *jl_prepare_ast(jl_lambda_info_t *li, jl_tuple_t *sparams) { jl_tuple_t *spenv = NULL; jl_value_t *ast = li->ast; if (ast == NULL) return NULL; JL_GC_PUSH(&spenv, &ast); spenv = jl_tuple_tvars_to_symbols(sparams); if (!jl_is_expr(ast)) { ast = jl_uncompress_ast(li, ast); ast = dont_copy_ast(ast, sparams, 1); } else { ast = copy_ast(ast, sparams, 1); } jl_module_t *last_m = jl_current_module; JL_TRY { jl_current_module = li->module; eval_decl_types(jl_lam_vinfo((jl_expr_t*)ast), spenv); eval_decl_types(jl_lam_capt((jl_expr_t*)ast), spenv); } JL_CATCH { jl_current_module = last_m; jl_rethrow(); } jl_current_module = last_m; JL_GC_POP(); return ast; }
// Store a record in the fixed-size record table.
// If a record with the same name already exists, replace its (currently
// empty) payload and wake any clients waiting on it; otherwise claim the
// first free slot. Only mode 0 is supported.
// Returns SYS_ERR_OK on success, OCT_ERR_NO_RECORD when the table is full
// (after asserting, since running out of slots is considered fatal here).
errval_t set_record(struct ast_object* ast, uint64_t mode,
                    struct oct_query_state* sqs)
{
    assert(ast != NULL);
    assert(sqs != NULL);
    assert(mode == 0);

    // Pass 1: look for an existing record with a matching name.
    for (size_t slot = 0; slot < MAX_RECORDS; slot++) {
        struct record* r = &record_storage[slot];
        if (r->name != NULL) {
            OCT_DEBUG("found record: %s\n", r->name);
            if (strcmp(RECORD_NAME(ast), r->name) == 0) {
                assert(r->record == NULL);
                copy_ast(&r->record, ast);
                wakeup_clients(r);
                return SYS_ERR_OK;
            }
        }
    }

    // Pass 2: no match — claim the first unused slot.
    for (size_t slot = 0; slot < MAX_RECORDS; slot++) {
        struct record* r = &record_storage[slot];
        if (r->name == NULL) {
            r->name = strdup(RECORD_NAME(ast));
            copy_ast(&r->record, ast);
            assert(r->waiting_parties == NULL);
            return SYS_ERR_OK;
        }
    }

    assert(!"No more storage space!");
    return OCT_ERR_NO_RECORD;
}
// given a new lambda_info with static parameter values, make a copy // of the tree with declared types evaluated and static parameters passed // on to all enclosed functions. // this tree can then be further mutated by optimization passes. DLLEXPORT jl_value_t *jl_prepare_ast(jl_lambda_info_t *li, jl_tuple_t *sparams) { jl_tuple_t *spenv = NULL; jl_value_t *l_ast = li->ast; if (l_ast == NULL) return NULL; jl_value_t *ast = l_ast; JL_GC_PUSH(&spenv, &ast); if (jl_is_tuple(ast)) ast = jl_uncompress_ast((jl_tuple_t*)ast); spenv = jl_tuple_tvars_to_symbols(sparams); ast = copy_ast(ast, sparams); eval_decl_types(jl_lam_vinfo((jl_expr_t*)ast), spenv); eval_decl_types(jl_lam_capt((jl_expr_t*)ast), spenv); JL_GC_POP(); return ast; }
// In-place counterpart of copy_ast: rewrites an expression tree without
// duplicating expr nodes. Symbols and lambda_infos are still delegated to
// copy_ast (those paths produce new values rather than mutating).
//   sp    - (typevar, value) static-parameter pairs
//   do_sp - whether symbol substitution applies in this context
static jl_value_t *dont_copy_ast(jl_value_t *expr, jl_tuple_t *sp, int do_sp)
{
    if (jl_is_symbol(expr) || jl_is_lambda_info(expr))
        return copy_ast(expr, sp, do_sp);
    if (!jl_is_expr(expr))
        return expr;

    jl_expr_t *ex = (jl_expr_t*)expr;
    if (ex->head == lambda_sym) {
        // first two lambda args processed with do_sp=0, third with do_sp=1
        jl_exprarg(ex, 0) = dont_copy_ast(jl_exprarg(ex,0), sp, 0);
        jl_exprarg(ex, 1) = dont_copy_ast(jl_exprarg(ex,1), sp, 0);
        jl_exprarg(ex, 2) = dont_copy_ast(jl_exprarg(ex,2), sp, 1);
    }
    else {
        size_t nargs = jl_array_len(ex->args);
        for (size_t k = 0; k < nargs; k++)
            jl_exprarg(ex, k) = dont_copy_ast(jl_exprarg(ex,k), sp, 1);
    }
    return (jl_value_t*)ex;
}
static void copy_ast(struct ast_object** copy, struct ast_object* ast) { if (ast == NULL) { return; } *copy = malloc(sizeof(struct ast_object)); memcpy(*copy, ast, sizeof(struct ast_object)); switch (ast->type) { case nodeType_Object: copy_ast(&(*copy)->u.on.name, ast->u.on.name); copy_ast(&(*copy)->u.on.attrs, ast->u.on.attrs); break; case nodeType_Attribute: copy_ast(&(*copy)->u.an.attr, ast->u.an.attr); copy_ast(&(*copy)->u.an.next, ast->u.an.next); break; case nodeType_Pair: copy_ast(&(*copy)->u.pn.left, ast->u.pn.left); copy_ast(&(*copy)->u.pn.right, ast->u.pn.right); break; case nodeType_Ident: (*copy)->u.in.str = strdup(ast->u.in.str); break; case nodeType_String: (*copy)->u.sn.str = strdup(ast->u.sn.str); break; case nodeType_Constant: // Nothing to copy break; default: OCT_DEBUG("node is: %d\n", ast->type); assert(!"Unsupported Node!"); break; } }
// Recursively copy an AST node, substituting static parameters and
// re-preparing nested lambdas.
//   expr  - node to copy (symbol, lambda_info, cell array, expr, or leaf)
//   sp    - flat tuple of alternating (typevar, value) static-parameter pairs
//   do_sp - nonzero to substitute integer-valued static parameters for
//           symbols in this context (disabled inside lambda arg/capture lists)
// Returns a fresh tree; leaves that need no copying are returned as-is.
static jl_value_t *copy_ast(jl_value_t *expr, jl_tuple_t *sp, int do_sp)
{
    if (jl_is_symbol(expr)) {
        if (!do_sp) return expr;
        // pre-evaluate certain static parameters to help type inference
        for(int i=0; i < jl_tuple_len(sp); i+=2) {
            assert(jl_is_typevar(jl_tupleref(sp,i)));
            if ((jl_sym_t*)expr == ((jl_tvar_t*)jl_tupleref(sp,i))->name) {
                jl_value_t *spval = jl_tupleref(sp,i+1);
                // only integer (long) values are substituted here; other
                // static-parameter values are left symbolic
                if (jl_is_long(spval))
                    return spval;
            }
        }
    }
    else if (jl_is_lambda_info(expr)) {
        jl_lambda_info_t *li = (jl_lambda_info_t*)expr;
        /*
        if (sp == jl_null && li->ast &&
            jl_array_len(jl_lam_capt((jl_expr_t*)li->ast)) == 0)
            return expr;
        */
        // TODO: avoid if above condition is true and decls have already
        // been evaluated.
        // root li across the allocating calls below
        JL_GC_PUSH1(&li);
        li = jl_add_static_parameters(li, sp);
        // inner lambda does not need the "def" link. it leads to excess object
        // retention, for example pointing to the original uncompressed AST
        // of a top-level thunk that gets type inferred.
        li->def = li;
        li->ast = jl_prepare_ast(li, li->sparams);
        JL_GC_POP();
        return (jl_value_t*)li;
    }
    else if (jl_typeis(expr,jl_array_any_type)) {
        // element-wise copy of a cell array, propagating do_sp unchanged
        jl_array_t *a = (jl_array_t*)expr;
        jl_array_t *na = jl_alloc_cell_1d(jl_array_len(a));
        JL_GC_PUSH1(&na);
        size_t i;
        for(i=0; i < jl_array_len(a); i++)
            jl_cellset(na, i, copy_ast(jl_cellref(a,i), sp, do_sp));
        JL_GC_POP();
        return (jl_value_t*)na;
    }
    else if (jl_is_expr(expr)) {
        jl_expr_t *e = (jl_expr_t*)expr;
        jl_expr_t *ne = jl_exprn(e->head, jl_array_len(e->args));
        JL_GC_PUSH1(&ne);
        if (e->head == lambda_sym) {
            // first two lambda args copied with do_sp=0, third with do_sp=1
            jl_exprarg(ne, 0) = copy_ast(jl_exprarg(e,0), sp, 0);
            jl_exprarg(ne, 1) = copy_ast(jl_exprarg(e,1), sp, 0);
            jl_exprarg(ne, 2) = copy_ast(jl_exprarg(e,2), sp, 1);
        }
        else {
            for(size_t i=0; i < jl_array_len(e->args); i++)
                jl_exprarg(ne, i) = copy_ast(jl_exprarg(e,i), sp, 1);
        }
        JL_GC_POP();
        return (jl_value_t*)ne;
    }
    // other leaf values (numbers, etc.) are immutable for our purposes
    return expr;
}