static void make_dispatch(compile_t* c, gentype_t* g)
{
  // Do nothing if we're not an actor.
  if(g->underlying != TK_ACTOR)
    return;

  // Create a dispatch function.
  const char* dispatch_name = genname_dispatch(g->type_name);
  g->dispatch_fn = codegen_addfun(c, dispatch_name, c->dispatch_type);
  LLVMSetFunctionCallConv(g->dispatch_fn, LLVMCCallConv);
  codegen_startfun(c, g->dispatch_fn, false);

  LLVMBasicBlockRef unreachable = codegen_block(c, "unreachable");

  // Read the message ID.
  LLVMValueRef msg = LLVMGetParam(g->dispatch_fn, 2);
  LLVMValueRef id_ptr = LLVMBuildStructGEP(c->builder, msg, 1, "");
  LLVMValueRef id = LLVMBuildLoad(c->builder, id_ptr, "id");

  // Store a reference to the dispatch switch. When we build behaviours, we
  // will add cases to this switch statement based on message ID.
  g->dispatch_switch = LLVMBuildSwitch(c->builder, id, unreachable, 0);

  // Mark the default case as unreachable.
  LLVMPositionBuilderAtEnd(c->builder, unreachable);
  LLVMBuildUnreachable(c->builder);
  codegen_finishfun(c);
}
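For context, here is a minimal, self-contained sketch of the same pattern outside the compiler: build a switch on a message ID with an unreachable default, keep the switch value around, and append cases to it later, the way behaviour codegen does with g->dispatch_switch. It uses the same classic (pre-opaque-pointer) LLVM-C API vintage as the code above; the module, function, and block names are invented for the demo.

/* Sketch: switch-based dispatch with an unreachable default. */
#include <llvm-c/Core.h>

int main(void)
{
  LLVMModuleRef mod = LLVMModuleCreateWithName("dispatch_demo");
  LLVMTypeRef i32 = LLVMInt32Type();

  /* void dispatch(i32 id) */
  LLVMTypeRef fn_type = LLVMFunctionType(LLVMVoidType(), &i32, 1, 0);
  LLVMValueRef fn = LLVMAddFunction(mod, "dispatch", fn_type);

  LLVMBasicBlockRef entry = LLVMAppendBasicBlock(fn, "entry");
  LLVMBasicBlockRef unreachable = LLVMAppendBasicBlock(fn, "unreachable");

  LLVMBuilderRef b = LLVMCreateBuilder();
  LLVMPositionBuilderAtEnd(b, entry);

  /* Default to the unreachable block: an unknown ID would be a bug in the
   * compiler, not a runtime condition, so the optimizer may assume it
   * never happens. */
  LLVMValueRef id = LLVMGetParam(fn, 0);
  LLVMValueRef sw = LLVMBuildSwitch(b, id, unreachable, 0);

  LLVMPositionBuilderAtEnd(b, unreachable);
  LLVMBuildUnreachable(b);

  /* Later (e.g. once per behaviour), cases are appended to the saved
   * switch value. */
  LLVMBasicBlockRef handler = LLVMAppendBasicBlock(fn, "handler.0");
  LLVMAddCase(sw, LLVMConstInt(i32, 0, 0), handler);
  LLVMPositionBuilderAtEnd(b, handler);
  LLVMBuildRetVoid(b);

  LLVMDumpModule(mod);
  LLVMDisposeBuilder(b);
  LLVMDisposeModule(mod);
  return 0;
}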
void gencall_throw(compile_t* c)
{
  LLVMValueRef func = LLVMGetNamedFunction(c->module, "pony_throw");

  if(c->frame->invoke_target != NULL)
    invoke_fun(c, func, NULL, 0, "", false);
  else
    LLVMBuildCall(c->builder, func, NULL, 0, "");

  LLVMBuildUnreachable(c->builder);
}
LLVMValueRef gen_expr(compile_t* c, ast_t* ast)
{
  LLVMValueRef ret;
  bool has_scope = ast_has_scope(ast);
  bool has_source = codegen_hassource(c);

  if(has_scope)
  {
    codegen_pushscope(c);

    // Dwarf a new lexical scope, if necessary.
    if(has_source)
      dwarf_lexicalscope(&c->dwarf, ast);
  }

  switch(ast_id(ast))
  {
    case TK_SEQ:
      ret = gen_seq(c, ast);
      break;

    case TK_FVARREF:
    case TK_FLETREF:
      ret = gen_fieldload(c, ast);
      break;

    case TK_PARAMREF:
      ret = gen_param(c, ast);
      break;

    case TK_VAR:
    case TK_LET:
      ret = gen_localdecl(c, ast);
      break;

    case TK_VARREF:
    case TK_LETREF:
      ret = gen_localload(c, ast);
      break;

    case TK_IF:
      ret = gen_if(c, ast);
      break;

    case TK_WHILE:
      ret = gen_while(c, ast);
      break;

    case TK_REPEAT:
      ret = gen_repeat(c, ast);
      break;

    case TK_TRY:
    case TK_TRY_NO_CHECK:
      ret = gen_try(c, ast);
      break;

    case TK_MATCH:
      ret = gen_match(c, ast);
      break;

    case TK_CALL:
      ret = gen_call(c, ast);
      break;

    case TK_CONSUME:
      ret = gen_expr(c, ast_childidx(ast, 1));
      break;

    case TK_RECOVER:
      ret = gen_expr(c, ast_childidx(ast, 1));
      break;

    case TK_BREAK:
      ret = gen_break(c, ast);
      break;

    case TK_CONTINUE:
      ret = gen_continue(c, ast);
      break;

    case TK_RETURN:
      ret = gen_return(c, ast);
      break;

    case TK_ERROR:
      ret = gen_error(c, ast);
      break;

    case TK_IS:
      ret = gen_is(c, ast);
      break;

    case TK_ISNT:
      ret = gen_isnt(c, ast);
      break;

    case TK_ASSIGN:
      ret = gen_assign(c, ast);
      break;

    case TK_THIS:
      ret = gen_this(c, ast);
      break;

    case TK_TRUE:
      ret = LLVMConstInt(c->i1, 1, false);
      break;

    case TK_FALSE:
      ret = LLVMConstInt(c->i1, 0, false);
      break;

    case TK_INT:
      ret = gen_int(c, ast);
      break;

    case TK_FLOAT:
      ret = gen_float(c, ast);
      break;

    case TK_STRING:
      ret = gen_string(c, ast);
      break;

    case TK_TUPLE:
      ret = gen_tuple(c, ast);
      break;

    case TK_FFICALL:
      ret = gen_ffi(c, ast);
      break;

    case TK_AMP:
      ret = gen_addressof(c, ast);
      break;

    case TK_IDENTITY:
      ret = gen_identity(c, ast);
      break;

    case TK_DONTCARE:
      ret = GEN_NOVALUE;
      break;

    case TK_COMPILER_INTRINSIC:
      ast_error(ast, "unimplemented compiler intrinsic");
      LLVMBuildUnreachable(c->builder);
      ret = GEN_NOVALUE;
      break;

    default:
      ast_error(ast, "not implemented (codegen unknown)");
      return NULL;
  }

  if(has_scope)
  {
    codegen_popscope(c);

    if(has_source)
      dwarf_finish(&c->dwarf);
  }

  return ret;
}
/*
 * Create a function that deforms a tuple of type desc up to natts columns.
 */
LLVMValueRef
slot_compile_deform(LLVMJitContext *context, TupleDesc desc, int natts)
{
    char       *funcname;

    LLVMModuleRef mod;
    LLVMBuilderRef b;

    LLVMTypeRef deform_sig;
    LLVMValueRef v_deform_fn;

    LLVMBasicBlockRef b_entry;
    LLVMBasicBlockRef b_adjust_unavail_cols;
    LLVMBasicBlockRef b_find_start;

    LLVMBasicBlockRef b_out;
    LLVMBasicBlockRef b_dead;
    LLVMBasicBlockRef *attcheckattnoblocks;
    LLVMBasicBlockRef *attstartblocks;
    LLVMBasicBlockRef *attisnullblocks;
    LLVMBasicBlockRef *attcheckalignblocks;
    LLVMBasicBlockRef *attalignblocks;
    LLVMBasicBlockRef *attstoreblocks;

    LLVMValueRef v_offp;

    LLVMValueRef v_tupdata_base;
    LLVMValueRef v_tts_values;
    LLVMValueRef v_tts_nulls;
    LLVMValueRef v_slotoffp;
    LLVMValueRef v_slowp;
    LLVMValueRef v_nvalidp;
    LLVMValueRef v_nvalid;
    LLVMValueRef v_maxatt;

    LLVMValueRef v_slot;

    LLVMValueRef v_tupleheaderp;
    LLVMValueRef v_tuplep;
    LLVMValueRef v_infomask1;
    LLVMValueRef v_infomask2;
    LLVMValueRef v_bits;

    LLVMValueRef v_hoff;

    LLVMValueRef v_hasnulls;

    /* last column (0 indexed) guaranteed to exist */
    int         guaranteed_column_number = -1;

    /* current known alignment */
    int         known_alignment = 0;

    /* if true, known_alignment describes definite offset of column */
    bool        attguaranteedalign = true;

    int         attnum;

    mod = llvm_mutable_module(context);

    funcname = llvm_expand_funcname(context, "deform");

    /*
     * Check which columns have to exist, so we don't check the row's natts
     * unnecessarily.
     */
    for (attnum = 0; attnum < desc->natts; attnum++)
    {
        Form_pg_attribute att = TupleDescAttr(desc, attnum);

        /*
         * If the column is possibly missing, we can't rely on its (or
         * subsequent) NOT NULL constraints to indicate minimum attributes in
         * the tuple, so stop here.
         */
        if (att->atthasmissing)
            break;

        /*
         * The column is NOT NULL and there have been no preceding missing
         * columns, so it's guaranteed that all columns up to here exist at
         * least in the NULL bitmap.
         */
        if (att->attnotnull)
            guaranteed_column_number = attnum;
    }

    /* Create the signature and function */
    {
        LLVMTypeRef param_types[1];

        param_types[0] = l_ptr(StructTupleTableSlot);

        deform_sig = LLVMFunctionType(LLVMVoidType(), param_types,
                                      lengthof(param_types), 0);
    }
    v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
    LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
    LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
    llvm_copy_attributes(AttributeTemplate, v_deform_fn);

    b_entry =
        LLVMAppendBasicBlock(v_deform_fn, "entry");
    b_adjust_unavail_cols =
        LLVMAppendBasicBlock(v_deform_fn, "adjust_unavail_cols");
    b_find_start =
        LLVMAppendBasicBlock(v_deform_fn, "find_startblock");
    b_out =
        LLVMAppendBasicBlock(v_deform_fn, "outblock");
    b_dead =
        LLVMAppendBasicBlock(v_deform_fn, "deadblock");

    b = LLVMCreateBuilder();

    attcheckattnoblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstartblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attisnullblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attcheckalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstoreblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);

    known_alignment = 0;

    LLVMPositionBuilderAtEnd(b, b_entry);

    /* perform allocas first, llvm only converts those to registers */
    v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");

    v_slot = LLVMGetParam(v_deform_fn, 0);

    v_tts_values =
        l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_VALUES,
                          "tts_values");
    v_tts_nulls =
        l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_ISNULL,
                          "tts_ISNULL");

    v_slotoffp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_OFF, "");
    v_slowp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_SLOW, "");
    v_nvalidp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_NVALID, "");

    v_tupleheaderp =
        l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_TUPLE,
                          "tupleheader");
    v_tuplep =
        l_load_struct_gep(b, v_tupleheaderp, FIELDNO_HEAPTUPLEDATA_DATA,
                          "tuple");
    v_bits =
        LLVMBuildBitCast(b,
                         LLVMBuildStructGEP(b, v_tuplep,
                                            FIELDNO_HEAPTUPLEHEADERDATA_BITS,
                                            ""),
                         l_ptr(LLVMInt8Type()),
                         "t_bits");
    v_infomask1 =
        l_load_struct_gep(b, v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK,
                          "infomask1");
    v_infomask2 =
        l_load_struct_gep(b, v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2,
                          "infomask2");

    /* t_infomask & HEAP_HASNULL */
    v_hasnulls =
        LLVMBuildICmp(b, LLVMIntNE,
                      LLVMBuildAnd(b,
                                   l_int16_const(HEAP_HASNULL),
                                   v_infomask1, ""),
                      l_int16_const(0),
                      "hasnulls");

    /* t_infomask2 & HEAP_NATTS_MASK */
    v_maxatt = LLVMBuildAnd(b,
                            l_int16_const(HEAP_NATTS_MASK),
                            v_infomask2,
                            "maxatt");

    v_hoff =
        l_load_struct_gep(b, v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_HOFF,
                          "t_hoff");

    v_tupdata_base =
        LLVMBuildGEP(b,
                     LLVMBuildBitCast(b,
                                      v_tuplep,
                                      l_ptr(LLVMInt8Type()),
                                      ""),
                     &v_hoff, 1,
                     "v_tupdata_base");

    /*
     * Load the tuple start offset from the slot. It is reset below in case
     * there are no already-deformed columns in the slot.
     */
    {
        LLVMValueRef v_off_start;

        v_off_start = LLVMBuildLoad(b, v_slotoffp, "v_slot_off");
        v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
        LLVMBuildStore(b, v_off_start, v_offp);
    }

    /* build the basic blocks for each attribute; we need them as jump targets */
    for (attnum = 0; attnum < natts; attnum++)
    {
        attcheckattnoblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
        attstartblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
        attisnullblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
        attcheckalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
        attalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
        attstoreblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
    }

    /*
     * Check whether it's guaranteed that all the desired attributes are
     * available in the tuple. If so, we can start deforming. If not, we need
     * to make sure to fetch the missing columns.
     */
    if ((natts - 1) <= guaranteed_column_number)
    {
        /* just skip through unnecessary blocks */
        LLVMBuildBr(b, b_adjust_unavail_cols);
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
        LLVMBuildBr(b, b_find_start);
    }
    else
    {
        LLVMValueRef v_params[3];

        /* branch if not all columns available */
        LLVMBuildCondBr(b,
                        LLVMBuildICmp(b, LLVMIntULT,
                                      v_maxatt,
                                      l_int16_const(natts),
                                      ""),
                        b_adjust_unavail_cols,
                        b_find_start);

        /* if not, memset tts_isnull of relevant cols to true */
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);

        v_params[0] = v_slot;
        v_params[1] = LLVMBuildZExt(b, v_maxatt, LLVMInt32Type(), "");
        v_params[2] = l_int32_const(natts);
        LLVMBuildCall(b, llvm_get_decl(mod, FuncSlotGetmissingattrs),
                      v_params, lengthof(v_params), "");
        LLVMBuildBr(b, b_find_start);
    }

    LLVMPositionBuilderAtEnd(b, b_find_start);

    v_nvalid = LLVMBuildLoad(b, v_nvalidp, "");

    /*
     * Build switch to go from nvalid to the right startblock. Callers
     * currently don't have the knowledge, but it'd be good for performance
     * to avoid this check when it's known that the slot is empty (e.g. in
     * scan nodes).
     */
    if (true)
    {
        LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
                                                b_dead, natts);

        for (attnum = 0; attnum < natts; attnum++)
        {
            LLVMValueRef v_attno = l_int32_const(attnum);

            LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
        }
    }
    else
    {
        /* jump from entry block to first block */
        LLVMBuildBr(b, attcheckattnoblocks[0]);
    }

    LLVMPositionBuilderAtEnd(b, b_dead);
    LLVMBuildUnreachable(b);

    /*
     * Iterate over each attribute that needs to be deformed, build code to
     * deform it.
     */
    for (attnum = 0; attnum < natts; attnum++)
    {
        Form_pg_attribute att = TupleDescAttr(desc, attnum);
        LLVMValueRef v_incby;
        int         alignto;
        LLVMValueRef l_attno = l_int16_const(attnum);
        LLVMValueRef v_attdatap;
        LLVMValueRef v_resultp;

        /* build block checking whether we did all the necessary attributes */
        LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);

        /*
         * If this is the first attribute, slot->tts_nvalid was 0. Therefore
         * reset the offset to 0; it could be left over from a previous
         * execution.
         */
        if (attnum == 0)
        {
            LLVMBuildStore(b, l_sizet_const(0), v_offp);
        }

        /*
         * Build a check whether the column is available (i.e. whether the
         * tuple has that many columns stored). We can avoid the branch if we
         * know there's a subsequent NOT NULL column.
         */
        if (attnum <= guaranteed_column_number)
        {
            LLVMBuildBr(b, attstartblocks[attnum]);
        }
        else
        {
            LLVMValueRef v_islast;

            v_islast = LLVMBuildICmp(b, LLVMIntUGE,
                                     l_attno,
                                     v_maxatt,
                                     "heap_natts");
            LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);

        /*
         * Check for nulls if necessary. No need to take missing attributes
         * into account, because if any were present, the heap tuple's natts
         * would have indicated that slot_getmissingattrs() is needed.
         */
        if (!att->attnotnull)
        {
            LLVMBasicBlockRef b_ifnotnull;
            LLVMBasicBlockRef b_ifnull;
            LLVMBasicBlockRef b_next;
            LLVMValueRef v_attisnull;
            LLVMValueRef v_nullbyteno;
            LLVMValueRef v_nullbytemask;
            LLVMValueRef v_nullbyte;
            LLVMValueRef v_nullbit;

            b_ifnotnull = attcheckalignblocks[attnum];
            b_ifnull = attisnullblocks[attnum];

            if (attnum + 1 == natts)
                b_next = b_out;
            else
                b_next = attcheckattnoblocks[attnum + 1];

            v_nullbyteno = l_int32_const(attnum >> 3);
            v_nullbytemask = l_int8_const(1 << ((attnum) & 0x07));
            v_nullbyte = l_load_gep1(b, v_bits, v_nullbyteno, "attnullbyte");

            v_nullbit = LLVMBuildICmp(b,
                                      LLVMIntEQ,
                                      LLVMBuildAnd(b, v_nullbyte,
                                                   v_nullbytemask, ""),
                                      l_int8_const(0),
                                      "attisnull");

            v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");

            LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);

            LLVMPositionBuilderAtEnd(b, b_ifnull);

            /* store null-byte */
            LLVMBuildStore(b, l_int8_const(1),
                           LLVMBuildGEP(b, v_tts_nulls, &l_attno, 1, ""));
            /* store zero datum */
            LLVMBuildStore(b, l_sizet_const(0),
                           LLVMBuildGEP(b, v_tts_values, &l_attno, 1, ""));

            LLVMBuildBr(b, b_next);
            attguaranteedalign = false;
        }
        else
        {
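The excerpt breaks off here, but the null check it has just emitted is worth unpacking. A plain-C sketch of the bit math the IR open-codes follows; the helper name is invented for illustration (PostgreSQL's own equivalent is the att_isnull() macro in access/tupmacs.h), and only the byte/mask arithmetic is taken from the generated code above.

/* Sketch: the null-bitmap test mirrored by v_nullbyteno / v_nullbytemask /
 * v_nullbit above. One bit per attribute; a CLEAR bit means NULL. */
#include <stdbool.h>
#include <stdint.h>

static bool
attr_is_null(bool has_nulls, const uint8_t *t_bits, int attnum)
{
    if (!has_nulls)
        return false;           /* no bitmap at all: nothing is NULL */

    /* byte attnum >> 3, bit attnum & 0x07 within that byte */
    return (t_bits[attnum >> 3] & (1 << (attnum & 0x07))) == 0;
}

The generated code computes the same thing branch-free up to the final conditional: v_nullbit is the "bit is clear" comparison, and v_attisnull ANDs it with v_hasnulls before the single LLVMBuildCondBr.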
LLVMValueRef gen_if(compile_t* c, ast_t* ast)
{
  bool needed = is_result_needed(ast);
  ast_t* type = ast_type(ast);

  AST_GET_CHILDREN(ast, cond, left, right);

  ast_t* left_type = ast_type(left);
  ast_t* right_type = ast_type(right);

  // We will have no type if both branches have return statements.
  reach_type_t* phi_type = NULL;

  if(!is_control_type(type))
    phi_type = reach_type(c->reach, type);

  LLVMValueRef c_value = gen_expr(c, cond);

  if(c_value == NULL)
    return NULL;

  // If the conditional is constant, generate only one branch.
  bool gen_left = true;
  bool gen_right = true;

  if(LLVMIsAConstantInt(c_value))
  {
    int value = (int)LLVMConstIntGetZExtValue(c_value);

    if(value == 0)
      gen_left = false;
    else
      gen_right = false;
  }

  LLVMBasicBlockRef then_block = codegen_block(c, "if_then");
  LLVMBasicBlockRef else_block = codegen_block(c, "if_else");
  LLVMBasicBlockRef post_block = NULL;

  // If both branches return, we have no post block.
  if(!is_control_type(type))
    post_block = codegen_block(c, "if_post");

  LLVMValueRef test = LLVMBuildTrunc(c->builder, c_value, c->i1, "");
  LLVMBuildCondBr(c->builder, test, then_block, else_block);

  // Left branch.
  LLVMPositionBuilderAtEnd(c->builder, then_block);
  LLVMValueRef l_value;

  if(gen_left)
  {
    l_value = gen_expr(c, left);
  }
  else if(phi_type != NULL)
  {
    l_value = LLVMConstNull(phi_type->use_type);
  }
  else
  {
    LLVMBuildUnreachable(c->builder);
    l_value = GEN_NOVALUE;
  }

  if(l_value != GEN_NOVALUE)
  {
    if(needed)
      l_value = gen_assign_cast(c, phi_type->use_type, l_value, left_type);

    if(l_value == NULL)
      return NULL;

    then_block = LLVMGetInsertBlock(c->builder);
    LLVMBuildBr(c->builder, post_block);
  }

  // Right branch.
  LLVMPositionBuilderAtEnd(c->builder, else_block);
  LLVMValueRef r_value;

  if(gen_right)
  {
    r_value = gen_expr(c, right);
  }
  else if(phi_type != NULL)
  {
    r_value = LLVMConstNull(phi_type->use_type);
  }
  else
  {
    LLVMBuildUnreachable(c->builder);
    r_value = GEN_NOVALUE;
  }

  // If the right side returns, we don't branch to the post block.
  if(r_value != GEN_NOVALUE)
  {
    if(needed)
      r_value = gen_assign_cast(c, phi_type->use_type, r_value, right_type);

    if(r_value == NULL)
      return NULL;

    else_block = LLVMGetInsertBlock(c->builder);
    LLVMBuildBr(c->builder, post_block);
  }

  // If both sides return, we return a sentinel value.
  if(is_control_type(type))
    return GEN_NOVALUE;

  // Continue in the post block.
  LLVMPositionBuilderAtEnd(c->builder, post_block);

  if(needed)
  {
    LLVMValueRef phi = LLVMBuildPhi(c->builder, phi_type->use_type, "");

    if(l_value != GEN_NOVALUE)
      LLVMAddIncoming(phi, &l_value, &then_block, 1);

    if(r_value != GEN_NOVALUE)
      LLVMAddIncoming(phi, &r_value, &else_block, 1);

    return phi;
  }

  return GEN_NOTNEEDED;
}
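Stripped of ponyc's helpers, gen_if emits the standard diamond CFG: a conditional branch into two arms that both fall through to a post block whose phi merges their values. The following is a minimal sketch of that generic LLVM-C pattern with invented function and block names; note in particular why gen_if re-reads LLVMGetInsertBlock() after each arm.

/* Sketch: builds i32 select_demo(i1 cond) { return cond ? 1 : 2; } */
#include <llvm-c/Core.h>

LLVMValueRef build_if_demo(LLVMModuleRef mod, LLVMBuilderRef b)
{
  LLVMTypeRef i1 = LLVMInt1Type();
  LLVMTypeRef i32 = LLVMInt32Type();
  LLVMTypeRef fn_type = LLVMFunctionType(i32, &i1, 1, 0);
  LLVMValueRef fn = LLVMAddFunction(mod, "select_demo", fn_type);

  LLVMBasicBlockRef entry = LLVMAppendBasicBlock(fn, "entry");
  LLVMBasicBlockRef then_block = LLVMAppendBasicBlock(fn, "if_then");
  LLVMBasicBlockRef else_block = LLVMAppendBasicBlock(fn, "if_else");
  LLVMBasicBlockRef post_block = LLVMAppendBasicBlock(fn, "if_post");

  LLVMPositionBuilderAtEnd(b, entry);
  LLVMBuildCondBr(b, LLVMGetParam(fn, 0), then_block, else_block);

  /* Each arm computes its value, then falls through to the post block. */
  LLVMPositionBuilderAtEnd(b, then_block);
  LLVMValueRef l_value = LLVMConstInt(i32, 1, 0);
  LLVMBuildBr(b, post_block);

  LLVMPositionBuilderAtEnd(b, else_block);
  LLVMValueRef r_value = LLVMConstInt(i32, 2, 0);
  LLVMBuildBr(b, post_block);

  /* A phi's incoming blocks must be the blocks that actually branch here.
   * Generating an arm can end in a different block than it began (nested
   * control flow), which is why gen_if refreshes then_block/else_block via
   * LLVMGetInsertBlock() before adding incoming edges. Here the arms are
   * single blocks, so the original handles are still correct. */
  LLVMPositionBuilderAtEnd(b, post_block);
  LLVMValueRef phi = LLVMBuildPhi(b, i32, "");
  LLVMAddIncoming(phi, &l_value, &then_block, 1);
  LLVMAddIncoming(phi, &r_value, &else_block, 1);
  LLVMBuildRet(b, phi);
  return fn;
}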