/* Implement TARGET_STATIC_CHAIN.  The incoming static chain is found
   two words above the argument pointer; the outgoing static chain is
   stored one word below the stack pointer.  */

static rtx
moxie_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  rtx addr, mem;

  if (incoming_p)
    addr = plus_constant (Pmode, arg_pointer_rtx, 2 * UNITS_PER_WORD);
  else
    addr = plus_constant (Pmode, stack_pointer_rtx, -UNITS_PER_WORD);

  mem = gen_rtx_MEM (Pmode, addr);
  MEM_NOTRAP_P (mem) = 1;

  return mem;
}
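/* For context: a backend hands this hook to the middle end through the
   standard target-macro pattern.  A minimal sketch, assuming the usual
   TARGET_STATIC_CHAIN registration in the target's source file (the
   exact placement may differ):  */

#undef TARGET_STATIC_CHAIN
#define TARGET_STATIC_CHAIN moxie_static_chain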
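/* Why both addresses matter: in GNU C, a nested function that refers
   to a local of its enclosing function is called with a static chain.
   A hypothetical example, for illustration only -- on moxie the caller
   leaves the chain at sp - UNITS_PER_WORD, and the callee reads it
   back at ap + 2 * UNITS_PER_WORD, per moxie_static_chain above:  */

int
outer (int x)
{
  int inner (int y) { return x + y; }  /* reads X from OUTER's frame */
  int (*fp) (int) = inner;  /* taking the address forces a trampoline
			       that loads the static chain */
  return fp (1);
}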
/* Expand the detach builtin: push the parent of the current stack
   frame onto the worker's tail and mark the frame detached.  */

void
expand_builtin_cilk_detach (tree exp)
{
  rtx_insn *insn;
  tree fptr = get_frame_arg (exp);

  if (fptr == NULL_TREE)
    return;

  tree parent = cilk_dot (fptr, CILK_TI_FRAME_PARENT, 0);
  tree worker = cilk_dot (fptr, CILK_TI_FRAME_WORKER, 0);
  tree tail = cilk_arrow (worker, CILK_TI_WORKER_TAIL, 1);

  rtx wreg = expand_expr (worker, NULL_RTX, Pmode, EXPAND_NORMAL);
  if (GET_CODE (wreg) != REG)
    wreg = copy_to_reg (wreg);
  rtx preg = expand_expr (parent, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* TMP <- WORKER.TAIL
    *TMP <- PARENT
     TMP <- TMP + 1
     WORKER.TAIL <- TMP  */

  HOST_WIDE_INT worker_tail_offset =
    tree_to_shwi (DECL_FIELD_OFFSET (cilk_trees[CILK_TI_WORKER_TAIL]))
    + tree_to_shwi (DECL_FIELD_BIT_OFFSET (cilk_trees[CILK_TI_WORKER_TAIL]))
      / BITS_PER_UNIT;
  rtx tmem0 = gen_rtx_MEM (Pmode,
                           plus_constant (Pmode, wreg, worker_tail_offset));
  set_mem_attributes (tmem0, tail, 0);
  MEM_NOTRAP_P (tmem0) = 1;
  gcc_assert (MEM_VOLATILE_P (tmem0));
  rtx treg = copy_to_mode_reg (Pmode, tmem0);
  rtx tmem1 = gen_rtx_MEM (Pmode, treg);
  set_mem_attributes (tmem1, TREE_TYPE (TREE_TYPE (tail)), 0);
  MEM_NOTRAP_P (tmem1) = 1;
  emit_move_insn (tmem1, preg);
  emit_move_insn (treg, plus_constant (Pmode, treg,
                                       GET_MODE_SIZE (Pmode)));

  /* There is a release barrier (st8.rel, membar #StoreStore, sfence,
     lwsync, etc.) between the two stores.  On x86 normal volatile
     stores have proper semantics; the sfence would only be needed for
     nontemporal stores (which we could generate using the storent
     optab, for no benefit in this case).

     The predicate may return false even for a REG if this is the
     limited release operation that only stores 0.  */
  enum insn_code icode = direct_optab_handler (sync_lock_release_optab, Pmode);
  if (icode != CODE_FOR_nothing
      && insn_data[icode].operand[1].predicate (treg, Pmode)
      && (insn = GEN_FCN (icode) (tmem0, treg)) != NULL_RTX)
    emit_insn (insn);
  else
    emit_move_insn (tmem0, treg);

  /* The memory barrier inserted above should not prevent the load of
     flags from being moved before the stores, but in practice it does
     because it is implemented with unspec_volatile.  In-order RISC
     machines should explicitly load flags earlier.  */
  tree flags = cilk_dot (fptr, CILK_TI_FRAME_FLAGS, 0);
  expand_expr (build2 (MODIFY_EXPR, void_type_node, flags,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (flags), flags,
                               build_int_cst (TREE_TYPE (flags),
                                              CILK_FRAME_DETACHED))),
               const0_rtx, VOIDmode, EXPAND_NORMAL);
}
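/* A rough C analogue of the insn sequence emitted above, for
   illustration only.  The struct layouts and the FRAME_DETACHED value
   are simplified stand-ins for the Cilk runtime's __cilkrts_worker and
   __cilkrts_stack_frame; the point is just the ordering of the four
   steps and the release store between the two writes.  */

struct stack_frame
{
  unsigned int flags;
  struct stack_frame *parent;
};

struct worker
{
  struct stack_frame **tail;  /* tail of the worker's steal deque */
};

#define FRAME_DETACHED 0x4  /* stand-in for CILK_FRAME_DETACHED */

static void
detach_sketch (struct worker *w, struct stack_frame *sf)
{
  struct stack_frame **tail = w->tail;  /* TMP <- WORKER.TAIL */
  *tail = sf->parent;                   /* *TMP <- PARENT */
  tail++;                               /* TMP <- TMP + 1 */
  /* WORKER.TAIL <- TMP, as a release store so the parent pointer is
     visible to thieves before the new tail is; this is the role of
     the sync_lock_release / volatile-store pair above.  */
  __atomic_store_n (&w->tail, tail, __ATOMIC_RELEASE);
  sf->flags |= FRAME_DETACHED;          /* flags |= CILK_FRAME_DETACHED */
}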