size_t delete_methods( std::vector<DexClass*>& scope, std::unordered_set<DexMethod*>& removable, std::function<DexMethod*(DexMethod*, MethodSearch search)> resolver) { // if a removable candidate is invoked do not delete walk_opcodes(scope, [](DexMethod* meth) { return true; }, [&](DexMethod* meth, DexInstruction* insn) { if (is_invoke(insn->opcode())) { const auto mop = static_cast<DexOpcodeMethod*>(insn); auto callee = resolver(mop->get_method(), opcode_to_search(insn)); if (callee != nullptr) { removable.erase(callee); } } }); size_t deleted = 0; for (auto callee : removable) { if (!callee->is_concrete()) continue; if (do_not_strip(callee)) continue; auto cls = type_class(callee->get_class()); always_assert_log(cls != nullptr, "%s is concrete but does not have a DexClass\n", SHOW(callee)); if (callee->is_virtual()) { cls->get_vmethods().remove(callee); } else { cls->get_dmethods().remove(callee); } deleted++; TRACE(DELMET, 4, "removing %s\n", SHOW(callee)); } return deleted; }
bool IRInstruction::src_is_wide(size_t i) const { always_assert(i < srcs_size()); if (is_invoke(m_opcode)) { return invoke_src_is_wide(i); } switch (opcode()) { case OPCODE_MOVE_WIDE: case OPCODE_RETURN_WIDE: return i == 0; case OPCODE_CMPL_DOUBLE: case OPCODE_CMPG_DOUBLE: case OPCODE_CMP_LONG: return i == 0 || i == 1; case OPCODE_APUT_WIDE: case OPCODE_IPUT_WIDE: case OPCODE_SPUT_WIDE: return i == 0; case OPCODE_NEG_LONG: case OPCODE_NOT_LONG: case OPCODE_NEG_DOUBLE: case OPCODE_LONG_TO_INT: case OPCODE_LONG_TO_FLOAT: case OPCODE_LONG_TO_DOUBLE: case OPCODE_DOUBLE_TO_INT: case OPCODE_DOUBLE_TO_LONG: case OPCODE_DOUBLE_TO_FLOAT: return i == 0; case OPCODE_ADD_LONG: case OPCODE_SUB_LONG: case OPCODE_MUL_LONG: case OPCODE_DIV_LONG: case OPCODE_REM_LONG: case OPCODE_AND_LONG: case OPCODE_OR_LONG: case OPCODE_XOR_LONG: case OPCODE_ADD_DOUBLE: case OPCODE_SUB_DOUBLE: case OPCODE_MUL_DOUBLE: case OPCODE_DIV_DOUBLE: case OPCODE_REM_DOUBLE: return i == 0 || i == 1; case OPCODE_SHL_LONG: case OPCODE_SHR_LONG: case OPCODE_USHR_LONG: return i == 0; default: return false; } }
/**
 * Append an invoke instruction to this block, calling `meth` with the
 * registers held by `args` (one source register per argument word).
 */
void MethodBlock::invoke(IROpcode opcode,
                         DexMethodRef* meth,
                         const std::vector<Location>& args) {
  always_assert(is_invoke(opcode));
  auto insn = new IRInstruction(opcode);
  const auto nargs = static_cast<uint16_t>(args.size());
  insn->set_method(meth)->set_arg_word_count(nargs);
  for (uint16_t idx = 0; idx != nargs; ++idx) {
    insn->set_src(idx, args[idx].get_reg());
  }
  push_instruction(insn);
}
/**
 * Inline, into the caller held by `inline_context`, every call site whose
 * resolved target appears in `callees`. Two phases: (1) scan the caller's
 * instructions collecting (callee, invoke-opcode) pairs, (2) attempt to
 * inline each collected pair, updating the shared `info` counters.
 */
void MultiMethodInliner::inline_callees(
    InlineContext& inline_context, std::vector<DexMethod*>& callees) {
  size_t found = 0;
  auto caller = inline_context.caller;
  auto insns = caller->get_code()->get_instructions();
  // walk the caller opcodes collecting all candidates to inline
  // Build a callee to opcode map
  std::vector<std::pair<DexMethod*, DexOpcodeMethod*>> inlinables;
  for (auto insn = insns.begin(); insn != insns.end(); ++insn) {
    if (!is_invoke((*insn)->opcode())) continue;
    auto mop = static_cast<DexOpcodeMethod*>(*insn);
    auto callee = resolver(mop->get_method(), opcode_to_search(*insn));
    if (callee == nullptr) continue;
    // Only invokes whose resolved target is in the requested set count.
    if (std::find(callees.begin(), callees.end(), callee) == callees.end()) {
      continue;
    }
    always_assert(callee->is_concrete());
    found++;
    inlinables.push_back(std::make_pair(callee, mop));
    // Stop scanning once every requested callee has a call site.
    if (found == callees.size()) break;
  }
  if (found != callees.size()) {
    // Some requested callees had no matching call site in this caller.
    always_assert(found <= callees.size());
    info.not_found += callees.size() - found;
  }
  // attempt to inline all inlinable candidates
  for (auto inlinable : inlinables) {
    auto callee = inlinable.first;
    auto mop = inlinable.second;
    if (!is_inlinable(callee, caller)) continue;
    auto op = mop->opcode();
    // invoke/range forms are not supported by inline_16regs; skip and count.
    if (is_invoke_range(op)) {
      info.invoke_range++;
      continue;
    }
    TRACE(MMINL, 4, "inline %s (%d) in %s (%d)\n",
          SHOW(callee),
          caller->get_code()->get_registers_size(),
          SHOW(caller),
          callee->get_code()->get_registers_size() -
              callee->get_code()->get_ins_size());
    // The callee's body becomes visible from the caller's class, so widen
    // visibility of everything it references first.
    change_visibility(callee);
    MethodTransform::inline_16regs(inline_context, callee, mop);
    info.calls_inlined++;
    inlined.insert(callee);
  }
}
/**
 * Append an invoke instruction to this block, calling `meth` with the
 * registers held by `args`, and keep the enclosing method's outs count
 * large enough for this call.
 */
void MethodBlock::invoke(DexOpcode opcode,
                         DexMethod* meth,
                         std::vector<Location>& args) {
  always_assert(is_invoke(opcode));
  auto insn = new DexOpcodeMethod(opcode, meth, 0);
  const auto nargs = static_cast<uint16_t>(args.size());
  insn->set_arg_word_count(nargs);
  for (uint16_t idx = 0; idx != nargs; ++idx) {
    auto loc = args[idx];
    insn->set_src(idx, reg_num(loc));
  }
  // An invoke needs `nargs` outgoing argument words available.
  if (nargs > mc->out_count) {
    mc->out_count = nargs;
  }
  push_instruction(insn);
}
/**
 * Ensure the structures in DelSuperTest.java are as expected
 * following a redex transformation.
 */
TEST_F(PostVerify, DelSuper) {
  std::cout << "Loaded classes: " << classes.size() << std::endl;

  // C1 and C2 should both still exist after the transformation.
  auto c1 = find_class_named(
      classes, "Lcom/facebook/redex/test/instr/DelSuperTest$C1;");
  ASSERT_NE(nullptr, c1);
  auto c2 = find_class_named(
      classes, "Lcom/facebook/redex/test/instr/DelSuperTest$C2;");
  ASSERT_NE(nullptr, c2);

  // C2.optimized1 (and ideally C2.optimized2) should be gone.
  // XXX: optimized2() doesn't get delsuper treatment due to inlining of
  // C1.optimize2(?)
  auto&& m2 = !m::any_vmethods(
      m::named<DexMethod>("optimized1")/* || m::named<DexMethod>("optimized2")*/);
  ASSERT_TRUE(m2.matches(c2));

  // C1 and C2 should both have all 4 notOptimized* methods.
  auto&& m3 = m::any_vmethods(m::named<DexMethod>("notOptimized1")) &&
              m::any_vmethods(m::named<DexMethod>("notOptimized2")) &&
              m::any_vmethods(m::named<DexMethod>("notOptimized3")) &&
              m::any_vmethods(m::named<DexMethod>("notOptimized4"));
  ASSERT_TRUE(m3.matches(c1));
  ASSERT_TRUE(m3.matches(c2));

  // Check that the invoke instructions are fixed up as well: every
  // remaining call to optimized1 should now target C1 directly.
  auto test_class = find_class_named(
      classes, "Lcom/facebook/redex/test/instr/DelSuperTest;");
  auto test_opt_1 = find_vmethod_named(*test_class, "testOptimized1");
  int optimized1_count = 0;
  for (auto& insn : test_opt_1->get_code()->get_instructions()) {
    if (is_invoke(insn->opcode())) {
      auto mop = static_cast<DexOpcodeMethod*>(insn);
      auto m = mop->get_method();
      if (strcmp(m->get_name()->c_str(), "optimized1") == 0) {
        ASSERT_STREQ(m->get_class()->get_name()->c_str(),
                     "Lcom/facebook/redex/test/instr/DelSuperTest$C1;");
        ++optimized1_count;
      }
    }
  }
  // testOptimized1 contains exactly 3 call sites of optimized1.
  ASSERT_EQ(optimized1_count, 3);
}
/**
 * Largest register number that source operand `src_index` of `insn` may
 * address, given whether that source is wide.
 */
reg_t max_value_for_src(const IRInstruction* insn,
                        size_t src_index,
                        bool src_is_wide) {
  const auto op = insn->opcode();
  auto limit = max_unsigned_value(src_bit_width(op, src_index));
  if (opcode::has_range_form(op) && insn->srcs_size() == 1) {
    // An `invoke {v0}` opcode can always be rewritten as `invoke/range {v0}`
    limit = max_unsigned_value(16);
  } else if (is_invoke(op) && src_is_wide) {
    // invoke instructions need to address both pairs of a wide register in
    // their denormalized form. We are dealing with the normalized form
    // here, so we need to reserve one register for denormalization. I.e.
    // `invoke-static {v14} LFoo.a(J)` will expand into
    // `invoke-static {v14, v15} LFoo.a(J)` after denormalization.
    --limit;
  }
  return limit;
}
/**
 * Convert an invoke's source list from normalized form (one source per
 * argument, wide args occupying a single slot) to denormalized/dex form
 * (wide args occupy two consecutive slots: vN and vN+1).
 * No-op for non-invoke instructions or invokes without wide args.
 */
void IRInstruction::denormalize_registers() {
  if (is_invoke(m_opcode)) {
    auto& args = get_method()->get_proto()->get_args()->get_type_list();
    std::vector<uint16_t> srcs;
    size_t args_idx {0};
    size_t srcs_idx {0};
    // Non-static invokes carry an implicit `this` in the first source slot;
    // it is never wide, so copy it through unchanged.
    if (m_opcode != OPCODE_INVOKE_STATIC) {
      srcs.push_back(src(srcs_idx++));
    }
    bool has_wide {false};
    for (; args_idx < args.size(); ++args_idx, ++srcs_idx) {
      srcs.push_back(src(srcs_idx));
      // A wide arg expands to the register pair (vN, vN+1); normalized form
      // guarantees the pair starts at src(srcs_idx).
      if (is_wide_type(args.at(args_idx))) {
        srcs.push_back(src(srcs_idx) + 1);
        has_wide = true;
      }
    }
    // Only rewrite the source list if something actually expanded.
    if (has_wide) {
      m_srcs = srcs;
    }
  }
}
void IRInstruction::normalize_registers() { if (is_invoke(opcode())) { auto& args = get_method()->get_proto()->get_args()->get_type_list(); size_t old_srcs_idx{0}; size_t srcs_idx{0}; if (m_opcode != OPCODE_INVOKE_STATIC) { ++srcs_idx; ++old_srcs_idx; } for (size_t args_idx = 0; args_idx < args.size(); ++args_idx) { always_assert_log( old_srcs_idx < srcs_size(), "Invalid arg indices in %s args_idx %d old_srcs_idx %d\n", SHOW(this), args_idx, old_srcs_idx); set_src(srcs_idx++, src(old_srcs_idx)); old_srcs_idx += is_wide_type(args.at(args_idx)) ? 2 : 1; } always_assert(old_srcs_idx == srcs_size()); set_arg_word_count(srcs_idx); } }
// Check that visibility / accessibility changes to the current method // won't need to change a referenced method into a virtual or static one. bool gather_invoked_methods_that_prevent_relocation( const DexMethod* method, std::unordered_set<DexMethodRef*>* methods_preventing_relocation) { auto code = method->get_code(); always_assert(code); bool can_relocate = true; for (const auto& mie : InstructionIterable(code)) { auto insn = mie.insn; auto opcode = insn->opcode(); if (is_invoke(opcode)) { auto meth = resolve_method(insn->get_method(), opcode_to_search(insn)); if (!meth && opcode == OPCODE_INVOKE_VIRTUAL && unknown_virtuals::is_method_known_to_be_public(insn->get_method())) { continue; } if (meth) { always_assert(meth->is_def()); if (meth->is_external() && !is_public(meth)) { meth = nullptr; } else if (opcode == OPCODE_INVOKE_DIRECT && !is_init(meth)) { meth = nullptr; } } if (!meth) { can_relocate = false; if (!methods_preventing_relocation) { break; } methods_preventing_relocation->emplace(insn->get_method()); } } } return can_relocate; }
/**
 * Add to the inlinable set every method in `methods` that has exactly one
 * call site in `scope`.
 */
void SimpleInlinePass::select_single_called(
    Scope& scope, std::unordered_set<DexMethod*>& methods) {
  std::unordered_map<DexMethod*, int> calls;
  for (const auto& method : methods) {
    calls[method] = 0;
  }
  // count call sites for each method
  walk_opcodes(scope,
               [](DexMethod* meth) { return true; },
               [&](DexMethod* meth, DexInstruction* insn) {
                 if (is_invoke(insn->opcode())) {
                   auto mop = static_cast<DexOpcodeMethod*>(insn);
                   auto callee = resolve_method(
                       mop->get_method(), opcode_to_search(insn),
                       resolved_refs);
                   if (callee != nullptr && callee->is_concrete() &&
                       methods.count(callee) > 0) {
                     calls[callee]++;
                   }
                 }
               });
  // pick methods with a single call site and add to candidates.
  // This vector usage is only because of logging we should remove it
  // once the optimization is "closed"
  std::vector<std::vector<DexMethod*>> calls_group(MAX_COUNT);
  // const& — avoid copying each (DexMethod*, int) pair on iteration.
  for (const auto& call_it : calls) {
    if (call_it.second >= MAX_COUNT) {
      calls_group[MAX_COUNT - 1].push_back(call_it.first);
      continue;
    }
    calls_group[call_it.second].push_back(call_it.first);
  }
  // method_breakup has (logging) side effects; it must be called outside
  // of assert(), which compiles to nothing under NDEBUG.
  const bool breakup_ok = method_breakup(calls_group);
  assert(breakup_ok);
  (void)breakup_ok; // silence unused-variable warning in release builds
  for (auto callee : calls_group[1]) {
    inlinable.insert(callee);
  }
}
/**
 * Scan `code` for invokes that reference a potential bridgee; any bridge
 * whose bridgee is referenced by some other method is no longer a
 * candidate, so drop it from m_bridges_to_bridgees.
 */
void exclude_referenced_bridgee(DexMethod* code_method, const DexCode& code) {
  auto const& insts = code.get_instructions();
  for (auto inst : insts) {
    if (!is_invoke(inst->opcode())) {
      continue;
    }
    auto callee = static_cast<DexOpcodeMethod*>(inst)->get_method();
    MethodRef key(callee->get_class(), callee->get_name(),
                  callee->get_proto());
    auto range = m_potential_bridgee_refs.equal_range(key);
    for (auto it = range.first; it != range.second; ++it) {
      auto bridge = it->second;
      // The bridge referencing its own bridgee is expected — skip it.
      if (bridge == code_method) {
        continue;
      }
      TRACE(BRIDGE, 5,
            "Rejecting, reference `%s.%s.%s' in `%s' blocks `%s'\n",
            SHOW(callee->get_class()),
            SHOW(callee->get_name()),
            SHOW(callee->get_proto()),
            SHOW(code_method),
            SHOW(bridge));
      m_bridges_to_bridgees.erase(bridge);
    }
  }
}
/**
 * Build the inliner: remember which types live in the primary dex, then
 * walk every opcode in `scope` recording, for each invoke of an inlinable
 * candidate, both directions of the call graph (callee -> callers and
 * caller -> callees).
 */
MultiMethodInliner::MultiMethodInliner(
    std::vector<DexClass*>& scope,
    DexClasses& primary_dex,
    std::unordered_set<DexMethod*>& candidates,
    std::function<DexMethod*(DexMethod*, MethodSearch)> resolver)
    : resolver(resolver) {
  for (const auto& cls : primary_dex) {
    primary.insert(cls->get_type());
  }
  walk_opcodes(
      scope,
      [](DexMethod*) { return true; },
      [&](DexMethod* caller, DexInstruction* insn) {
        if (!is_invoke(insn->opcode())) return;
        auto mop = static_cast<DexOpcodeMethod*>(insn);
        auto callee = resolver(mop->get_method(), opcode_to_search(insn));
        // Only concrete methods from the candidate set participate.
        if (callee == nullptr || !callee->is_concrete()) return;
        if (candidates.count(callee) == 0) return;
        callee_caller[callee].push_back(caller);
        caller_callee[caller].push_back(callee);
      });
}
/**
 * IRCode flavor of the bridgee-reference scan: any bridge whose bridgee is
 * invoked by some other method is dropped from m_bridges_to_bridgees.
 */
void exclude_referenced_bridgee(DexMethod* code_method, IRCode& code) {
  for (auto& mie : InstructionIterable(&code)) {
    auto insn = mie.insn;
    if (!is_invoke(insn->opcode())) {
      continue;
    }
    auto callee = insn->get_method();
    auto range = m_potential_bridgee_refs.equal_range(MethodRef(
        callee->get_class(), callee->get_name(), callee->get_proto()));
    for (auto it = range.first; it != range.second; ++it) {
      auto bridge = it->second;
      // The bridge referencing its own bridgee is expected — skip it.
      if (bridge == code_method) {
        continue;
      }
      TRACE(BRIDGE, 5,
            "Rejecting, reference `%s.%s.%s' in `%s' blocks `%s'\n",
            SHOW(callee->get_class()),
            SHOW(callee->get_name()),
            SHOW(callee->get_proto()),
            SHOW(code_method),
            SHOW(bridge));
      m_bridges_to_bridgees.erase(bridge);
    }
  }
}