// Writes a human-readable form of this offset to the given stream,
// dispatching on the kind of variable storage this offset refers to.
void VarOffset::dump(PrintStream& out) const
{
    switch (m_kind) {
    case VarKind::Invalid:
        // No valid storage: print a fixed marker instead of an offset.
        out.print("invalid");
        return;
    case VarKind::Scope:
        out.print(scopeOffset());
        return;
    case VarKind::Stack:
        out.print(stackOffset());
        return;
    case VarKind::DirectArgument:
        out.print(capturedArgumentsOffset());
        return;
    }
    // Every VarKind is handled above; reaching here means m_kind is corrupt.
    RELEASE_ASSERT_NOT_REACHED();
}
/**
 * Computes the memory location accessed by the given term and records it
 * in the dataflow via setMemoryLocation(), returning the stored location.
 *
 * \param term         Term being accessed (must be a memory-location access
 *                     or a dereference; other kinds yield an invalid location
 *                     and a warning).
 * \param definitions  Reaching definitions used to evaluate the dereferenced
 *                     address.
 *
 * \return Reference to the memory location stored in the dataflow for the
 *         term; an invalid (default-constructed) MemoryLocation when the
 *         location cannot be determined.
 */
const MemoryLocation &DataflowAnalyzer::computeMemoryLocation(const Term *term, const ReachingDefinitions &definitions) {
    // Immediately-invoked lambda computes the location; setMemoryLocation()
    // caches it and hands back a reference to the stored copy.
    return dataflow().setMemoryLocation(term, [&]() -> MemoryLocation {
        switch (term->kind()) {
            case Term::MEMORY_LOCATION_ACCESS: {
                // The term names its location directly.
                return term->asMemoryLocationAccess()->memoryLocation();
            }
            case Term::DEREFERENCE: {
                auto dereference = term->asDereference();
                auto addressValue = computeValue(dereference->address(), definitions);
                if (addressValue->abstractValue().isConcrete()) {
                    if (dereference->domain() == MemoryDomain::MEMORY) {
                        // Main-memory addresses are byte-granular; MemoryLocation
                        // addresses are in bits, hence the CHAR_BIT scaling.
                        return MemoryLocation(
                            dereference->domain(),
                            addressValue->abstractValue().asConcrete().value() * CHAR_BIT,
                            dereference->size());
                    } else {
                        // Non-memory domains (e.g. registers) are already
                        // bit-addressed; use the concrete value as-is.
                        return MemoryLocation(
                            dereference->domain(),
                            addressValue->abstractValue().asConcrete().value(),
                            dereference->size());
                    }
                } else if (addressValue->isStackOffset()) {
                    // A symbolic stack offset maps into the STACK domain,
                    // again converting bytes to bits.
                    return MemoryLocation(MemoryDomain::STACK, addressValue->stackOffset() * CHAR_BIT, dereference->size());
                } else {
                    // Address is neither concrete nor a stack offset:
                    // the location is unknown.
                    return MemoryLocation();
                }
                // NOTE: the original code had an unreachable `break;` here —
                // every path above returns — removed as dead code.
            }
            default: {
                log_.warning(tr("%1: Term kind %2 cannot have a memory location.").arg(Q_FUNC_INFO).arg(term->kind()));
                return MemoryLocation();
            }
        }
    }());
}
Value *DataflowAnalyzer::computeValue(const Term *term, const MemoryLocation &memoryLocation, const ReachingDefinitions &definitions) { assert(term); assert(term->isRead()); assert(memoryLocation || definitions.empty()); auto value = dataflow().getValue(term); if (definitions.empty()) { return value; } auto byteOrder = architecture()->getByteOrder(memoryLocation.domain()); /* * Merge abstract values. */ auto abstractValue = value->abstractValue(); foreach (const auto &chunk, definitions.chunks()) { assert(memoryLocation.covers(chunk.location())); /* * Mask of bits inside abstractValue which are covered by chunk's location. */ auto mask = bitMask<ConstantValue>(chunk.location().size()); if (byteOrder == ByteOrder::LittleEndian) { mask = bitShift(mask, chunk.location().addr() - memoryLocation.addr()); } else { mask = bitShift(mask, memoryLocation.endAddr() - chunk.location().endAddr()); } foreach (auto definition, chunk.definitions()) { auto definitionLocation = dataflow().getMemoryLocation(definition); assert(definitionLocation.covers(chunk.location())); auto definitionValue = dataflow().getValue(definition); auto definitionAbstractValue = definitionValue->abstractValue(); /* * Shift definition's abstract value to match term's location. */ if (byteOrder == ByteOrder::LittleEndian) { definitionAbstractValue.shift(definitionLocation.addr() - memoryLocation.addr()); } else { definitionAbstractValue.shift(memoryLocation.endAddr() - definitionLocation.endAddr()); } /* Project the value to the defined location. */ definitionAbstractValue.project(mask); /* Update term's value. */ abstractValue.merge(definitionAbstractValue); } } value->setAbstractValue(abstractValue.resize(term->size())); /* * Merge stack offset and product flags. * * Heuristic: merge information only from terms that define lower bits of the term's value. 
*/ const std::vector<const Term *> *lowerBitsDefinitions = nullptr; if (byteOrder == ByteOrder::LittleEndian) { if (definitions.chunks().front().location().addr() == memoryLocation.addr()) { lowerBitsDefinitions = &definitions.chunks().front().definitions(); } } else { if (definitions.chunks().back().location().endAddr() == memoryLocation.endAddr()) { lowerBitsDefinitions = &definitions.chunks().back().definitions(); } } if (lowerBitsDefinitions) { foreach (auto definition, *lowerBitsDefinitions) { auto definitionValue = dataflow().getValue(definition); if (definitionValue->isNotStackOffset()) { value->makeNotStackOffset(); } else if (definitionValue->isStackOffset()) { value->makeStackOffset(definitionValue->stackOffset()); } if (definitionValue->isNotProduct()) { value->makeNotProduct(); } else if (definitionValue->isProduct()) { value->makeProduct(); } } } /* * Merge return address flag. */ if (definitions.chunks().front().location() == memoryLocation) { foreach (auto definition, definitions.chunks().front().definitions()) { auto definitionValue = dataflow().getValue(definition); if (definitionValue->isNotReturnAddress()) { value->makeNotReturnAddress(); } else if (definitionValue->isReturnAddress()) { value->makeReturnAddress(); } } }