/* ===================================================================== */
VOID Instruction(INS ins, VOID *v)
{
    /*
    if (INS_RegWContain(ins, REG_STACK_PTR))
    {
        if (INS_IsSub(ins))
        {
            INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)StackRegSubBefore,
                           IARG_INST_PTR,
                           IARG_ADDRINT, ins.q(),
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_END);
            INS_InsertCall(ins, IPOINT_AFTER, (AFUNPTR)StackRegSubAfter,
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_END);
        }
        if (INS_Opcode(ins) == XED_ICLASS_ADD)
        {
            INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)StackRegAddBefore,
                           IARG_INST_PTR,
                           IARG_ADDRINT, ins.q(),
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_END);
            INS_InsertCall(ins, IPOINT_AFTER, (AFUNPTR)StackRegAddAfter,
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_END);
        }
    }
    */

    UINT32 memOperands = INS_MemoryOperandCount(ins);

    // Instrument each memory operand. If the operand is both read and written
    // it will be processed twice.
    // Iterating over memory operands ensures that instructions on IA-32 with
    // two read operands (such as SCAS and CMPS) are correctly handled.
    for (UINT32 memOp = 0; memOp < memOperands; memOp++)
    {
        const UINT32 size = INS_MemoryOperandSize(ins, memOp);
        // const BOOL single = (size <= 4);

        if (INS_MemoryOperandIsRead(ins, memOp))
        {
            INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)MemReadBefore,
                           IARG_INST_PTR,
                           IARG_MEMORYOP_EA, memOp,
                           IARG_ADDRINT, size,
                           IARG_ADDRINT, ins.q(),
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_REG_VALUE, REG_GBP,
                           IARG_BOOL, INS_IsStackRead(ins),
                           IARG_END);
        }
        if (INS_MemoryOperandIsWritten(ins, memOp))
        {
            INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)MemWriteBefore,
                           IARG_INST_PTR,
                           IARG_MEMORYOP_EA, memOp,
                           IARG_ADDRINT, size,
                           IARG_ADDRINT, ins.q(),
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_REG_VALUE, REG_GBP,
                           IARG_BOOL, INS_IsStackWrite(ins),
                           IARG_END);
        }
    }
}
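/* The analysis routines registered above are not part of this example. Below is a
 * minimal sketch of what they could look like: the parameter lists mirror the IARG
 * lists one-to-one, but the bodies (plain counters, not thread-safe) are an
 * assumption for illustration, not the original tool's logic. */
static UINT64 stackReads = 0, otherReads = 0, stackWrites = 0, otherWrites = 0;

VOID MemReadBefore(ADDRINT ip, ADDRINT ea, ADDRINT size, ADDRINT insId,
                   ADDRINT sp, ADDRINT bp, BOOL isStackRead)
{
    if (isStackRead) stackReads++; else otherReads++;
}

VOID MemWriteBefore(ADDRINT ip, ADDRINT ea, ADDRINT size, ADDRINT insId,
                    ADDRINT sp, ADDRINT bp, BOOL isStackWrite)
{
    if (isStackWrite) stackWrites++; else otherWrites++;
}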
static void Instruction(INS ins, void *v)
{
    INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)do_count, IARG_END);

    // Filter out instructions that do not reference memory.
    if (!INS_IsMemoryRead(ins) && !INS_IsMemoryWrite(ins))
        return;
    // Filter out references to the stack.
    if (INS_IsStackRead(ins) || INS_IsStackWrite(ins))
        return;
    // Filter out instructions outside the main executable.
    IMG img = IMG_FindByAddress(INS_Address(ins));
    if (!IMG_Valid(img) || !IMG_IsMainExecutable(img))
        return;

    unsigned i;
    unsigned int mem_op = INS_MemoryOperandCount(ins);
    for (i = 0; i < mem_op; i++) {
        INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)check_addr,
                                 IARG_INST_PTR,
                                 IARG_MEMORYOP_EA, i,
                                 IARG_END);
    }
}
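/* Hypothetical sketch of the two analysis routines used above: do_count runs for
 * every instruction, check_addr only for non-stack memory operands of the main
 * executable. The watched address range and counters are illustrative assumptions,
 * not taken from the original tool. */
static UINT64 ins_count = 0;
static UINT64 watched_hits = 0;
static ADDRINT watch_lo = 0, watch_hi = 0; // e.g. filled in from a malloc() hook

static VOID do_count()
{
    ins_count++;
}

static VOID check_addr(ADDRINT ip, ADDRINT ea)
{
    if (ea >= watch_lo && ea < watch_hi)
        watched_hits++; // the instruction at ip touched the watched buffer
}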
// Returns true if the basic block contains at least one memory access
// that does not go through the stack.
bool BBLContainMemOp(BBL bbl)
{
    for (INS ins = BBL_InsHead(bbl); INS_Valid(ins); ins = INS_Next(ins)) {
        if (INS_IsStackRead(ins) || INS_IsStackWrite(ins))
            continue;
        if (INS_IsMemoryRead(ins) || INS_IsMemoryWrite(ins))
            return true;
    }
    return false;
}
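/* A possible caller for the helper above (a sketch, not taken from the original
 * tool): only basic blocks that contain a non-stack memory access receive a
 * counting call. CountMemBBL and insInMemBBLs are illustrative names; the Trace
 * callback would be registered with TRACE_AddInstrumentFunction(). */
static UINT64 insInMemBBLs = 0;

static VOID CountMemBBL(UINT32 numIns)
{
    insInMemBBLs += numIns;
}

VOID Trace(TRACE trace, VOID *v)
{
    for (BBL bbl = TRACE_BblHead(trace); BBL_Valid(bbl); bbl = BBL_Next(bbl)) {
        if (!BBLContainMemOp(bbl))
            continue;
        BBL_InsertCall(bbl, IPOINT_BEFORE, (AFUNPTR)CountMemBBL,
                       IARG_UINT32, BBL_NumIns(bbl),
                       IARG_END);
    }
}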
void Scheduler::HandlePostInstrumentTrace(TRACE trace)
{
    ExecutionControl::HandlePostInstrumentTrace(trace);

    for (BBL bbl = TRACE_BblHead(trace); BBL_Valid(bbl); bbl = BBL_Next(bbl)) {
        for (INS ins = BBL_InsHead(bbl); INS_Valid(ins); ins = INS_Next(ins)) {
            if (INS_IsMemoryRead(ins) || INS_IsMemoryWrite(ins)) {
                if (INS_IsStackRead(ins) || INS_IsStackWrite(ins))
                    continue; // skip stack accesses
                INS_InsertCall(ins, IPOINT_BEFORE, AFUNPTR(__Change),
                               IARG_UINT32, 1,
                               IARG_END);
            }
        }
    }
}
VOID Routine(RTN rtn, VOID *v)
{
    std::string name = PIN_UndecorateSymbolName(RTN_Name(rtn).c_str(),
                                                UNDECORATION_NAME_ONLY);

    std::vector<std::string>::iterator it;
    for (it = userFuncs.begin(); it != userFuncs.end(); ++it) {
        std::string userFunc = *it;
        if (name.find(userFunc) == std::string::npos)
            continue;

        RTN_Open(rtn);

        // For each instruction of the routine
        for (INS ins = RTN_InsHead(rtn); INS_Valid(ins); ins = INS_Next(ins)) {
            UINT32 memOperands = INS_MemoryOperandCount(ins);

            // Iterate over each memory operand of the instruction.
            for (UINT32 memOp = 0; memOp < memOperands; memOp++) {
                if (INS_IsStackRead(ins) || INS_IsStackWrite(ins))
                    break;

                if (INS_MemoryOperandIsRead(ins, memOp)) {
                    INS_InsertPredicatedCall(
                        ins, IPOINT_BEFORE, (AFUNPTR)RecordMemRead,
                        IARG_INST_PTR,
                        IARG_MEMORYOP_EA, memOp,
                        IARG_THREAD_ID,
                        IARG_END);
                }
                if (INS_MemoryOperandIsWritten(ins, memOp)) {
                    INS_InsertPredicatedCall(
                        ins, IPOINT_BEFORE, (AFUNPTR)RecordMemWrite,
                        IARG_INST_PTR,
                        IARG_MEMORYOP_EA, memOp,
                        IARG_THREAD_ID,
                        IARG_END);
                }
            }
        }

        RTN_Close(rtn);
    }
}
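/* Sketch of the recording callbacks referenced above. The signatures follow the
 * IARG lists; the bodies are an assumption: they log to a shared ofstream, which
 * requires a lock in multithreaded targets. TraceFile and fileLock are assumed
 * globals (TraceFile opened and PIN_InitLock(&fileLock) called in main()). */
static std::ofstream TraceFile;
static PIN_LOCK fileLock;

VOID RecordMemRead(ADDRINT ip, ADDRINT ea, THREADID tid)
{
    PIN_GetLock(&fileLock, tid + 1);
    TraceFile << "R " << tid << " " << std::hex << ip << " " << ea << std::dec << "\n";
    PIN_ReleaseLock(&fileLock);
}

VOID RecordMemWrite(ADDRINT ip, ADDRINT ea, THREADID tid)
{
    PIN_GetLock(&fileLock, tid + 1);
    TraceFile << "W " << tid << " " << std::hex << ip << " " << ea << std::dec << "\n";
    PIN_ReleaseLock(&fileLock);
}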
/*
 * Instruction
 * Catches syscall, return, and store instructions and calls the appropriate handler.
 */
VOID Instruction(INS ins, VOID *v)
{
    if (INS_IsSyscall(ins) && INS_HasFallThrough(ins)) {
        // Arguments and the syscall number are only available before the syscall executes.
        INS_InsertCall(ins, IPOINT_BEFORE, AFUNPTR(SysBefore),
                       IARG_INST_PTR, IARG_SYSCALL_NUMBER,
                       IARG_SYSARG_VALUE, 0, IARG_SYSARG_VALUE, 1,
                       IARG_SYSARG_VALUE, 2, IARG_SYSARG_VALUE, 3,
                       IARG_SYSARG_VALUE, 4, IARG_SYSARG_VALUE, 5,
                       IARG_REG_VALUE, REG_STACK_PTR,
                       IARG_END);
    }
    else if (INS_Valid(ins)) {
        if (INS_IsRet(ins)) {
            INS_InsertCall(ins, IPOINT_TAKEN_BRANCH, AFUNPTR(Return),
                           IARG_REG_VALUE, REG_STACK_PTR,
                           IARG_END);
        }
        else if (INS_IsStackWrite(ins)) {
            INS_InsertCall(ins, IPOINT_BEFORE, AFUNPTR(StackWrite),
                           IARG_MEMORYWRITE_EA, IARG_MEMORYWRITE_SIZE,
                           IARG_END);
        }
        else if (INS_IsMemoryWrite(ins) && !INS_IsBranchOrCall(ins)) {
            INS_InsertCall(ins, IPOINT_BEFORE, AFUNPTR(MemoryWrite),
                           IARG_MEMORYWRITE_EA, IARG_MEMORYWRITE_SIZE,
                           IARG_END);
        }
    }
}
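/* Matching analysis-routine signatures for the calls above (a sketch; the real
 * handlers of this tool are not shown). The parameter lists follow the IARG lists
 * one-to-one, and the comments only suggest what a tool might do with them. */
VOID SysBefore(ADDRINT ip, ADDRINT num,
               ADDRINT a0, ADDRINT a1, ADDRINT a2,
               ADDRINT a3, ADDRINT a4, ADDRINT a5, ADDRINT sp)
{
    // e.g. remember mmap()/brk() arguments before the kernel runs
}

VOID Return(ADDRINT sp)
{
    // e.g. unwind a shadow call stack down to sp
}

VOID StackWrite(ADDRINT ea, UINT32 size)
{
    // e.g. mark [ea, ea + size) as written stack memory
}

VOID MemoryWrite(ADDRINT ea, UINT32 size)
{
    // e.g. mark [ea, ea + size) as written heap/global memory
}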
VOID instrumentTrace(TRACE trace, VOID *v)
{
    for (BBL bbl = TRACE_BblHead(trace); BBL_Valid(bbl); bbl = BBL_Next(bbl)) {
        INS ins = BBL_InsHead(bbl);

        INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)startBasicBlock,
                       IARG_THREAD_ID,
                       IARG_CONTEXT,
                       IARG_UINT32, BBL_NumIns(bbl),
                       IARG_END);

        UINT32 instPos = 0;
        for (; INS_Valid(ins); ins = INS_Next(ins)) {
            if (INS_IsMemoryRead(ins)) {
                INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)memOp,
                               IARG_THREAD_ID,
                               IARG_UINT32, instPos,
                               IARG_MEMORYREAD_EA,
                               IARG_MEMORYREAD_SIZE,
                               IARG_BOOL, true,
                               IARG_BOOL, INS_IsStackRead(ins),
                               IARG_END);
            }
            if (INS_IsMemoryWrite(ins)) {
                INS_InsertCall(ins, IPOINT_BEFORE, (AFUNPTR)memOp,
                               IARG_THREAD_ID,
                               IARG_UINT32, instPos,
                               IARG_MEMORYWRITE_EA,
                               IARG_MEMORYWRITE_SIZE,
                               IARG_BOOL, false,
                               IARG_BOOL, INS_IsStackWrite(ins),
                               IARG_END);
            }
            instPos++;
        }
    }
}
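/* Sketch of the callbacks referenced above. The signatures mirror the IARG lists
 * (thread id, position in the block, effective address, size, read flag, stack
 * flag); the bodies are placeholders for whatever per-thread bookkeeping the
 * original tool performs. */
VOID startBasicBlock(THREADID tid, CONTEXT *ctxt, UINT32 numIns)
{
    // e.g. reset the per-thread position counter for the new basic block
}

VOID memOp(THREADID tid, UINT32 instPos, ADDRINT ea, UINT32 size,
           BOOL isRead, BOOL isStack)
{
    // e.g. record (instPos, ea, size) tagged as read/write and stack/non-stack
}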
VOID Instruction(INS ins, void *v)
{
    // Track the write operations.
    if (INS_IsStackWrite(ins)) {
        // map sparse INS addresses to dense IDs
        //const ADDRINT iaddr = INS_Address(ins);
#ifdef STACK
        const UINT32 size = INS_MemoryWriteSize(ins);
        const BOOL single = (size <= 4);
        if (single) {
            INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)StoreSingle,
                                     IARG_MEMORYWRITE_EA,
                                     IARG_ADDRINT, INS_Address(ins),
                                     IARG_END);
        }
        else {
            INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)StoreMulti,
                                     IARG_MEMORYWRITE_EA,
                                     IARG_MEMORYWRITE_SIZE,
                                     IARG_END);
        }
#endif
        ;
    }
    else if (INS_IsMemoryWrite(ins)) {
#ifdef HEAP
        const UINT32 size = INS_MemoryWriteSize(ins);
        const BOOL single = (size <= 4);
        if (single) {
            INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)StoreSingleH,
                                     IARG_MEMORYWRITE_EA,
                                     IARG_ADDRINT, INS_Address(ins),
                                     IARG_END);
        }
        else {
            INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)StoreMultiH,
                                     IARG_MEMORYWRITE_EA,
                                     IARG_MEMORYWRITE_SIZE,
                                     IARG_END);
        }
#endif
        ;
    }

#ifdef STACK
    // Track frame allocation/deallocation: count function entries and exits via
    // CALL and via execution of the return-address instruction, assuming the
    // entry instruction is executed once within each frame.
    INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)CallEnd,
                             IARG_ADDRINT, INS_Address(ins),
                             IARG_END);

    if (INS_Opcode(ins) == XED_ICLASS_CALL_NEAR) {
        ADDRINT nextAddr = INS_NextAddress(ins);
        //cerr << hex << nextAddr;
        //ADDRINT callee = INS_DirectBranchOrCallTargetAddress(ins);
        //cerr << "->" << callee << endl;
        INS_InsertPredicatedCall(ins, IPOINT_BEFORE, (AFUNPTR)CallBegin,
                                 IARG_ADDRINT, nextAddr,
                                 IARG_BRANCH_TARGET_ADDR,
                                 IARG_END);
    }
#endif
}
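/* Possible shapes of the analysis routines referenced above (a sketch; the actual
 * shadow-memory or frame bookkeeping is tool-specific and not shown here). Each
 * signature follows its IARG list; the comments only suggest plausible bodies. */
VOID StoreSingle(ADDRINT ea, ADDRINT iaddr)     { /* record a <=4-byte stack store at ea, issued by iaddr */ }
VOID StoreMulti(ADDRINT ea, UINT32 size)        { /* record a larger stack store covering [ea, ea + size) */ }
VOID StoreSingleH(ADDRINT ea, ADDRINT iaddr)    { /* same, for non-stack (heap/global) stores */ }
VOID StoreMultiH(ADDRINT ea, UINT32 size)       { /* same, for larger non-stack stores */ }
VOID CallBegin(ADDRINT retAddr, ADDRINT target) { /* push a frame keyed by the return address */ }
VOID CallEnd(ADDRINT iaddr)                     { /* pop the frame whose return address equals iaddr */ }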