Example no. 1
void StackInterpreter::process_p2wsh(const BinaryData& scriptHash)
{
   //get witness data
   auto witnessData = txStubPtr_->getWitnessData(inputIndex_);
   BinaryData witBD(witnessData);

   //prepare stack
   BinaryRefReader brr(witnessData);
   auto itemCount = brr.get_uint8_t();
   
   for (unsigned i = 0; i < itemCount; i++)
   {
      auto len = brr.get_var_int();
      stack_.push_back(brr.get_BinaryData(len));
   }

   if (brr.getSizeRemaining() != 0)
      throw ScriptException("witness size mismatch");

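   //let OP_SHA256 capture the witness script as a P2SH-style redeem script (handled in processOpCode)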
   flags_ |= SCRIPT_VERIFY_P2SH_SHA256;

   //construct output script
   auto&& swScript = BtcUtils::getP2WSHScript(scriptHash);
   processScript(swScript, true);
}
Example no. 2
Cube::Cube(const position_type &where, std::shared_ptr<Texture> texture)
		: Object3D(where) {
	// 8 points, 6 surfaces

	position_type
		blf(-1.0, -1.0, 1.0)    // front rectangle
		, brf(1.0, -1.0, 1.0)
		, trf(1.0, 1.0, 1.0)
		, tlf(-1.0, 1.0, 1.0)
		, blr(-1.0, -1.0, -1.0)    // rear rectangle
		, brr(1.0, -1.0, -1.0)
		, trr(1.0, 1.0, -1.0)
		, tlr(-1.0, 1.0, -1.0);

	position_type
		negZ(0.0, 0.0, -1.0)
		, posZ(0.0, 0.0, 1.0)
		, negY(0.0, -1.0, 0.0)
		, posY(0.0, 1.0, 0.0)
		, negX(-1.0, 0.0, 0.0)
		, posX(1.0, 0.0, 0.0);

	// if changing the order, make sure to change accessors so they grab the correct object
  this->add_child(new Rectangle(POSITION_INHERIT, { blf, blr, brr, brf }, { negY, negY, negY, negY }, texture));    // bottom negY
  this->add_child(new Rectangle(POSITION_INHERIT, { trr, brr, blr, tlr }, { negZ, negZ, negZ, negZ }, texture));    // rear   negZ
  this->add_child(new Rectangle(POSITION_INHERIT, { trf, brf, brr, trr }, { posX, posX, posX, posX }, texture));    // right  posX
  this->add_child(new Rectangle(POSITION_INHERIT, { tlr, blr, blf, tlf }, { negX, negX, negX, negX }, texture));    // left   negX
  this->add_child(new Rectangle(POSITION_INHERIT, { tlf, blf, brf, trf }, { posZ, posZ, posZ, posZ }, texture));    // front  posZ
  this->add_child(new Rectangle(POSITION_INHERIT, { tlr, tlf, trf, trr }, { posY, posY, posY, posY }, texture));    // top    posY

}    // Cube
Example no. 3
void TxIOPair::unserialize(const BinaryDataRef& val)
{
   BinaryRefReader brr(val);

   BitUnpacker<uint8_t> bitunpack(brr);
   isTxOutFromSelf_  = bitunpack.getBit();
   isFromCoinbase_   = bitunpack.getBit();
   bool isSpent      = bitunpack.getBit();
   isMultisig_       = bitunpack.getBit();
   isUTXO_           = bitunpack.getBit();
   isFromSameBlock_  = bitunpack.getBit();

   // We always include the 8-byte value
   amount_ = brr.get_uint64_t();

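   //1 flag byte + the 8-byte value = 9 bytes, so the 8-byte txout key sits at offset 9;
   //a 25-byte record also carries an 8-byte txin key at offset 17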
   setTxOut(val.getSliceRef(9, 8));
   if (val.getSize() == 25)
      setTxIn(val.getSliceRef(17, 8));

   //the key always carries the full txout ref
   /*if (!isSpent)
      setTxOut(key);
   else
   {
      //spent subssh, txout key      
      setTxOut(val.getSliceRef(9, 8));

      //when keyed by txins, the top bit in the tx index is always flipped
      BinaryData txinKey(key);
      txinKey.getPtr()[4] &= 0x7F;
      
      //last 8 bytes carry the txin key
      setTxIn(txinKey);
   }*/
}
Example no. 4
void StackInterpreter::processSW(BinaryDataRef outputScript)
{
   if (flags_ & SCRIPT_VERIFY_SEGWIT)
   {
      //set sig hash object to sw if it's missing
      sigHashDataObject_ = SHD_SW_;

      BinaryRefReader brr(outputScript);
      auto versionByte = brr.get_uint8_t();

      switch (versionByte)
      {
      case 0:
      {
         auto&& scriptSize = brr.get_uint8_t();
         auto&& scriptHash = brr.get_BinaryDataRef(scriptSize);

         if (brr.getSizeRemaining() > 0)
            throw ScriptException("invalid v0 SW ouput size");

         switch (scriptSize)
         {
         case 20:
         {
            //P2WPKH
            process_p2wpkh(scriptHash);
            break;
         }

         case 32:
         {
            //P2WSH
            process_p2wsh(scriptHash);
            break;
         }

         default:
            throw ScriptException("invalid data size for version 0 SW");
         }

         break;
      }

      default:
         throw ScriptException("unsupported SW versions");
      }
   }
   else
      throw ScriptException("not flagged for SW parsing");
}
Example no. 5
   void getFileAndPosForBlockHash(BlockHeader& blk)
   {
      BlockFilePosition filePos = { 0, 0 };

      //we don't have the file position for this header, let's find it
      class StopReading : public std::exception
      {
      };

      const BinaryData& thisHash = blk.getThisHash();

      const auto stopIfBlkHeaderRecognized =
         [&thisHash, &filePos](
         const BinaryData &blockheader,
         const BlockFilePosition &pos,
         uint32_t blksize
         )
      {
         filePos = pos;

         BlockHeader block;
         BinaryRefReader brr(blockheader);
         block.unserialize(brr);

         const HashString blockhash = block.getThisHash();
         if (blockhash == thisHash)
            throw StopReading();
      };

      try
      {
         //at this point, the last blkFile has been scanned for blocks, so skip it
         for (int32_t i = blkFiles_.size() - 2; i > -1; i--)
         {
            readHeadersFromFile(
               blkFiles_[i],
               0,
               stopIfBlkHeaderRecognized
               );
         }
      }
      catch (StopReading&)
      {
         // we're fine
      }

      blk.setBlockFileNum(filePos.first);
      blk.setBlockFileOffset(filePos.second);
   }
Example no. 6
bool TxIOPair::setTxOut(const BinaryData& dbKey8B)
{
   if (dbKey8B.getSize() == 8)
   {
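      //a full 8-byte DB key is a 6-byte tx key followed by a 2-byte big-endian txout index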
      BinaryRefReader brr(dbKey8B);
      BinaryDataRef txKey6B = brr.get_BinaryDataRef(6);
      uint16_t      txOutIdx = brr.get_uint16_t(BIGENDIAN);
      return setTxOut(TxRef(txKey6B), (uint32_t)txOutIdx);
   }
   else
   {
      //pass 0 byte dbkey to reset the txout
      setTxOut(TxRef(), 0);
      return false;
   }
}
Example no. 7
Cube::Cube( const position_type &where, Texture::pointer_type texture )
	: Object( where, texture )
{
	glm::vec3
		blf( -1.0, -1.0, 1.0 )    // front rectangle
		, brf( 1.0, -1.0, 1.0 )
		, trf( 1.0, 1.0, 1.0 )
		, tlf( -1.0, 1.0, 1.0 )
		, blr( -1.0, -1.0, -1.0 )    // rear rectangle
		, brr( 1.0, -1.0, -1.0 )
		, trr( 1.0, 1.0, -1.0 )
		, tlr( -1.0, 1.0, -1.0 )
		;

	glm::vec3
		negZ( 0.0, 0.0, -1.0 )
		, posZ( 0.0, 0.0, 1.0 )
		, negY( 0.0, -1.0, 0.0 )
		, posY( 0.0, 1.0, 0.0 )
		, negX( -1.0, 0.0, 0.0 )
		, posX( 1.0, 0.0, 0.0 )
		;

	// gl_triangle strip
	// for ccw winding:  top left, bottom left, top right, bottom right

	auto add_components = [ &]( const std::vector<Component::pointer_type>& vec )
	{
		for ( auto& v : vec )
			this->components.emplace_back( v );
	};

	add_components( Triangle::from_quad( std::vector<glm::vec3>( { blf, blr, brf, brr } ), negY, texture ) );	// bottom
	add_components( Triangle::from_quad( std::vector<glm::vec3>( { trr, brr, tlr, blr } ), negZ, texture ) );	// rear
	add_components( Triangle::from_quad( std::vector<glm::vec3>( { trf, brf, trr, brr } ), posX, texture ) );    // right  posX
	add_components( Triangle::from_quad( std::vector<glm::vec3>( { tlr, blr, tlf, blf } ), negX, texture ) );    // left   negX
	add_components( Triangle::from_quad( std::vector<glm::vec3>( { tlf, blf, trf, brf } ), posZ, texture ) );    // front  posZ
	add_components( Triangle::from_quad( std::vector<glm::vec3>( { tlr, tlf, trr, trf } ), posY, texture ) );    // top    posY
}
Example no. 8
vector<BinaryDataRef> SigHashData::tokenize(
   const BinaryData& data, uint8_t token)
{
   vector<BinaryDataRef> tokens;

   BinaryRefReader brr(data.getRef());
   size_t start = 0;
   StackInterpreter ss;
   
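   //collect the byte ranges delimited by each occurrence of the token opcode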
   while (brr.getSizeRemaining())
   {
      auto offset = ss.seekToOpCode(brr, (OPCODETYPE)token);
      auto len = offset - start;

      BinaryDataRef bdr(data.getPtr() + start, len);
      tokens.push_back(move(bdr));

      start = brr.getPosition();
   }

   return tokens;
}
Example no. 9
void StackInterpreter::processOpCode(const OpCode& oc)
{
   ++opcount_;

   //handle push data separately; it doesn't play well with switch
   if (oc.opcode_ == 0)
   {
      op_0();
      return;
   }

   if (oc.opcode_ <= 75)
   {
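      //opcodes 0x01 through 0x4b push that many bytes of data onto the stack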
      stack_.push_back(oc.dataRef_);
      return;
   }

   if (oc.opcode_ < 79)
   {
      //op push data
      stack_.push_back(oc.dataRef_);
      return;
   }

   if (oc.opcode_ == OP_1NEGATE)
   {
      op_1negate();
      return;
   }

   if (oc.opcode_ <= 96 && oc.opcode_ >= 81)
   {
      //op_1 - op_16
      uint8_t val = oc.opcode_ - 80;
      stack_.push_back(move(intToRawBinary(val)));
      return;
   }

   //If we got this far, this op code is not push data. If this is the input
   //script, clear the flag as per P2SH parsing rules (only push data in inputs)
   if (outputScriptRef_.getSize() == 0)
      onlyPushDataInInput_ = false;

   switch (oc.opcode_)
   {
   case OP_NOP:
      break;

   case OP_IF:
   {
      BinaryRefReader brr(oc.dataRef_);
      op_if(brr, false);
      break;
   }

   case OP_NOTIF:
   {
      op_not();
      BinaryRefReader brr(oc.dataRef_);
      op_if(brr, false);
      break;
   }

   case OP_ELSE:
      //processed by opening if statement
      throw ScriptException("a wild else appears");

   case OP_ENDIF:
      //processed by opening if statement
      throw ScriptException("a wild endif appears");

   case OP_VERIFY:
      op_verify();
      break;

   case OP_TOALTSTACK:
      op_toaltstack();
      break;

   case OP_FROMALTSTACK:
      op_fromaltstack();
      break;

   case OP_IFDUP:
      op_ifdup();
      break;

   case OP_2DROP:
   {
      stack_.pop_back();
      stack_.pop_back();
      break;
   }

   case OP_2DUP:
      op_2dup();
      break;

   case OP_3DUP:
      op_3dup();
      break;

   case OP_2OVER:
      op_2over();
      break;

   case OP_DEPTH:
      op_depth();
      break;

   case OP_DROP:
      stack_.pop_back();
      break;

   case OP_DUP:
      op_dup();
      break;

   case OP_NIP:
      op_nip();
      break;

   case OP_OVER:
      op_over();
      break;

   case OP_PICK:
      op_pick();
      break;

   case OP_ROLL:
      op_roll();
      break;

   case OP_ROT:
      op_rot();
      break;

   case OP_SWAP:
      op_swap();
      break;

   case OP_TUCK:
      op_tuck();
      break;

   case OP_SIZE:
      op_size();
      break;

   case OP_EQUAL:
   {
      op_equal();
      if (onlyPushDataInInput_ && p2shScript_.getSize() != 0)
      {
         //check the op_equal result
         op_verify();
         if (!isValid_)
            break;

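         //nested segwit: a 22-byte (v0 P2WPKH) or 34-byte (v0 P2WSH) witness program used as the redeem script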
         if (flags_ & SCRIPT_VERIFY_SEGWIT)
            if (p2shScript_.getSize() == 22 ||
               p2shScript_.getSize() == 34)
            {
               auto versionByte = p2shScript_.getPtr();
               if (*versionByte <= 16)
               {
                  processSW(p2shScript_);
                  return;
               }
            }

         processScript(p2shScript_, true);
      }
      break;
   }

   case OP_EQUALVERIFY:
   {
      op_equal();
      op_verify();
      break;
   }

   case OP_1ADD:
      op_1add();
      break;

   case OP_1SUB:
      op_1sub();
      break;

   case OP_NEGATE:
      op_negate();
      break;

   case OP_ABS:
      op_abs();
      break;

   case OP_NOT:
      op_not();
      break;

   case OP_0NOTEQUAL:
      op_0notequal();
      break;

   case OP_ADD:
      op_add();
      break;

   case OP_SUB:
      op_sub();
      break;

   case OP_BOOLAND:
      op_booland();
      break;

   case OP_BOOLOR:
      op_boolor();
      break;

   case OP_NUMEQUAL:
      op_numequal();
      break;

   case OP_NUMEQUALVERIFY:
   {
      op_numequal();
      op_verify();
      break;
   }

   case OP_NUMNOTEQUAL:
      op_numnotequal();
      break;

   case OP_LESSTHAN:
      op_lessthan();
      break;

   case OP_GREATERTHAN:
      op_greaterthan();
      break;

   case OP_LESSTHANOREQUAL:
      op_lessthanorequal();
      break;

   case OP_GREATERTHANOREQUAL:
      op_greaterthanorequal();
      break;

   case OP_MIN:
      op_min();
      break;

   case OP_MAX:
      op_max();
      break;

   case OP_WITHIN:
      op_within();
      break;

   case OP_RIPEMD160:
      op_ripemd160();
      break;

   case OP_SHA256:
   {
      //save the script if this output is a possible p2sh
      if (flags_ & SCRIPT_VERIFY_P2SH_SHA256)
         if (opcount_ == 1 && onlyPushDataInInput_)
            p2shScript_ = stack_back();

      op_sha256();
      break;
   }

   case OP_HASH160:
   {
      //save the script if this output is a possible p2sh
      if (flags_ & SCRIPT_VERIFY_P2SH)
         if (opcount_ == 1 && onlyPushDataInInput_)
            p2shScript_ = stack_back();

      op_hash160();
      break;
   }

   case OP_HASH256:
      op_hash256();
      break;

   case OP_CODESEPARATOR:
   {
      opcount_ = 0;
      if (outputScriptRef_.getSize() != 0)
         txStubPtr_->setLastOpCodeSeparator(inputIndex_, oc.offset_);
      break;
   }

   case OP_CHECKSIG:
      op_checksig();
      break;

   case OP_CHECKSIGVERIFY:
   {
      op_checksig();
      op_verify();
      break;
   }

   case OP_CHECKMULTISIG:
      op_checkmultisig();
      break;

   case OP_CHECKMULTISIGVERIFY:
   {
      op_checkmultisig();
      op_verify();
      break;
   }

   case OP_NOP1:
      break;

   case OP_NOP2:
   {
      if (!(flags_ & SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY))
         break; // not enabled; treat as a NOP

      //CLTV mechanics
      throw ScriptException("OP_CLTV not supported");
   }

   case OP_NOP3:
   {
      if (!(flags_ & SCRIPT_VERIFY_CHECKSEQUENCEVERIFY))
         break; // not enabled; treat as a NOP

      //CSV mechanics
      throw ScriptException("OP_CSV not supported");
   }

   case OP_NOP4:
      break;

   case OP_NOP5:
      break;

   case OP_NOP6:
      break;

   case OP_NOP7:
      break;

   case OP_NOP8:
      break;

   case OP_NOP9:
      break;

   case OP_NOP10:
      break;

   default:
   {
      stringstream ss;
      ss << "unknown opcode: " << (unsigned)oc.opcode_;
      throw runtime_error(ss.str());
   }
   }
}
Example no. 10
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
//// StackInterpreter
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
void StackInterpreter::processScript(
   const BinaryDataRef& script, bool isOutputScript)
{
   BinaryRefReader brr(script);
   processScript(brr, isOutputScript);
}
Example no. 11
shared_ptr<StackItem> StackItem::deserialize(const BinaryDataRef& dataRef)
{
   shared_ptr<StackItem> itemPtr;

   BinaryRefReader brr(dataRef);

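   //serialized layout: 4-byte id, 1-byte type prefix, then a prefix-specific payload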
   auto id = brr.get_uint32_t();
   auto prefix = brr.get_uint8_t();

   switch (prefix)
   {
   case STACKITEM_PUSHDATA_PREFIX:
   {
      auto len = brr.get_var_int();
      auto&& data = brr.get_BinaryData(len);

      itemPtr = make_shared<StackItem_PushData>(id, move(data));
      break;
   }

   case STACKITEM_SIG_PREFIX:
   {
      auto len = brr.get_var_int();
      SecureBinaryData data(brr.get_BinaryData(len));

      itemPtr = make_shared<StackItem_Sig>(id, move(data));
      break;
   }

   case STACKITEM_MULTISIG_PREFIX:
   {
      auto m = brr.get_uint16_t();
      auto item_ms = make_shared<StackItem_MultiSig>(id, m);

      auto count = brr.get_var_int();
      for (unsigned i = 0; i < count; i++)
      {
         auto pos = brr.get_uint16_t();
         auto len = brr.get_var_int();
         SecureBinaryData data(brr.get_BinaryData(len));

         item_ms->setSig(pos, data);
      }

      itemPtr = item_ms;
      break;
   }

   case STACKITEM_OPCODE_PREFIX:
   {
      auto opcode = brr.get_uint8_t();

      itemPtr = make_shared<StackItem_OpCode>(id, opcode);
      break;
   }

   case STACKITEM_SERSCRIPT_PREFIX:
   {
      auto len = brr.get_var_int();
      auto&& data = brr.get_BinaryData(len);

      itemPtr = make_shared<StackItem_SerializedScript>(id, move(data));
      break;
   }

   default:
      throw runtime_error("unexpected stack item prefix");
   }

   return itemPtr;
}
Example no. 12
void BlockchainScanner::undo(Blockchain::ReorganizationState& reorgState)
{
   //don't undo subssh, these are skipped by dupID when loading history

   BlockHeader* blockPtr = reorgState.prevTopBlock;
   map<uint32_t, BlockFileMapPointer> fileMaps_;

   map<DB_SELECT, set<BinaryData>> keysToDelete;
   map<BinaryData, StoredScriptHistory> sshMap;
   set<BinaryData> undoSpentness; //TODO: add spentness DB

   //TODO: sanity checks on header ptrs from reorgState
   if (reorgState.prevTopBlock->getBlockHeight() <=
       reorgState.reorgBranchPoint->getBlockHeight())
      throw runtime_error("invalid reorg state");

   while (blockPtr != reorgState.reorgBranchPoint)
   {
      auto currentHeight = blockPtr->getBlockHeight();
      auto currentDupId  = blockPtr->getDuplicateID();

      //create tx to pull subssh data
      LMDBEnv::Transaction sshTx;
      db_->beginDBTransaction(&sshTx, SUBSSH, LMDB::ReadOnly);

      //grab blocks from previous top until branch point
      if (blockPtr == nullptr)
         throw runtime_error("reorg failed while tracing back to "
         "branch point");

      auto filenum = blockPtr->getBlockFileNum();
      auto fileIter = fileMaps_.find(filenum);
      if (fileIter == fileMaps_.end())
      {
         fileIter = fileMaps_.insert(make_pair(
            filenum,
            move(blockDataLoader_.get(filenum, false)))).first;
      }

      auto& filemap = fileIter->second;

      BlockData bdata;
      bdata.deserialize(filemap.get()->getPtr() + blockPtr->getOffset(),
         blockPtr->getBlockSize(), blockPtr);

      auto& txns = bdata.getTxns();
      for (unsigned i = 0; i < txns.size(); i++)
      {
         auto& txn = txns[i];

         //undo tx outs added by this block
         for (unsigned y = 0; y < txn->txouts_.size(); y++)
         {
            auto& txout = txn->txouts_[y];

            BinaryRefReader brr(
               txn->data_ + txout.first, txout.second);
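            //skip the 8-byte output value; it is re-read below after resetPosition()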
            brr.advance(8);
            unsigned scriptSize = (unsigned)brr.get_var_int();
            auto&& scrAddr = BtcUtils::getTxOutScrAddr(
               brr.get_BinaryDataRef(scriptSize));

            if (!scrAddrFilter_->hasScrAddress(scrAddr))
               continue;

            //update ssh value and txio count
            auto& ssh = sshMap[scrAddr];
            if (!ssh.isInitialized())
               db_->getStoredScriptHistorySummary(ssh, scrAddr);

            if (ssh.alreadyScannedUpToBlk_ < currentHeight)
               continue;
            
            brr.resetPosition();
            uint64_t value = brr.get_uint64_t();
            ssh.totalUnspent_ -= value;
            ssh.totalTxioCount_--;
            
            //mark stxo key for deletion
            auto&& txoutKey = DBUtils::getBlkDataKey(
               currentHeight, currentDupId,
               i, y);
            keysToDelete[STXO].insert(txoutKey);

            //decrement summary count at height, remove entry if necessary
            auto& sum = ssh.subsshSummary_[currentHeight];
            sum--;
            if (sum <= 0)
               ssh.subsshSummary_.erase(currentHeight);
         }

         //undo spends from this block
         for (unsigned y = 0; y < txn->txins_.size(); y++)
         {
            auto& txin = txn->txins_[y];

            BinaryDataRef outHash(
               txn->data_ + txin.first, 32);

            auto&& txKey = db_->getDBKeyForHash(outHash, currentDupId);
            if (txKey.getSize() != 6)
               continue;

            uint16_t txOutId = (uint16_t)READ_UINT32_LE(
               txn->data_ + txin.first + 32);
            txKey.append(WRITE_UINT16_BE(txOutId));

            StoredTxOut stxo;
            if (!db_->getStoredTxOut(stxo, txKey))
               continue;

            //update ssh value and txio count
            auto& scrAddr = stxo.getScrAddress();
            auto& ssh = sshMap[scrAddr];
            if (!ssh.isInitialized())
               db_->getStoredScriptHistorySummary(ssh, scrAddr);

            if (ssh.alreadyScannedUpToBlk_ < currentHeight)
               continue;

            ssh.totalUnspent_ += stxo.getValue();
            ssh.totalTxioCount_--;

            //mark txout key for undoing spentness
            undoSpentness.insert(txKey);

            //decrement summary count at height, remove entry if necessary
            auto& sum = ssh.subsshSummary_[currentHeight];
            sum--;
            if (sum <= 0)
               ssh.subsshSummary_.erase(currentHeight);
         }
      }

      //set blockPtr to prev block
      blockPtr = &blockchain_->getHeaderByHash(blockPtr->getPrevHashRef());
   }

   //at this point we have a map of updated ssh, as well as a 
   //set of keys to delete from the DB and spentness to undo by stxo key

   //stxo
   {
      LMDBEnv::Transaction tx;
      db_->beginDBTransaction(&tx, STXO, LMDB::ReadWrite);

      //grab stxos and revert spentness
      map<BinaryData, StoredTxOut> stxos;
      for (auto& stxoKey : undoSpentness)
      {
         auto& stxo = stxos[stxoKey];
         if (!db_->getStoredTxOut(stxo, stxoKey))
            continue;

         stxo.spentByTxInKey_.clear();
         stxo.spentness_ = TXOUT_UNSPENT;
      }

      //put updated stxos
      for (auto& stxo : stxos)
      {
         if (stxo.second.isInitialized())
            db_->putStoredTxOut(stxo.second);
      }

      //delete invalidated stxos
      auto& stxoKeysToDelete = keysToDelete[STXO];
      for (auto& key : stxoKeysToDelete)
         db_->deleteValue(STXO, key);
   }

   auto branchPointHeight = 
      reorgState.reorgBranchPoint->getBlockHeight();

   //ssh
   {
      LMDBEnv::Transaction tx;
      db_->beginDBTransaction(&tx, SSH, LMDB::ReadWrite);

      //go through all ssh in scrAddrFilter
      auto& scrAddrMap = scrAddrFilter_->getScrAddrMap();
      for (auto& scrAddr : scrAddrMap)
      {
         auto& ssh = sshMap[scrAddr.first];
         
         //if the ssh isn't in our map, pull it from DB
         if (!ssh.isInitialized())
         {
            db_->getStoredScriptHistorySummary(ssh, scrAddr.first);
            if (ssh.uniqueKey_.getSize() == 0)
            {
               sshMap.erase(scrAddr.first);
               continue;
            }
         }

         //update alreadyScannedUpToBlk_ to branch point height
         if (ssh.alreadyScannedUpToBlk_ > branchPointHeight)
            ssh.alreadyScannedUpToBlk_ = branchPointHeight;
      }

      //write it all up
      for (auto& ssh : sshMap)
      {
         if (!scrAddrFilter_->hasScrAddress(ssh.second.uniqueKey_))
         {
            LOGWARN << "invalid scrAddr during undo";
            continue;
         }

         BinaryWriter bw;
         ssh.second.serializeDBValue(bw, ARMORY_DB_BARE, DB_PRUNE_NONE);
         db_->putValue(SSH,
            ssh.second.getDBKey().getRef(),
            bw.getDataRef());
      }

      //update SSH sdbi      
      StoredDBInfo sdbi;
      db_->getStoredDBInfo(SSH, sdbi);
      sdbi.topScannedBlkHash_ = reorgState.reorgBranchPoint->getThisHash();
      sdbi.topBlkHgt_ = branchPointHeight;
      db_->putStoredDBInfo(SSH, sdbi);
   }
}
Example no. 13
void BlockchainScanner::scanBlockData(shared_ptr<BlockDataBatch> batch)
{
   //getBlock lambda
   auto getBlock = [&](unsigned height)->BlockData
   {
      auto iter = batch->blocks_.find(height);
      if (iter == batch->blocks_.end())
      {
         //TODO: encapsulate in try block to catch deser errors and signal pull thread
         //termination before exiting scope. Can't have the scan thread hanging if this
         //one fails. Also update batch->end_ if we didn't go as far as that block height

         //grab block file map
         BlockHeader* blockheader = nullptr;
         blockheader = &blockchain_->getHeaderByHeight(height);

         auto filenum = blockheader->getBlockFileNum();
         auto mapIter = batch->fileMaps_.find(filenum);
         if (mapIter == batch->fileMaps_.end())
         {
            //we haven't grabbed that file map yet
            auto insertPair = batch->fileMaps_.insert(
               make_pair(filenum, move(blockDataLoader_.get(filenum, true))));

            mapIter = insertPair.first;
         }

         auto filemap = mapIter->second.get();

         //find block and deserialize it
         try
         {
            BlockData bdata;
            bdata.deserialize(
               filemap->getPtr() + blockheader->getOffset(),
               blockheader->getBlockSize(),
               blockheader, false);

            auto insertPair = batch->blocks_.insert(make_pair(height, move(bdata)));
            iter = insertPair.first;
         }
         catch (...)
         {
            LOGERR << "unknown block deser error during scan at height #" << height;
            batch->exceptionPtr_ = current_exception();
            return BlockData();
         }
      }

      return iter->second;
   };

   //parser lambda
   auto blockDataLoop = [&](function<void(const BlockData&)> callback)
   {
      auto currentBlock = batch->start_;

      while (currentBlock <= batch->end_)
      {
         BlockData&& bdata = getBlock(currentBlock);
         if (!bdata.isInitialized())
            return;

         callback(bdata);

         currentBlock += totalThreadCount_;
      }
   };

   //txout lambda
   auto txoutParser = [&](const BlockData& blockdata)->void
   {
      //TODO: flag isMultisig

      const BlockHeader* header = blockdata.header();

      //update processed height
      auto topHeight = header->getBlockHeight();
      batch->highestProcessedHeight_ = topHeight;

      auto& txns = blockdata.getTxns();
      for (unsigned i = 0; i < txns.size(); i++)
      {
         const BCTX& txn = *(txns[i].get());
         for (unsigned y = 0; y < txn.txouts_.size(); y++)
         {
            auto& txout = txn.txouts_[y];

            BinaryRefReader brr(
               txn.data_ + txout.first, txout.second);
            brr.advance(8);
            unsigned scriptSize = (unsigned)brr.get_var_int();
            auto&& scrAddr = BtcUtils::getTxOutScrAddr(
               brr.get_BinaryDataRef(scriptSize));

            if (!scrAddrFilter_->hasScrAddress(scrAddr))
               continue;

            //if we got this far, this txout is ours
            //get tx hash
            auto& txHash = txn.getHash();

            //construct StoredTxOut
            StoredTxOut stxo;
            stxo.dataCopy_ = BinaryData(
               txn.data_ + txout.first, txout.second);
            stxo.parentHash_ = txHash;
            stxo.blockHeight_ = header->getBlockHeight();
            stxo.duplicateID_ = header->getDuplicateID();
            stxo.txIndex_ = i;
            stxo.txOutIndex_ = y;
            stxo.scrAddr_ = scrAddr;
            stxo.spentness_ = TXOUT_UNSPENT;
            stxo.parentTxOutCount_ = txn.txouts_.size();
            stxo.isCoinbase_ = txn.isCoinbase_;
            auto value = stxo.getValue();

            auto&& hgtx = DBUtils::heightAndDupToHgtx(
               stxo.blockHeight_, stxo.duplicateID_);
            
            auto&& txioKey = DBUtils::getBlkDataKeyNoPrefix(
               stxo.blockHeight_, stxo.duplicateID_,
               i, y);

            //update utxos_
            auto& stxoHashMap = batch->utxos_[txHash];
            stxoHashMap.insert(make_pair(y, move(stxo)));

            //update ssh_
            auto& ssh = batch->ssh_[scrAddr];
            auto& subssh = ssh.subHistMap_[hgtx];
            
            //deal with txio count in subssh at serialization
            TxIOPair txio;
            txio.setValue(value);
            txio.setTxOut(txioKey);
            txio.setFromCoinbase(txn.isCoinbase_);
            subssh.txioMap_.insert(make_pair(txioKey, move(txio)));
         }
      }
   };

   //txin lambda
   auto txinParser = [&](const BlockData& blockdata)->void
   {
      const BlockHeader* header = blockdata.header();
      auto& txns = blockdata.getTxns();

      for (unsigned i = 0; i < txns.size(); i++)
      {
         const BCTX& txn = *(txns[i].get());

         for (unsigned y = 0; y < txn.txins_.size(); y++)
         {
            auto& txin = txn.txins_[y];
            BinaryDataRef outHash(
               txn.data_ + txin.first, 32);

            auto utxoIter = utxoMap_.find(outHash);
            if (utxoIter == utxoMap_.end())
               continue;

            unsigned txOutId = READ_UINT32_LE(
               txn.data_ + txin.first + 32);

            auto idIter = utxoIter->second.find(txOutId);
            if (idIter == utxoIter->second.end())
               continue;

            //if we got this far, this txin consumes one of our utxos

            //create spent txout
            auto&& hgtx = DBUtils::getBlkDataKeyNoPrefix(
               header->getBlockHeight(), header->getDuplicateID());

            auto&& txinkey = DBUtils::getBlkDataKeyNoPrefix(
               header->getBlockHeight(), header->getDuplicateID(),
               i, y);

            StoredTxOut stxo = idIter->second;
            stxo.spentness_ = TXOUT_SPENT;
            stxo.spentByTxInKey_ = txinkey;

            //if this tx's hash was never pulled, let's add it to the stxo's
            //parent hash, in order to keep track of this tx in the hint db
            if (txn.txHash_.getSize() == 0)
               stxo.parentHash_ = move(txn.getHash());

            //add to ssh_
            auto& ssh = batch->ssh_[stxo.getScrAddress()];
            auto& subssh = ssh.subHistMap_[hgtx];

            //deal with txio count in subssh at serialization
            TxIOPair txio;
            auto&& txoutkey = stxo.getDBKey(false);
            txio.setTxOut(txoutkey);
            txio.setTxIn(txinkey);
            txio.setValue(stxo.getValue());
            subssh.txioMap_[txoutkey] = move(txio);
            
            //add to spentTxOuts_
            batch->spentTxOuts_.push_back(move(stxo));
         }
      }
   };

   //txout loop
   blockDataLoop(txoutParser);

   //done with txouts, fill the future flag and wait on the mutex 
   //to move to txins processing
   batch->flagUtxoScanDone();
   unique_lock<mutex> txinLock(batch->parseTxinMutex_);

   //txins loop
   blockDataLoop(txinParser);
}
Example no. 14
   // find the location of the first block that is not in @p bc
   BlockFilePosition findFirstUnrecognizedBlockHeader(
      Blockchain &bc
   ) 
   {
      map<HashString, BlockHeader> &allHeaders = bc.allHeaders();
      
      size_t index=0;
      
      for (; index < blkFiles_.size(); index++)
      {
         const BinaryData hash = getFirstHash(blkFiles_[index]);

         if (allHeaders.find(hash) == allHeaders.end())
         { // not found in this file
            if (index == 0)
               return { 0, 0 };
            
            break;
         }
      }
      
      if (index == 0)
         return { 0, 0 };
      index--;
      
      // ok, now "index" is for the last blkfile that we found a header in
      // now lets linearly search this file until we find an unrecognized blk
      
      BlockFilePosition foundAtPosition{ 0, 0 };
            
      bool foundTopBlock = false;
      auto topBlockHash = bc.top().getThisHash();

      const auto stopIfBlkHeaderRecognized =
      [&allHeaders, &foundAtPosition, &foundTopBlock, &topBlockHash] (
         const BinaryData &blockheader,
         const BlockFilePosition &pos,
         uint32_t blksize
      )
      {
         // always set our position so that eventually it's at the end
         foundAtPosition = pos;
         
         BlockHeader block;
         BinaryRefReader brr(blockheader);
         block.unserialize(brr);
         
         const HashString blockhash = block.getThisHash();
         auto bhIter = allHeaders.find(blockhash);
         
         if(bhIter == allHeaders.end())
            throw StopReading();

         if (bhIter->second.getThisHash() == topBlockHash)
            foundTopBlock = true;

         bhIter->second.setBlockFileNum(pos.first);
         bhIter->second.setBlockFileOffset(pos.second);
      };
      
      uint64_t returnedOffset = UINT64_MAX;
      try
      {
         returnedOffset = readHeadersFromFile(
            blkFiles_[index],
            0,
            stopIfBlkHeaderRecognized
         );
         
      }
      catch (StopReading&)
      {
         // we're fine
      }

      // but we never find the genesis block, because
      // it always appears in Blockchain even if unloaded, and
      // we need to load it
      if (foundAtPosition.first == 0 && foundAtPosition.second==293)
         return { 0, 0 };
      if (returnedOffset != UINT64_MAX)
         foundAtPosition.second = returnedOffset;


      if (!foundTopBlock)
      {
         LOGWARN << "Couldn't find top block hash in last seen blk file."
            " Searching for it further down the chain";

         //Couldn't find the top header in the last seen blk file. Since Core
         //0.10, this can be an indicator of missing hashes. Let's find the
         //top block header in the file.
         BlockFilePosition topBlockPos(0, 0);
         auto checkBlkHash = [&topBlockHash, &topBlockPos]
            (const BinaryData &rawBlock,
            const BlockFilePosition &pos,
            uint32_t blksize)->void
         {
            BlockHeader bhUnser(rawBlock);
            if (bhUnser.getThisHash() == topBlockHash)
            {
               topBlockPos = pos;
               throw StopReading();
            }
         };

         bool _foundTopBlock = false;
         int32_t fnum = blkFiles_.size();
         if (fnum > 0)
            fnum--;
         try
         {
            for (; fnum > -1; fnum--)
               readHeadersFromFile(blkFiles_[fnum], 0, 
                                   checkBlkHash);
         }
         catch (StopReading&)
         {
            _foundTopBlock = true;
            // we're fine
         }

         if (!_foundTopBlock)
         {
            //can't find the top header, let's just rescan all headers
            LOGERR << "Failed to find last known top block hash in "
               "blk files. Rescanning all headers";
            
            return BlockFilePosition(0, 0);
         }

         //Check this file to see if we are missing any block hashes in there
         auto& f = blkFiles_[foundAtPosition.first];
         try
         {
            readHeadersFromFile(f, 0, stopIfBlkHeaderRecognized);
         }
         catch (StopReading&)
         {
            //so we are indeed missing some block headers. Let's just scan the 
            //blocks folder for headers
            foundAtPosition.first = 0;
            foundAtPosition.second = 0;

            LOGWARN << "Inconsistent headers DB, attempting repairs";
         }
      }

      return foundAtPosition;
   }