void send_history_result(const std::error_code& ec,
    const blockchain::history_list& history,
    const incoming_message& request, queue_send_callback queue_send)
{
    // Each row is 88 bytes: output point (32 + 4), output height (4),
    // value (8), spend point (32 + 4) and spend height (4).
    constexpr size_t row_size = 36 + 4 + 8 + 36 + 4;
    data_chunk result(4 + row_size * history.size());
    auto serial = make_serializer(result.begin());
    write_error_code(serial, ec);
    for (const blockchain::history_row& row: history)
    {
        serial.write_hash(row.output.hash);
        serial.write_4_bytes(row.output.index);
        serial.write_4_bytes(row.output_height);
        serial.write_8_bytes(row.value);
        serial.write_hash(row.spend.hash);
        serial.write_4_bytes(row.spend.index);
        serial.write_4_bytes(row.spend_height);
    }
    BITCOIN_ASSERT(serial.iterator() == result.end());
    // TODO: Slows down queries!
    //log_debug(LOG_WORKER)
    //    << "*.fetch_history() finished. Sending response.";
    outgoing_message response(request, result);
    queue_send(response);
}
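// A minimal client-side sketch (not the actual obelisk client code) of
// decoding the payload built by send_history_result above. It assumes the
// same wire layout: a 4-byte error value followed by 88-byte rows. The
// deserializer helpers (make_deserializer, read_4_bytes, read_8_bytes,
// read_hash) mirror the serializer calls used above and are assumptions
// about the reading-side API.
blockchain::history_list decode_history_result(const data_chunk& payload)
{
    constexpr size_t row_size = 36 + 4 + 8 + 36 + 4;
    BITCOIN_ASSERT(payload.size() >= 4 &&
        (payload.size() - 4) % row_size == 0);
    auto deserial = make_deserializer(payload.begin(), payload.end());
    // The leading 4 bytes carry the error value written by write_error_code;
    // a real client would map a nonzero value to std::error_code and stop.
    deserial.read_4_bytes();
    const size_t row_count = (payload.size() - 4) / row_size;
    blockchain::history_list history;
    for (size_t i = 0; i < row_count; ++i)
    {
        blockchain::history_row row;
        row.output.hash = deserial.read_hash();
        row.output.index = deserial.read_4_bytes();
        row.output_height = deserial.read_4_bytes();
        row.value = deserial.read_8_bytes();
        row.spend.hash = deserial.read_hash();
        row.spend.index = deserial.read_4_bytes();
        row.spend_height = deserial.read_4_bytes();
        history.push_back(row);
    }
    return history;
}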
void history_fetched(const std::error_code& ec,
    const blockchain::history_list& history)
{
    if (ec)
    {
        log_error() << "Failed to fetch history: " << ec.message();
        return;
    }
#define LOG_RESULT "result"
    uint64_t total_recv = 0, balance = 0;
    for (const auto& row: history)
    {
        uint64_t value = row.value;
        BITCOIN_ASSERT(value >= 0);
        // Every output counts towards the total received; only outputs
        // without a recorded spend (null spend hash) count towards balance.
        total_recv += value;
        if (row.spend.hash == null_hash)
            balance += value;
    }
    log_debug(LOG_RESULT) << "Queried " << history.size()
        << " outpoints, values and their spends.";
    log_debug(LOG_RESULT) << "Total received: " << total_recv;
    log_debug(LOG_RESULT) << "Balance: " << balance;
    log_info(LOG_RESULT) << "History fetched";
}
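// A minimal sketch of how history_fetched could be attached as the
// completion handler of an asynchronous address-history query. The
// fullnode_interface type and its address.fetch_history(payaddr, handler)
// member are assumptions about the client API; only the handler signature
// (std::error_code, blockchain::history_list) is taken from the code above.
void request_history(obelisk::fullnode_interface& fullnode,
    const payment_address& payaddr)
{
    fullnode.address.fetch_history(payaddr, history_fetched);
}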