/// Emit the <rows_before_limit_at_least> XML element, but only when a LIMIT
/// was actually applied to the query result.
void XMLRowOutputStream::writeRowsBeforeLimitAtLeast()
{
    if (!applied_limit)
        return;

    auto & out = *ostr;
    writeCString("\t<rows_before_limit_at_least>", out);
    writeIntText(rows_before_limit, out);
    writeCString("</rows_before_limit_at_least>\n", out);
}
/// Emit the "rows_before_limit_at_least" JSON field, but only when a LIMIT
/// was actually applied to the query result.
void JSONRowOutputStream::writeRowsBeforeLimitAtLeast()
{
    if (!applied_limit)
        return;

    auto & out = *ostr;
    writeCString(",\n\n", out);
    writeCString("\t\"rows_before_limit_at_least\": ", out);
    writeIntText(rows_before_limit, out);
}
/// Finish pretty-space output: report truncation if the row limit was hit,
/// reset the row counter, then append totals and extremes.
void PrettySpaceBlockOutputStream::writeSuffix()
{
    const bool was_truncated = total_rows >= max_rows;
    if (was_truncated)
    {
        writeCString("\nShowed first ", ostr);
        writeIntText(max_rows, ostr);
        writeCString(".\n", ostr);
    }

    total_rows = 0;

    writeTotals();
    writeExtremes();
}
void PrettyCompactMonoBlockOutputStream::writeSuffix() { if (blocks.empty()) return; Widths_t max_widths; Widths_t name_widths; for (size_t i = 0; i < blocks.size(); ++i) calculateWidths(blocks[i], max_widths, name_widths); writeHeader(blocks.front(), max_widths, name_widths); size_t row_count = 0; for (size_t block_id = 0; block_id < blocks.size() && row_count < max_rows; ++block_id) { const Block & block = blocks[block_id]; size_t rows = block.rows(); for (size_t i = 0; i < rows && row_count < max_rows; ++i) { writeRow(i, block, max_widths, name_widths); ++row_count; } } writeBottom(max_widths); if (total_rows >= max_rows) { writeCString(" Showed first ", ostr); writeIntText(max_rows, ostr); writeCString(".\n", ostr); } total_rows = 0; if (totals) { writeCString("\nTotals:\n", ostr); PrettyCompactBlockOutputStream::write(totals); } if (extremes) { writeCString("\nExtremes:\n", ostr); PrettyCompactBlockOutputStream::write(extremes); } }
/// Write the "Row N:" header followed by a matching-width underline of box
/// drawing characters. Output is suppressed once the pretty-print row limit
/// is exceeded (the suffix reports the truncation instead).
void VerticalRowOutputStream::writeRowStartDelimiter()
{
    ++row_number;

    if (row_number > format_settings.pretty.max_rows)
        return;

    writeCString("Row ", ostr);
    writeIntText(row_number, ostr);
    writeCString(":\n", ostr);

    /// NOTE(review): log10(row_number + 1) + 1 overestimates the digit count
    /// by one exactly at row numbers 9, 99, 999, ... (log10 of a power of
    /// ten), so the underline is one character too long there. Preserved
    /// as-is to keep output byte-identical.
    size_t width = log10(row_number + 1) + 1 + strlen("Row :");
    for (size_t i = 0; i < width; ++i)
        writeCString("─", ostr);
    writeChar('\n', ostr);
}
/// Finish vertical output: note truncation if rows were cut off by the
/// pretty-print limit, then append totals/extremes when present.
void VerticalRowOutputStream::writeSuffix()
{
    const bool was_truncated = row_number > format_settings.pretty.max_rows;
    if (was_truncated)
    {
        writeCString("Showed first ", ostr);
        writeIntText(format_settings.pretty.max_rows, ostr);
        writeCString(".\n", ostr);
    }

    const bool has_appendix = totals || extremes;
    if (has_appendix)
    {
        writeCString("\n", ostr);
        writeTotals();
        writeExtremes();
    }
}
/// Close the XML document: end the <data> section, append totals/extremes,
/// the row count, the rows-before-limit element, and the closing </result>
/// tag, then flush the buffer.
void XMLRowOutputStream::writeSuffix()
{
    auto & out = *ostr;

    writeCString("\t</data>\n", out);

    writeTotals();
    writeExtremes();

    writeCString("\t<rows>", out);
    writeIntText(row_count, out);
    writeCString("</rows>\n", out);

    writeRowsBeforeLimitAtLeast();

    writeCString("</result>\n", out);
    out.next();
}
/// Close the JSON document: end the data array, append totals/extremes, the
/// "rows" field, the rows-before-limit field, optional statistics, and the
/// closing brace, then flush the buffer.
void JSONRowOutputStream::writeSuffix()
{
    auto & out = *ostr;

    writeChar('\n', out);
    writeCString("\t]", out);

    writeTotals();
    writeExtremes();

    writeCString(",\n\n", out);
    writeCString("\t\"rows\": ", out);
    writeIntText(row_count, out);

    writeRowsBeforeLimitAtLeast();

    if (write_statistics)
        writeStatistics();

    writeChar('\n', out);
    writeCString("}\n", out);
    out.next();
}
void FileChecker::save() const { { WriteBufferFromFile out(tmp_files_info_path); /// Столь сложная структура JSON-а - для совместимости со старым форматом. writeCString("{\"yandex\":{", out); for (auto it = map.begin(); it != map.end(); ++it) { if (it != map.begin()) writeString(",", out); /// escapeForFileName на самом деле не нужен. Но он оставлен для совместимости со старым кодом. writeJSONString(escapeForFileName(it->first), out); writeString(":{\"size\":\"", out); writeIntText(it->second, out); writeString("\"}", out); } writeCString("}}", out); out.next(); } Poco::File current_file(files_info_path); if (current_file.exists()) { std::string old_file_name = files_info_path + ".old"; current_file.renameTo(old_file_name); Poco::File(tmp_files_info_path).renameTo(files_info_path); Poco::File(old_file_name).remove(); } else Poco::File(tmp_files_info_path).renameTo(files_info_path); }
/// Finalize the data part being written: flush the primary index and all
/// column streams, collect their checksums, write the auxiliary metadata
/// files (partition / minmax index / count.txt for new-format parts, plus
/// columns.txt and checksums.txt), and fill in the in-memory fields of
/// `new_part`.
///
/// new_part                     part object whose fields are populated here.
/// total_column_list            full column list for columns.txt; when null,
///                              the stream's own columns_list is used.
/// additional_column_checksums  externally computed checksums to start from
///                              (moved from, so the caller's object is consumed).
void MergedBlockOutputStream::writeSuffixAndFinalizePart(
    MergeTreeData::MutableDataPartPtr & new_part,
    const NamesAndTypesList * total_column_list,
    MergeTreeData::DataPart::Checksums * additional_column_checksums)
{
    if (!total_column_list)
        total_column_list = &columns_list;

    /// Finish write and get checksums.
    MergeTreeData::DataPart::Checksums checksums;

    if (additional_column_checksums)
        checksums = std::move(*additional_column_checksums);

    if (index_stream)
    {
        /// Flush the primary index and record its size/hash before releasing the stream.
        index_stream->next();
        checksums.files["primary.idx"].file_size = index_stream->count();
        checksums.files["primary.idx"].file_hash = index_stream->getHash();
        index_stream = nullptr;
    }

    /// Finalize each column stream and merge its checksums into the part's set.
    for (ColumnStreams::iterator it = column_streams.begin(); it != column_streams.end(); ++it)
    {
        it->second->finalize();
        it->second->addToChecksums(checksums);
    }

    column_streams.clear();

    if (storage.format_version >= MERGE_TREE_DATA_MIN_FORMAT_VERSION_WITH_CUSTOM_PARTITIONING)
    {
        /// New-format parts additionally store partition, minmax index and row-count files.
        new_part->partition.store(storage, part_path, checksums);
        if (new_part->minmax_idx.initialized)
            new_part->minmax_idx.store(storage, part_path, checksums);

        WriteBufferFromFile count_out(part_path + "count.txt", 4096);
        HashingWriteBuffer count_out_hashing(count_out);
        writeIntText(rows_count, count_out_hashing);
        count_out_hashing.next();
        checksums.files["count.txt"].file_size = count_out_hashing.count();
        checksums.files["count.txt"].file_hash = count_out_hashing.getHash();
    }

    {
        /// Write a file with a description of columns.
        WriteBufferFromFile out(part_path + "columns.txt", 4096);
        total_column_list->writeText(out);
    }

    {
        /// Write file with checksums.
        WriteBufferFromFile out(part_path + "checksums.txt", 4096);
        checksums.write(out);
    }

    /// Populate the in-memory part object; index columns are moved, not copied.
    new_part->rows_count = rows_count;
    new_part->marks_count = marks_count;
    new_part->modification_time = time(nullptr);
    new_part->columns = *total_column_list;
    new_part->index.assign(std::make_move_iterator(index_columns.begin()), std::make_move_iterator(index_columns.end()));
    new_part->checksums = checksums;
    new_part->bytes_on_disk = MergeTreeData::DataPart::calculateTotalSizeOnDisk(new_part->getFullPath());
}