bool VelocyPackHelper::velocyPackToFile(char const* filename, VPackSlice const& slice, bool syncFile) { std::string const tmp = std::string(filename) + ".tmp"; // remove a potentially existing temporary file if (TRI_ExistsFile(tmp.c_str())) { TRI_UnlinkFile(tmp.c_str()); } int fd = TRI_CREATE(tmp.c_str(), O_CREAT | O_TRUNC | O_EXCL | O_RDWR | TRI_O_CLOEXEC, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP); if (fd < 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG(ERR) << "cannot create json file '" << tmp << "': " << TRI_LAST_ERROR_STR; return false; } if (!PrintVelocyPack(fd, slice, true)) { TRI_CLOSE(fd); TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG(ERR) << "cannot write to json file '" << tmp << "': " << TRI_LAST_ERROR_STR; TRI_UnlinkFile(tmp.c_str()); return false; } if (syncFile) { LOG(TRACE) << "syncing tmp file '" << tmp << "'"; if (!TRI_fsync(fd)) { TRI_CLOSE(fd); TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG(ERR) << "cannot sync saved json '" << tmp << "': " << TRI_LAST_ERROR_STR; TRI_UnlinkFile(tmp.c_str()); return false; } } int res = TRI_CLOSE(fd); if (res < 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG(ERR) << "cannot close saved file '" << tmp << "': " << TRI_LAST_ERROR_STR; TRI_UnlinkFile(tmp.c_str()); return false; } res = TRI_RenameFile(tmp.c_str(), filename); if (res != TRI_ERROR_NO_ERROR) { TRI_set_errno(res); LOG(ERR) << "cannot rename saved file '" << tmp << "' to '" << filename << "': " << TRI_LAST_ERROR_STR; TRI_UnlinkFile(tmp.c_str()); return false; } return true; }
////////////////////////////////////////////////////////////////////////////////
/// @brief writes the complete content of a StringBuffer to a file, creating
/// or truncating it. throws on open or write failure.
/// @param filename  destination path
/// @param content   buffer to write
/// @throws via THROW_FILE_OPEN_ERROR / THROW_FILE_FUNC_ERROR
////////////////////////////////////////////////////////////////////////////////

void spit (string const& filename, StringBuffer const& content) {
  int fd = TRI_CREATE(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR | S_IRGRP);

  if (fd == -1) {
    LOG_TRACE("open failed for '%s' with %s", filename.c_str(), strerror(errno));
    THROW_FILE_OPEN_ERROR("open", filename, "O_WRONLY | O_CREAT | O_TRUNC", errno);
  }

  char const* ptr = content.c_str();
  size_t len = content.length();

  // loop because TRI_WRITE() may perform short writes
  while (0 < len) {
    ssize_t n = TRI_WRITE(fd, ptr, len);

    if (n < 1) {
      // BUG FIX: save errno before TRI_CLOSE(), which may overwrite it;
      // the original code logged/threw the close() error instead of the
      // write error
      int savedErrno = errno;
      TRI_CLOSE(fd);
      LOG_TRACE("write failed for '%s' with %s and result %d on %d", filename.c_str(), strerror(savedErrno), (int) n, fd);
      THROW_FILE_FUNC_ERROR("write", "", savedErrno);
    }

    ptr += n;
    len -= n;
  }

  TRI_CLOSE(fd);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief reads the complete content of a file and returns it as a string.
/// @param filename  path of the file to read
/// @return the file content
/// @throws via THROW_FILE_OPEN_ERROR / THROW_FILE_FUNC_ERROR on failure
////////////////////////////////////////////////////////////////////////////////

string slurp (string const& filename) {
  int fd = TRI_OPEN(filename.c_str(), O_RDONLY);

  if (fd == -1) {
    TRI_set_errno(errno);
    THROW_FILE_OPEN_ERROR("open", filename, "O_RDONLY", errno);
  }

  char buffer[10240];
  StringBuffer result(TRI_CORE_MEM_ZONE);

  // read chunks until EOF (n == 0)
  while (true) {
    ssize_t n = TRI_READ(fd, buffer, sizeof(buffer));

    if (n == 0) {
      break;
    }

    if (n < 0) {
      // BUG FIX: capture errno before TRI_CLOSE() -- closing the descriptor
      // may overwrite errno, so the original code threw the close() error
      // instead of the read error
      int savedErrno = errno;
      TRI_set_errno(TRI_ERROR_SYS_ERROR);
      LOG_TRACE("read failed for '%s' with %s and result %d on fd %d", filename.c_str(), strerror(savedErrno), (int) n, fd);
      TRI_CLOSE(fd);
      THROW_FILE_FUNC_ERROR("read", "", savedErrno);
    }

    result.appendText(buffer, n);
  }

  TRI_CLOSE(fd);

  string r(result.c_str(), result.length());
  return r;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief reads the complete content of a file into the given StringBuffer.
/// @param filename  path of the file to read
/// @param result    output buffer; file content is appended to it
/// @throws via THROW_FILE_OPEN_ERROR / THROW_FILE_FUNC_ERROR on failure
////////////////////////////////////////////////////////////////////////////////

void slurp (string const& filename, StringBuffer& result) {
  int fd = TRI_OPEN(filename.c_str(), O_RDONLY);

  if (fd == -1) {
    THROW_FILE_OPEN_ERROR("open", filename, "O_RDONLY", errno);
  }

  // reserve space in the output buffer
  off_t fileSize = size(filename);

  if (fileSize > 0) {
    result.reserve((size_t) fileSize);
  }

  char buffer[10240];

  // read chunks until EOF (n == 0)
  while (true) {
    ssize_t n = TRI_READ(fd, buffer, sizeof(buffer));

    if (n == 0) {
      break;
    }

    if (n < 0) {
      // BUG FIX: save errno before TRI_CLOSE() -- the original code closed
      // first and then read errno for the log message and the exception,
      // so a close() failure masked the actual read error
      int savedErrno = errno;
      TRI_CLOSE(fd);
      LOG_TRACE("read failed for '%s' with %s and result %d on fd %d", filename.c_str(), strerror(savedErrno), (int) n, fd);
      THROW_FILE_FUNC_ERROR("read", "", savedErrno);
    }

    result.appendText(buffer, n);
  }

  TRI_CLOSE(fd);
}
bool TRI_SaveJson (char const* filename, TRI_json_t const* object) { bool ok; char* tmp; int fd; int res; ssize_t m; tmp = TRI_Concatenate2String(filename, ".tmp"); if (tmp == NULL) { return false; } fd = TRI_CREATE(tmp, O_CREAT | O_EXCL | O_RDWR, S_IRUSR | S_IWUSR); if (fd < 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot create json file '%s': '%s'", tmp, TRI_LAST_ERROR_STR); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } ok = TRI_PrintJson(fd, object); if (! ok) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot write to json file '%s': '%s'", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } m = TRI_WRITE(fd, "\n", 1); if (m <= 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot write to json file '%s': '%s'", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } ok = TRI_fsync(fd); if (! ok) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot sync saved json '%s': '%s'", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } res = TRI_CLOSE(fd); if (res < 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot close saved file '%s': '%s'", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } res = TRI_RenameFile(tmp, filename); if (res != TRI_ERROR_NO_ERROR) { LOG_ERROR("cannot rename saved file '%s' to '%s': '%s'", tmp, filename, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return res; } TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return ok; }
bool TRI_SaveJson (char const* filename, TRI_json_t const* object, bool syncFile) { char* tmp; int fd; int res; tmp = TRI_Concatenate2String(filename, ".tmp"); if (tmp == NULL) { return false; } // remove a potentially existing temporary file if (TRI_ExistsFile(tmp)) { TRI_UnlinkFile(tmp); } fd = TRI_CREATE(tmp, O_CREAT | O_TRUNC | O_EXCL | O_RDWR, S_IRUSR | S_IWUSR); if (fd < 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot create json file '%s': %s", tmp, TRI_LAST_ERROR_STR); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } if (! TRI_PrintJson(fd, object, true)) { TRI_CLOSE(fd); TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot write to json file '%s': %s", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } if (syncFile) { LOG_TRACE("syncing tmp file '%s'", tmp); if (! TRI_fsync(fd)) { TRI_CLOSE(fd); TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot sync saved json '%s': %s", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } } res = TRI_CLOSE(fd); if (res < 0) { TRI_set_errno(TRI_ERROR_SYS_ERROR); LOG_ERROR("cannot close saved file '%s': %s", tmp, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } res = TRI_RenameFile(tmp, filename); if (res != TRI_ERROR_NO_ERROR) { TRI_set_errno(res); LOG_ERROR("cannot rename saved file '%s' to '%s': %s", tmp, filename, TRI_LAST_ERROR_STR); TRI_UnlinkFile(tmp); TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return false; } TRI_FreeString(TRI_CORE_MEM_ZONE, tmp); return true; }
////////////////////////////////////////////////////////////////////////////////
/// @brief imports a delimited file (CSV or TSV) into the given collection.
/// reads from stdin when fileName is "-".
/// @param collectionName  target collection
/// @param fileName        input file, or "-" for stdin
/// @param typeImport      CSV (quote char honored) or TSV (no quote char)
/// @return true on success; on failure _errorMessage / _hasError are set
////////////////////////////////////////////////////////////////////////////////

bool ImportHelper::importDelimited (string const& collectionName,
                                    string const& fileName,
                                    DelimitedImportType typeImport) {
  _collectionName = collectionName;
  _firstLine = "";
  _numberLines = 0;
  _numberOk = 0;
  _numberError = 0;
  _outputBuffer.clear();
  _lineBuffer.clear();
  _errorMessage = "";
  _hasError = false;

  // read and convert
  int fd;
  int64_t totalLength;

  if (fileName == "-") {
    // we don't have a filesize
    totalLength = 0;
    fd = STDIN_FILENO;
  }
  else {
    // read filesize
    totalLength = TRI_SizeFile(fileName.c_str());
    fd = TRI_OPEN(fileName.c_str(), O_RDONLY);

    if (fd < 0) {
      _errorMessage = TRI_LAST_ERROR_STR;
      return false;
    }
  }

  // progress display control variables
  int64_t totalRead = 0;
  double nextProgress = ProgressStep;

  size_t separatorLength;
  char* separator = TRI_UnescapeUtf8StringZ(TRI_UNKNOWN_MEM_ZONE, _separator.c_str(), _separator.size(), &separatorLength);

  if (separator == 0) {
    if (fd != STDIN_FILENO) {
      TRI_CLOSE(fd);
    }
    _errorMessage = "out of memory";
    return false;
  }

  TRI_csv_parser_t parser;

  TRI_InitCsvParser(&parser, TRI_UNKNOWN_MEM_ZONE, ProcessCsvBegin, ProcessCsvAdd, ProcessCsvEnd);
  // only the first character of the (unescaped) separator string is used
  TRI_SetSeparatorCsvParser(&parser, separator[0]);

  // in csv, we'll use the quote char if set
  // in tsv, we do not use the quote char
  if (typeImport == ImportHelper::CSV && _quote.size() > 0) {
    TRI_SetQuoteCsvParser(&parser, _quote[0], true);
  }
  else {
    TRI_SetQuoteCsvParser(&parser, '\0', false);
  }
  parser._dataAdd = this;
  _rowOffset = 0;
  _rowsRead = 0;

  char buffer[32768];

  while (! _hasError) {
    ssize_t n = TRI_READ(fd, buffer, sizeof(buffer));

    if (n < 0) {
      // BUG FIX: capture the error message BEFORE any cleanup call --
      // TRI_Free()/TRI_DestroyCsvParser()/TRI_CLOSE() may overwrite the
      // last-error state, so the original code could report a bogus error
      _errorMessage = TRI_LAST_ERROR_STR;
      TRI_Free(TRI_UNKNOWN_MEM_ZONE, separator);
      TRI_DestroyCsvParser(&parser);
      if (fd != STDIN_FILENO) {
        TRI_CLOSE(fd);
      }
      return false;
    }
    else if (n == 0) {
      break;
    }

    totalRead += (int64_t) n;
    reportProgress(totalLength, totalRead, nextProgress);

    TRI_ParseCsvString2(&parser, buffer, n);
  }

  // flush whatever is still buffered
  if (_outputBuffer.length() > 0) {
    sendCsvBuffer();
  }

  TRI_DestroyCsvParser(&parser);
  TRI_Free(TRI_UNKNOWN_MEM_ZONE, separator);

  if (fd != STDIN_FILENO) {
    TRI_CLOSE(fd);
  }

  _outputBuffer.clear();
  return !_hasError;
}
bool ImportHelper::importJson (const string& collectionName, const string& fileName) { _collectionName = collectionName; _firstLine = ""; _numberLines = 0; _numberOk = 0; _numberError = 0; _outputBuffer.clear(); _errorMessage = ""; _hasError = false; // read and convert int fd; int64_t totalLength; if (fileName == "-") { // we don't have a filesize totalLength = 0; fd = STDIN_FILENO; } else { // read filesize totalLength = TRI_SizeFile(fileName.c_str()); fd = TRI_OPEN(fileName.c_str(), O_RDONLY); if (fd < 0) { _errorMessage = TRI_LAST_ERROR_STR; return false; } } bool isArray = false; bool checkedFront = false; // progress display control variables int64_t totalRead = 0; double nextProgress = ProgressStep; static const int BUFFER_SIZE = 32768; while (! _hasError) { // reserve enough room to read more data if (_outputBuffer.reserve(BUFFER_SIZE) == TRI_ERROR_OUT_OF_MEMORY) { _errorMessage = TRI_errno_string(TRI_ERROR_OUT_OF_MEMORY); if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } return false; } // read directly into string buffer ssize_t n = TRI_READ(fd, _outputBuffer.end(), BUFFER_SIZE - 1); if (n < 0) { _errorMessage = TRI_LAST_ERROR_STR; if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } return false; } else if (n == 0) { // we're done break; } // adjust size of the buffer by the size of the chunk we just read _outputBuffer.increaseLength(n); if (! checkedFront) { // detect the import file format (single lines with individual JSON objects // or a JSON array with all documents) char const* p = _outputBuffer.begin(); char const* e = _outputBuffer.end(); while (p < e && (*p == ' ' || *p == '\r' || *p == '\n' || *p == '\t' || *p == '\f' || *p == '\b')) { ++p; } isArray = (*p == '['); checkedFront = true; } totalRead += (int64_t) n; reportProgress(totalLength, totalRead, nextProgress); if (_outputBuffer.length() > _maxUploadSize) { if (isArray) { if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } _errorMessage = "import file is too big. 
please increase the value of --batch-size (currently " + StringUtils::itoa(_maxUploadSize) + ")"; return false; } // send all data before last '\n' char const* first = _outputBuffer.c_str(); char* pos = (char*) memrchr(first, '\n', _outputBuffer.length()); if (pos != 0) { size_t len = pos - first + 1; sendJsonBuffer(first, len, isArray); _outputBuffer.erase_front(len); } } } if (_outputBuffer.length() > 0) { sendJsonBuffer(_outputBuffer.c_str(), _outputBuffer.length(), isArray); } _numberLines = _numberError + _numberOk; if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } _outputBuffer.clear(); return ! _hasError; }
bool ImportHelper::importJson (const string& collectionName, const string& fileName) { _collectionName = collectionName; _firstLine = ""; _numberLines = 0; _numberOk = 0; _numberError = 0; _outputBuffer.clear(); _errorMessage = ""; _hasError = false; // read and convert int fd; int64_t totalLength; if (fileName == "-") { // we don't have a filesize totalLength = 0; fd = STDIN_FILENO; } else { // read filesize totalLength = TRI_SizeFile(fileName.c_str()); fd = TRI_OPEN(fileName.c_str(), O_RDONLY); if (fd < 0) { _errorMessage = TRI_LAST_ERROR_STR; return false; } } char buffer[32768]; bool isArray = false; bool checkedFront = false; // progress display control variables int64_t totalRead = 0; double nextProgress = ProgressStep; while (! _hasError) { ssize_t n = TRI_READ(fd, buffer, sizeof(buffer)); if (n < 0) { _errorMessage = TRI_LAST_ERROR_STR; if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } return false; } else if (n == 0) { // we're done break; } if (! checkedFront) { // detect the import file format (single lines with individual JSON objects // or a JSON array with all documents) const string firstChar = StringUtils::lTrim(string(buffer, n), "\r\n\t\f\b ").substr(0, 1); isArray = (firstChar == "["); checkedFront = true; } _outputBuffer.appendText(buffer, n); totalRead += (int64_t) n; reportProgress(totalLength, totalRead, nextProgress); if (_outputBuffer.length() > _maxUploadSize) { if (isArray) { if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } _errorMessage = "import file is too big."; return false; } // send all data before last '\n' const char* first = _outputBuffer.c_str(); char* pos = (char*) memrchr(first, '\n', _outputBuffer.length()); if (pos != 0) { size_t len = pos - first + 1; sendJsonBuffer(first, len, isArray); _outputBuffer.erase_front(len); } } } if (_outputBuffer.length() > 0) { sendJsonBuffer(_outputBuffer.c_str(), _outputBuffer.length(), isArray); } _numberLines = _numberError + _numberOk; if (fd != STDIN_FILENO) { TRI_CLOSE(fd); } _outputBuffer.clear(); 
return ! _hasError; }
// destructor: close the random-source descriptor.
// BUG FIX: only close if the descriptor is valid -- the unguarded version
// called TRI_CLOSE(-1) when opening had failed (cf. the guarded variant of
// this destructor elsewhere in this file)
~RandomDeviceCombined () {
  if (fd >= 0) {
    TRI_CLOSE(fd);
  }
}
// destructor: close the random-source descriptor.
// BUG FIX: only close if the descriptor is valid -- the unguarded version
// called TRI_CLOSE(-1) when opening had failed (cf. the guarded variant of
// this destructor elsewhere in this file)
~RandomDeviceDirect () {
  if (fd >= 0) {
    TRI_CLOSE(fd);
  }
}
// destructor: release the random-source descriptor, if one was opened
~RandomDeviceCombined () {
  if (fd < 0) {
    // nothing was opened; nothing to release
    return;
  }
  TRI_CLOSE(fd);
}
// destructor: release the random-source descriptor, if one was opened
~RandomDeviceDirect () {
  if (fd < 0) {
    // nothing was opened; nothing to release
    return;
  }
  TRI_CLOSE(fd);
}