TEST_F(GPWriterTest, ReGenerateKeyName) {
    string url = "https://s3-us-west-2.amazonaws.com/s3test.pivotal.io/dataset1/normal";
    S3Params p = InitConfig(url + " config=data/s3test.conf");
    p.setAutoCompress(false);

    MockS3RESTfulService mockRESTfulService(p);
    MockGPWriter gpwriter(p, &mockRESTfulService);

    // The first generated key already exists (200), so the writer regenerates
    // the name and probes again (404).
    EXPECT_CALL(mockRESTfulService, head(_, _)).WillOnce(Return(200)).WillOnce(Return(404));

    uint8_t xml[] =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
        "<InitiateMultipartUploadResult"
        " xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">"
        "<Bucket>example-bucket</Bucket>"
        "<Key>example-object</Key>"
        "<UploadId>VXBsb2FkIElEIGZvciA2aWWpbmcncyBteS1tb3ZpZS5tMnRzIHVwbG9hZA</UploadId>"
        "</InitiateMultipartUploadResult>";
    vector<uint8_t> raw(xml, xml + sizeof(xml) - 1);
    Response response(RESPONSE_OK, raw);
    EXPECT_CALL(mockRESTfulService, post(_, _, vector<uint8_t>())).WillOnce(Return(response));

    gpwriter.open(p);
    // expect restfulService->head() to have been called twice
}
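// The trailing comment only states the call-count expectation in prose; gmock
// can enforce it explicitly. A minimal variant of the expectation inside the
// test body above (an alternative formulation, not an additional expectation):
EXPECT_CALL(mockRESTfulService, head(_, _))
    .Times(2)
    .WillOnce(Return(200))
    .WillOnce(Return(404));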
void CheckEssentialConfig(const S3Params& params) {
    if (params.getCred().accessID.empty()) {
        S3_CHECK_OR_DIE(false, S3ConfigError, "\"FATAL: access id not set\"", "accessid");
    }

    if (params.getCred().secret.empty()) {
        S3_CHECK_OR_DIE(false, S3ConfigError, "\"FATAL: secret id not set\"", "secret");
    }

    if (s3ext_segnum <= 0) {
        S3_CHECK_OR_DIE(false, S3ConfigError, "\"FATAL: segment info is invalid\"", "segment");
    }
}
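// Elsewhere in this section (e.g. S3KeyReader::open) the condition is passed
// to S3_CHECK_OR_DIE directly rather than wrapped in an if/false pair.
// Assuming the macro treats its first argument purely as the pass/fail test,
// the first check above could equivalently be written as:
S3_CHECK_OR_DIE(!params.getCred().accessID.empty(), S3ConfigError,
                "\"FATAL: access id not set\"", "accessid");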
S3Params S3BucketReader::constructReaderParams(BucketContent& key) {
    // URI-encode the key name but leave the "/" separators unencoded:
    // "/encoded_path/encoded_name"
    string keyEncoded = UriEncode(key.getName());
    FindAndReplace(keyEncoded, "%2F", "/");

    S3Params readerParams = this->params.setPrefix(keyEncoded);
    readerParams.setKeySize(key.getSize());

    S3DEBUG("key: %s, size: %" PRIu64, readerParams.getS3Url().getFullUrlForCurl().c_str(),
            readerParams.getKeySize());

    return readerParams;
}
// Invoked by s3_export(); must be exception safe.
GPWriter* writer_init(const char* url_with_options, const char* format) {
    GPWriter* writer = NULL;
    s3extErrorMessage.clear();

    try {
        if (!url_with_options) {
            return NULL;
        }

        string urlWithOptions(url_with_options);

        S3Params params = InitConfig(urlWithOptions);

        InitRemoteLog();

        // Prepare memory to be used for the thread chunk buffers.
        PrepareS3MemContext(params);

        string extName = params.isAutoCompress() ? string(format) + ".gz" : format;
        writer = new GPWriter(params, extName);
        if (writer == NULL) {
            return NULL;
        }

        memoryContextHolder = new S3MemoryContext(params.getMemoryContext());

        writer->open(params);
        return writer;
    } catch (S3Exception& e) {
        if (writer != NULL) {
            delete writer;
            delete memoryContextHolder;
        }
        s3extErrorMessage =
            "writer_init caught a " + e.getType() + " exception: " + e.getFullMessage();
        S3ERROR("writer_init caught %s: %s", e.getType().c_str(), s3extErrorMessage.c_str());
        return NULL;
    } catch (...) {
        if (writer != NULL) {
            delete writer;
            delete memoryContextHolder;
        }
        S3ERROR("Caught an unexpected exception.");
        s3extErrorMessage = "Caught an unexpected exception.";
        return NULL;
    }
}
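// writer_init() must not let a C++ exception escape into the calling C code,
// so every failure path is converted into a NULL return plus an error string.
// Below is a minimal, self-contained sketch of that same boundary pattern;
// the names (Widget, widget_init, widget_error) are illustrative and are not
// part of the gpcloud sources.
#include <cstddef>
#include <memory>
#include <stdexcept>
#include <string>

struct Widget {
    explicit Widget(const std::string& url) {
        if (url.empty()) throw std::invalid_argument("empty url");
    }
};

static std::string widget_error;  // last error, analogous to s3extErrorMessage

extern "C" Widget* widget_init(const char* url) {
    widget_error.clear();
    try {
        if (url == NULL) {
            return NULL;
        }
        // unique_ptr keeps the partially constructed object exception safe
        // until ownership is handed back across the C boundary.
        std::unique_ptr<Widget> w(new Widget(url));
        return w.release();
    } catch (const std::exception& e) {
        widget_error = std::string("widget_init caught: ") + e.what();
        return NULL;
    } catch (...) {
        widget_error = "widget_init caught an unexpected exception";
        return NULL;
    }
}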
void S3KeyReader::open(const S3Params& params) {
    S3_CHECK_OR_DIE(this->s3Interface != NULL, S3RuntimeError, "s3Interface must not be NULL");

    this->sharedError = false;
    this->numOfChunks = params.getNumOfChunks();
    S3_CHECK_OR_DIE(this->numOfChunks > 0, S3RuntimeError, "numOfChunks must not be zero");

    this->offsetMgr.setKeySize(params.getKeySize());
    this->offsetMgr.setChunkSize(params.getChunkSize());

    S3_CHECK_OR_DIE(params.getChunkSize() > 0, S3RuntimeError,
                    "chunk size must be greater than zero");

    // Reserve capacity up front so the chunk buffers are constructed in place
    // without reallocation while the vector is being filled.
    this->chunkBuffers.reserve(this->numOfChunks);

    for (uint64_t i = 0; i < this->numOfChunks; i++) {
        this->chunkBuffers.emplace_back(params.getS3Url(), *this, params.getMemoryContext());
    }

    // Spawn one download thread per chunk buffer; pointers into chunkBuffers
    // are taken only after the vector has stopped growing.
    for (uint64_t i = 0; i < this->numOfChunks; i++) {
        this->chunkBuffers[i].setS3InterfaceService(this->s3Interface);

        pthread_t thread;
        pthread_create(&thread, NULL, DownloadThreadFunc, &this->chunkBuffers[i]);
        this->threads.push_back(thread);
    }
}
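// A self-contained sketch of the same fan-out pattern used in
// S3KeyReader::open(): build all per-chunk state first, then hand each worker
// thread a stable pointer into the vector. ChunkJob and DownloadWorker are
// illustrative names, not the gpcloud types.
#include <pthread.h>
#include <cstdio>
#include <vector>

struct ChunkJob {
    int index;
};

static void* DownloadWorker(void* arg) {
    ChunkJob* job = static_cast<ChunkJob*>(arg);
    std::printf("downloading chunk %d\n", job->index);
    return NULL;
}

int main() {
    const size_t numOfChunks = 4;

    std::vector<ChunkJob> jobs;
    jobs.reserve(numOfChunks);  // avoid reallocation once elements exist
    for (size_t i = 0; i < numOfChunks; i++) {
        jobs.push_back(ChunkJob{static_cast<int>(i)});
    }

    // Addresses of elements are taken only after the vector stops growing,
    // so each &jobs[i] stays valid for the lifetime of its thread.
    std::vector<pthread_t> threads;
    for (size_t i = 0; i < numOfChunks; i++) {
        pthread_t t;
        pthread_create(&t, NULL, DownloadWorker, &jobs[i]);
        threads.push_back(t);
    }

    for (size_t i = 0; i < threads.size(); i++) {
        pthread_join(threads[i], NULL);
    }
    return 0;
}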
S3RESTfulService::S3RESTfulService(const S3Params& params)
    : s3MemContext(const_cast<S3MemoryContext&>(params.getMemoryContext())) {
    // curl_global_init() is not thread safe, so it must NOT be called while
    // any other threads are running; in particular, never call it from a
    // worker thread.
    curl_global_init(CURL_GLOBAL_ALL);

    this->lowSpeedLimit = params.getLowSpeedLimit();
    this->lowSpeedTime = params.getLowSpeedTime();
    this->debugCurl = params.isDebugCurl();
    this->chunkBufferSize = params.getChunkSize();
    this->verifyCert = params.isVerifyCert();
}
void S3CommonReader::open(const S3Params& params) {
    this->keyReader.setS3InterfaceService(s3InterfaceService);

    S3CompressionType compressionType =
        s3InterfaceService->checkCompressionType(params.getS3Url());

    switch (compressionType) {
        case S3_COMPRESSION_GZIP:
            this->upstreamReader = &this->decompressReader;
            this->decompressReader.setReader(&this->keyReader);
            break;
        case S3_COMPRESSION_PLAIN:
            this->upstreamReader = &this->keyReader;
            break;
        default:
            S3_CHECK_OR_DIE(false, S3RuntimeError, "unknown file type");
    }

    this->upstreamReader->open(params);
}
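// open() above picks the upstream reader at run time: gzip objects go through
// the decompressor, which wraps the raw key reader. A stripped-down,
// self-contained sketch of that dispatch follows; Reader, KeyReader and
// DecompressReader here are hypothetical stand-ins, not the gpcloud classes.
#include <cstdio>

struct Reader {
    virtual ~Reader() {}
    virtual void open() = 0;
};

struct KeyReader : Reader {
    void open() { std::printf("reading raw key\n"); }
};

struct DecompressReader : Reader {
    explicit DecompressReader(Reader* inner) : inner(inner) {}
    void open() {
        std::printf("decompressing gzip stream\n");
        inner->open();  // delegate to the wrapped reader
    }
    Reader* inner;
};

enum CompressionType { COMPRESSION_PLAIN, COMPRESSION_GZIP };

int main() {
    KeyReader keyReader;
    DecompressReader decompressReader(&keyReader);

    CompressionType type = COMPRESSION_GZIP;  // pretend this was detected

    // Choose the upstream reader the same way S3CommonReader::open() does.
    Reader* upstream = (type == COMPRESSION_GZIP)
                           ? static_cast<Reader*>(&decompressReader)
                           : static_cast<Reader*>(&keyReader);
    upstream->open();
    return 0;
}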
S3Params S3BucketReader::constructReaderParams(BucketContent& key) {
    S3Params readerParams = this->params;

    // URI-encode the key name but leave the "/" separators unencoded:
    // "/encoded_path/encoded_name"
    string keyEncoded = uri_encode(key.getName());
    find_replace(keyEncoded, "%2F", "/");

    readerParams.setKeyUrl(this->getKeyURL(keyEncoded));
    readerParams.setRegion(this->region);
    readerParams.setKeySize(key.getSize());

    S3DEBUG("key: %s, size: %" PRIu64, readerParams.getKeyUrl().c_str(),
            readerParams.getKeySize());
    return readerParams;
}