BlobMetadata LocalStore::WriteBatch::putBlob(const Hash& id, const Blob* blob) {
  const IOBuf& contents = blob->getContents();
  BlobMetadata metadata{Hash::sha1(contents),
                        contents.computeChainDataLength()};
  SerializedBlobMetadata metadataBytes(metadata);
  auto hashSlice = id.getBytes();

  // Add a git-style blob prefix
  auto prefix = folly::to<string>("blob ", contents.computeChainDataLength());
  prefix.push_back('\0');
  std::vector<ByteRange> bodySlices;
  bodySlices.emplace_back(StringPiece(prefix));

  // Add all of the IOBuf chunks
  Cursor cursor(&contents);
  while (true) {
    auto bytes = cursor.peekBytes();
    if (bytes.empty()) {
      break;
    }
    bodySlices.push_back(bytes);
    cursor.skip(bytes.size());
  }

  put(LocalStore::KeySpace::BlobFamily, hashSlice, bodySlices);
  put(LocalStore::KeySpace::BlobMetaDataFamily,
      hashSlice,
      metadataBytes.slice());
  return metadata;
}
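// A small illustration, not from the original source: the body written to
// BlobFamily above is the git object framing "blob <size>" plus a NUL byte,
// followed by the raw contents, while the sha1 stored in BlobMetadata is
// computed over the raw contents only. The helper below just builds that
// framing for a std::string payload; the name `gitBlobFraming` is made up
// for illustration (assumes <string> is included).
std::string gitBlobFraming(const std::string& contents) {
  std::string framed = "blob " + std::to_string(contents.size());
  framed.push_back('\0');
  framed += contents;
  return framed; // e.g. "hello" -> "blob 5\0hello"
}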
git_oid GitBackingStore::hash2Oid(const Hash& hash) {
  git_oid oid;
  static_assert(
      Hash::RAW_SIZE == GIT_OID_RAWSZ,
      "git hash size and eden hash size do not match");
  memcpy(oid.id, hash.getBytes().data(), GIT_OID_RAWSZ);
  return oid;
}
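// A minimal usage sketch, not part of the original source: it assumes
// hash2Oid() is callable as shown (the declaration is not visible here), an
// already-opened libgit2 `git_repository* repo`, and that <stdexcept> and
// <string> are included. The helper name `readRawBlob` is made up. It looks
// the object up as a blob and copies its raw bytes out.
std::string readRawBlob(git_repository* repo, const Hash& hash) {
  git_oid oid = GitBackingStore::hash2Oid(hash);
  git_blob* blob = nullptr;
  if (git_blob_lookup(&blob, repo, &oid) != 0) { // 0 means success in libgit2
    throw std::runtime_error("blob not found in repository");
  }
  std::string contents(
      static_cast<const char*>(git_blob_rawcontent(blob)),
      git_blob_rawsize(blob));
  git_blob_free(blob);
  return contents;
}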
folly::Future<std::unique_ptr<Tree>> LocalStore::getTree(const Hash& id) const {
  return getFuture(KeySpace::TreeFamily, id.getBytes())
      .thenValue([id](StoreResult&& data) {
        if (!data.isValid()) {
          return std::unique_ptr<Tree>(nullptr);
        }
        return deserializeGitTree(id, data.bytes());
      });
}
folly::Future<std::unique_ptr<Blob>> LocalStore::getBlob(const Hash& id) const {
  return getFuture(KeySpace::BlobFamily, id.getBytes())
      .thenValue([id](StoreResult&& data) {
        if (!data.isValid()) {
          return std::unique_ptr<Blob>(nullptr);
        }
        auto buf = data.extractIOBuf();
        return deserializeGitBlob(id, &buf);
      });
}
folly::Future<optional<BlobMetadata>> LocalStore::getBlobMetadata(
    const Hash& id) const {
  return getFuture(KeySpace::BlobMetaDataFamily, id.getBytes())
      .thenValue([id](StoreResult&& data) -> optional<BlobMetadata> {
        if (!data.isValid()) {
          return std::nullopt;
        } else {
          return SerializedBlobMetadata::parse(id, data);
        }
      });
}
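// A minimal read-path sketch, not part of the original source. It assumes a
// LocalStore instance `store`, a valid Hash `id`, that BlobMetadata exposes
// `sha1` and `size` members (matching the initialization order in putBlob
// above), that <iostream> is included, and that blocking on the futures with
// .get() is acceptable here purely for illustration; real callers would chain
// continuations instead.
void printBlobInfo(LocalStore& store, const Hash& id) {
  auto metadata = store.getBlobMetadata(id).get();
  if (!metadata) {
    std::cout << "no cached metadata for this blob" << std::endl;
    return;
  }
  std::cout << "blob size: " << metadata->size << std::endl;

  auto blob = store.getBlob(id).get();
  std::cout << "blob contents " << (blob ? "present" : "missing")
            << " in the local store" << std::endl;
}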
int Config::synchronizeKeystore(
    std::string* keystore_file,
    std::string* private_key_file) {
  // Getting all private keys for hostnames
  // Generating new keypairs for hostnames on flag create-private-key
  Json::Value pks;
  std::fstream f_pk(*private_key_file);
  if (!f_pk && !vm_.count("create-private-key")) {
    std::cerr << "[E] Could not open the private key file. Please check "
              << "your permissions" << std::endl;
    return 1;
  } else {
    Json::Reader json_reader;
    if (!json_reader.parse(f_pk, pks)) {
      std::cerr << "[E] error parsing " << *private_key_file << std::endl;
      return 1;
    }
    f_pk.close();
  }

  // Writing keypairs for hostnames on flag create-private-key
  if (vm_.count("create-private-key")) {
    f_pk.open(*private_key_file, std::fstream::out);
    std::cout << "config: Generating keys for hosts" << std::endl;
    for (std::vector<host_t>::iterator i = hosts_.begin(); i != hosts_.end();
         ++i) {
      if (!pks.isMember(i->endpoint)) {
        i->keypair = zmqpp::curve::generate_keypair();
        i->uid = Hash(i->keypair.public_key).getString();
        pks[i->endpoint]["public_key"] = i->keypair.public_key;
        pks[i->endpoint]["private_key"] = i->keypair.secret_key;
      } else {
        i->keypair.public_key =
            pks[i->endpoint].get("public_key", "").asString();
        i->keypair.secret_key =
            pks[i->endpoint].get("private_key", "").asString();
        i->uid = Hash(i->keypair.public_key).getString();
      }
    }
    f_pk << pks << std::endl;
    std::cout << "config: Successfully written keys to " << *private_key_file
              << std::endl;
    f_pk.close();
    return 1;
  } else {
    for (std::vector<host_t>::iterator i = hosts_.begin(); i != hosts_.end();
         ++i) {
      i->keypair.public_key =
          pks[i->endpoint].get("public_key", "").asString();
      i->keypair.secret_key =
          pks[i->endpoint].get("private_key", "").asString();
      i->uid = Hash(i->keypair.public_key).getString();
    }
    f_pk.close();
  }

  // Reading the keystore
  std::ifstream if_ks(*keystore_file);
  Json::Value ks;
  if (!if_ks) {
    std::cerr << "[E] Could not find the keystore. Please share all "
              << "public keys before starting flocksy" << std::endl;
    return 1;
  } else {
    Json::Reader json_reader;
    if (!json_reader.parse(if_ks, ks)) {
      std::cerr << "[E] error parsing " << *keystore_file << std::endl;
      return 1;
    }
    if_ks.close();
  }

  for (std::vector<node_t>::iterator i = this->nodes_vec_.begin();
       i != this->nodes_vec_.end(); ++i) {
    i->public_key = ks.get(i->endpoint, "").asString();
    Hash* hash = new Hash(i->public_key);
    std::memcpy(i->uid, hash->getBytes(), F_GENERIC_HASH_LEN);
    nodes_.insert(std::make_pair(hash, *i));
  }
  return 0;
}
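// A sketch of the JSON shapes synchronizeKeystore() expects, reconstructed
// from the parsing code above rather than taken from the original project.
// The endpoint strings, key placeholders, and file names are illustrative
// only (assumes <fstream> is included).
void writeExampleKeyFiles() {
  // Private key file: one object per host endpoint holding a CURVE keypair,
  // matching pks[endpoint]["public_key"] / ["private_key"] above.
  Json::Value pks;
  pks["tcp://host-a:5555"]["public_key"] = "<z85-encoded public key>";
  pks["tcp://host-a:5555"]["private_key"] = "<z85-encoded secret key>";
  std::ofstream f_pk("private_keys.json");
  f_pk << pks << std::endl;

  // Keystore: maps each peer endpoint directly to its public key, matching
  // ks.get(endpoint, "") above.
  Json::Value ks;
  ks["tcp://host-b:5555"] = "<z85-encoded public key>";
  std::ofstream f_ks("keystore.json");
  f_ks << ks << std::endl;
}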
StoreResult LocalStore::get(KeySpace keySpace, const Hash& id) const {
  return get(keySpace, id.getBytes());
}
void LocalStore::WriteBatch::put(
    LocalStore::KeySpace keySpace,
    const Hash& id,
    folly::ByteRange value) {
  put(keySpace, id.getBytes(), value);
}
bool LocalStore::hasKey(KeySpace keySpace, const Hash& id) const {
  return hasKey(keySpace, id.getBytes());
}