void encode_messages(confluent::codec& codec, const std::vector<std::pair<sample::contact_info_key, sample::contact_info>>& src, int32_t key_id, int32_t value_id, std::vector<std::shared_ptr<csi::kafka::basic_message>>& dst) { dst.reserve(src.size()); for (std::vector<std::pair<sample::contact_info_key, sample::contact_info>>::const_iterator i = src.begin(); i != src.end(); ++i) { std::shared_ptr<csi::kafka::basic_message> msg(new csi::kafka::basic_message()); //encode key { auto os = codec.encode_nonblock(key_id, i->first); size_t sz = os->byteCount(); auto is = avro::memoryInputStream(*os); avro::StreamReader stream_reader(*is); msg->key.set_null(false); msg->key.resize(sz); stream_reader.readBytes(msg->key.data(), sz); } //encode value { auto os = codec.encode_nonblock(value_id, i->second); size_t sz = os->byteCount(); auto is = avro::memoryInputStream(*os); avro::StreamReader stream_reader(*is); msg->value.set_null(false); msg->value.resize(sz); stream_reader.readBytes(msg->value.data(), sz); } dst.push_back(msg); } }
// Resets every bit in the vector to zero, whichever storage form is active.
void BitVector::clearAll()
{
    if (!isInline()) {
        // Out-of-line storage: zero the whole backing byte buffer.
        memset(outOfLineBits()->bits(), 0, byteCount(size()));
        return;
    }
    // Inline storage: rebuild the encoded word with an all-zero payload.
    m_bitsOrPointer = makeInlineBits(0);
}
// Reads the packed bitfield holding this frame's bool values and strict-enum
// values.  `skipBits` is the number of leading bits to skip over — structs
// pack the optional-field bitmap into the same bitfield (see Decoder::read),
// so for structs this is the optional-field bit count.  Updates the frame's
// byte-read accounting either way.
void Decoder::readBoolsAndStrictEnums(size_t skipBits) {
  auto& s = top();
  // Bytes covering skipBits + one bit per bool + all strict-enum bits,
  // rounded up to whole bytes.
  size_t nbytes = byteCount(skipBits + s.bools.count + s.totalStrictEnumBits);
  if (s.bools.count + s.totalStrictEnumBits) {
    s.boolStartBit = skipBits;
    // Pull the raw bitfield bytes; bool values are read out of this buffer
    // later via boolStartBit.
    s.bools.values.resize(nbytes);
    cursor_.pull(&s.bools.values.front(), nbytes);
    // Strict enums are only supported in structs for now
    DCHECK(s.totalStrictEnumBits == 0 || s.state == IN_STRUCT);
    // Extract strict enums: each occupies enumBits() bits immediately after
    // the bools, and the extracted bits index into the type's enumValues.
    size_t bit = skipBits + s.bools.count;
    for (size_t i = 0; i < s.strictEnums.count; ++i) {
      auto dt = s.str.strictEnumTags[i].second.dataType;
      auto bitCount = dt->enumBits();
      auto index = getBits(&s.bools.values.front(), bit, bitCount);
      bit += bitCount;
      s.strictEnums.values.push_back(*(dt->enumValues.begin() + index));
    }
    // Sanity: we consumed exactly the bits the byte count was computed from.
    DCHECK_EQ(bit, skipBits + s.bools.count + s.totalStrictEnumBits);
  } else {
    // Nothing packed beyond the skipped bits at this level; just consume.
    cursor_.skip(nbytes);
  }
  s.bytesRead += nbytes;
}
size_t ProgressTask::loadSize() { // For very fast network, speed up even before first doTestProgress: if (++no_started_loads == load_size_check && current_duration<=0) { double time = elapsed(); double speed = addOverheadMbps(byteCount(), time); doTestProgress(speed, time, currentNoConnections()); } if (speedlimit_mbps > 0.0) { double time = elapsed(); double speed = addOverheadMbps(byteCount(), time); if (speed > speedlimit_mbps) { log() << "going too fast, will pause"; return 0; } } return current_load_size; }
// public uint8_t IPAddressV4::getNthMSByte(size_t byteIndex) const { const auto highestIndex = byteCount() - 1; if (byteIndex > highestIndex) { throw std::invalid_argument(to<string>("Byte index must be <= ", to<string>(highestIndex), " for addresses of type :", detail::familyNameStr(AF_INET))); } return bytes()[byteIndex]; }
// Returns the byteIndex-th most-significant byte, dispatching to the
// concrete V4/V6 representation.  Throws std::invalid_argument on an
// out-of-range index.
uint8_t IPAddress::getNthMSByte(size_t byteIndex) const {
  const auto maxIndex = byteCount() - 1;
  if (byteIndex > maxIndex) {
    throw std::invalid_argument(sformat(
        "Byte index must be <= {} for addresses of type: {}",
        maxIndex,
        detail::familyNameStr(family())));
  }
  return isV4() ? asV4().bytes()[byteIndex] : asV6().bytes()[byteIndex];
}
// Out-of-line path for assigning `other`'s contents into this vector.
void BitVector::setSlow(const BitVector& other) {
  uintptr_t newBitsOrPointer;
  if (other.isInline()) {
    // Inline payload: the encoded word can be copied verbatim.
    newBitsOrPointer = other.m_bitsOrPointer;
  } else {
    // Out-of-line payload: clone the bit buffer into fresh storage.
    OutOfLineBits* newOutOfLineBits = OutOfLineBits::create(other.size());
    memcpy(newOutOfLineBits->bits(), other.bits(), byteCount(other.size()));
    // NOTE(review): the pointer is stored shifted right by one — presumably
    // to free a tag bit that distinguishes inline from out-of-line storage;
    // confirm against isInline()/outOfLineBits().
    newBitsOrPointer = bitwiseCast<uintptr_t>(newOutOfLineBits) >> 1;
  }
  // Release our own out-of-line storage (if any) before adopting the new
  // encoded value.
  if (!isInline())
    OutOfLineBits::destroy(outOfLineBits());
  m_bitsOrPointer = newBitsOrPointer;
}
void Basic2DWindowOpenGLDisplayPlugin::customizeContext() { #if defined(Q_OS_ANDROID) qreal dpi = getFullscreenTarget()->physicalDotsPerInch(); _virtualPadPixelSize = dpi * VirtualPad::Manager::BASE_DIAMETER_PIXELS / VirtualPad::Manager::DPI; if (!_virtualPadStickTexture) { auto iconPath = PathUtils::resourcesPath() + "images/analog_stick.png"; auto image = QImage(iconPath); if (image.format() != QImage::Format_ARGB32) { image = image.convertToFormat(QImage::Format_ARGB32); } if ((image.width() > 0) && (image.height() > 0)) { image = image.scaled(_virtualPadPixelSize, _virtualPadPixelSize, Qt::KeepAspectRatio); _virtualPadStickTexture = gpu::Texture::createStrict( gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA), image.width(), image.height(), gpu::Texture::MAX_NUM_MIPS, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)); _virtualPadStickTexture->setSource("virtualPad stick"); auto usage = gpu::Texture::Usage::Builder().withColor().withAlpha(); _virtualPadStickTexture->setUsage(usage.build()); _virtualPadStickTexture->setStoredMipFormat(gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA)); _virtualPadStickTexture->assignStoredMip(0, image.byteCount(), image.constBits()); _virtualPadStickTexture->setAutoGenerateMips(true); } } if (!_virtualPadStickBaseTexture) { auto iconPath = PathUtils::resourcesPath() + "images/analog_stick_base.png"; auto image = QImage(iconPath); if (image.format() != QImage::Format_ARGB32) { image = image.convertToFormat(QImage::Format_ARGB32); } if ((image.width() > 0) && (image.height() > 0)) { image = image.scaled(_virtualPadPixelSize, _virtualPadPixelSize, Qt::KeepAspectRatio); _virtualPadStickBaseTexture = gpu::Texture::createStrict( gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA), image.width(), image.height(), gpu::Texture::MAX_NUM_MIPS, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)); _virtualPadStickBaseTexture->setSource("virtualPad base"); auto usage = gpu::Texture::Usage::Builder().withColor().withAlpha(); 
_virtualPadStickBaseTexture->setUsage(usage.build()); _virtualPadStickBaseTexture->setStoredMipFormat(gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA)); _virtualPadStickBaseTexture->assignStoredMip(0, image.byteCount(), image.constBits()); _virtualPadStickBaseTexture->setAutoGenerateMips(true); } } if (_virtualPadButtons.size() == 0) { _virtualPadButtons.append(VirtualPadButton( dpi * VirtualPad::Manager::BTN_FULL_PIXELS / VirtualPad::Manager::DPI, PathUtils::resourcesPath() + "images/fly.png", VirtualPad::Manager::Button::JUMP)); _virtualPadButtons.append(VirtualPadButton( dpi * VirtualPad::Manager::BTN_FULL_PIXELS / VirtualPad::Manager::DPI, PathUtils::resourcesPath() + "images/handshake.png", VirtualPad::Manager::Button::HANDSHAKE)); } #endif Parent::customizeContext(); }
// Builds a human-readable HTML summary of the array pin: element type,
// element count, per-element size, and total byte size.
QString ArrayPin::toString() const
{
    const QString typeName = QString( QMetaType::typeName( type() ) );
    QString text( "<p>Type: %1</p><p>Count: %2</p><p>Size: %3</p><p>Total Size: %4</p>" );
    text = text.arg( typeName ).arg( count() ).arg( elementCount() ).arg( byteCount() );
    return text;
}
// Decodes one nesting level (struct, list/set, or map) from the wire into the
// top frame's per-kind value buffers: the optional-field bitmap (structs
// only), group-varint-packed integers, the bool/strict-enum bitfield, raw
// bytes, and fixed-width integer fields.
void Decoder::read() {
  auto& s = top();
  size_t totalVarIntCount = 0;
  size_t optionalFieldBits = 0;
  if (s.state == IN_STRUCT) {
    // Structs start with the optional-field bitmap: one bit per optional.
    size_t nbits = s.dataType->optionalFields.size();
    optionalFieldBits = nbits;
    size_t nbytes = byteCount(nbits);
    cursor_.gather(nbytes);
    auto optionalSet = ensure(nbytes).first;
    // Now we know which optionals are set.  Assign by tag: required fields
    // are always present; optional fields only when their bit is set.
    size_t optionalIdx = 0;
    for (auto& p : s.dataType->fields) {
      auto tag = p.first;
      if (!p.second.isRequired && !testBit(optionalSet, optionalIdx++)) {
        continue;
      }
      TypeInfo tinfo(schema_, p.second.type);
      s.addType(tinfo, p.second, tag, 1);
    }
    // Structs encode the bitfield first, so we can use only one bitfield
    // for both optional fields and bools and strict enums.
    readBoolsAndStrictEnums(nbits);
  } else {
    // Containers: every element shares one value type; maps additionally
    // have a key type per remaining element.
    if (s.state == IN_MAP_VALUE) {
      s.addType(s.list.mapKeyType, StructField(), 0, s.list.remaining);
    }
    s.addType(s.list.valueType, StructField(), 0, s.list.remaining);
    ++totalVarIntCount; // element count
  }
  // Pre-size the destination buffers now that counts are known.
  s.ints.values.reserve(s.ints.count);
  s.int64s.values.reserve(s.int64s.count);
  s.bytes.values.reserve(s.bytes.count);
  // Waste some memory.  Oh well, the code is simpler :)
  if (s.bools.count + s.totalStrictEnumBits) {
    s.bools.values.reserve(
        byteCount(optionalFieldBits + s.bools.count + s.totalStrictEnumBits));
  }
  s.strictEnums.values.reserve(s.strictEnums.count);
  s.vars.values.reserve(s.vars.count);
  s.internedStrings.values.reserve(s.internedStrings.count);
  // Read ints.  Fixed-width struct fields are encoded separately below, so
  // exclude them from the varint counts.
  size_t varIntCount = s.ints.count;
  size_t varInt64Count = s.int64s.count;
  size_t varLengthCount = s.vars.count;
  size_t internCount = s.internedStrings.count;
  if (s.state == IN_STRUCT) {
    varIntCount -= (s.str.fixedInt16Tags.size() + s.str.fixedInt32Tags.size());
    varInt64Count -= s.str.fixedInt64Tags.size();
  }
  // Each 64-bit value travels as two 32-bit group-varint entries.
  totalVarIntCount +=
      varIntCount + 2 * varInt64Count + varLengthCount + internCount;
  if (totalVarIntCount) {
    size_t maxSize = folly::GroupVarint32::maxSize(totalVarIntCount);
    cursor_.gatherAtMost(maxSize);
    folly::StringPiece data = SP(cursor_.peek());
    folly::GroupVarint32Decoder decoder(data, totalVarIntCount);
    if (s.state != IN_STRUCT) {
      // A container's first varint is its element count; it must match the
      // count already recorded on the frame.
      uint32_t n;
      if (!decoder.next(&n)) {
        throw TProtocolException("too few ints on the wire");
      }
      DCHECK_EQ(n, s.list.remaining);
    }
    for (size_t i = 0; i < varIntCount; i++) {
      uint32_t val;
      if (!decoder.next(&val)) {
        throw TProtocolException("too few ints on the wire: int");
      }
      s.ints.values.push_back(val);
    }
    for (size_t i = 0; i < varInt64Count; i++) {
      // High 32 bits first, then low 32 bits.
      uint32_t hi;
      uint32_t lo;
      if (!decoder.next(&hi) || !decoder.next(&lo)) {
        throw TProtocolException("too few ints on the wire: int64");
      }
      uint64_t val = ((uint64_t)hi << 32) | lo;
      s.int64s.values.push_back(val);
    }
    for (size_t i = 0; i < varLengthCount; i++) {
      uint32_t val;
      if (!decoder.next(&val)) {
        throw TProtocolException("too few ints on the wire: var");
      }
      s.vars.values.push_back(val);
    }
    for (size_t i = 0; i < internCount; i++) {
      // Interned strings travel as table indices; resolve via internTable_.
      uint32_t val;
      if (!decoder.next(&val)) {
        throw TProtocolException("too few ints on the wire: intern");
      }
      auto sp = internTable_->get(val);
      s.internedStrings.values.push_back(sp);
      if (s.state == IN_STRUCT) {
        // fixed size
        s.str.internedStringTags[i].second.length = sp.size();
      }
    }
    uint32_t tmp;
    CHECK(!decoder.next(&tmp)); // or else we have an internal error
    // Advance the cursor past exactly the bytes the decoder consumed.
    size_t bytesUsed = data.size() - decoder.rest().size();
    cursor_.skip(bytesUsed);
    s.bytesRead += bytesUsed;
  }
  // Read bools and strict enums, already done for structs
  if (s.state != IN_STRUCT) {
    readBoolsAndStrictEnums(0);
  }
  // Read bytes
  if (s.bytes.count) {
    s.bytes.values.resize(s.bytes.count);
    cursor_.pull(&s.bytes.values.front(), s.bytes.count);
    s.bytesRead += s.bytes.count;
  }
  // Read fixed-size fields, currently only for structs.  Values are
  // big-endian on the wire (readBE).
  if (s.state == IN_STRUCT) {
    if (!s.str.fixedInt16Tags.empty()) {
      for (auto tag : s.str.fixedInt16Tags) {
        s.ints.values.push_back(cursor_.readBE<uint16_t>());
        s.str.intTags.emplace_back(tag);
      }
      s.bytesRead += s.str.fixedInt16Tags.size() * sizeof(uint16_t);
    }
    if (!s.str.fixedInt32Tags.empty()) {
      for (auto tag : s.str.fixedInt32Tags) {
        s.ints.values.push_back(cursor_.readBE<uint32_t>());
        s.str.intTags.emplace_back(tag);
      }
      s.bytesRead += s.str.fixedInt32Tags.size() * sizeof(uint32_t);
    }
    if (!s.str.fixedInt64Tags.empty()) {
      for (auto tag : s.str.fixedInt64Tags) {
        s.int64s.values.push_back(cursor_.readBE<uint64_t>());
        s.str.int64Tags.push_back(tag);
      }
      s.bytesRead += s.str.fixedInt64Tags.size() * sizeof(uint64_t);
    }
  }
}