glm::vec4 vec4FromVariant(const QVariant& object, bool& valid) {
    glm::vec4 v;
    valid = false;
    if (!object.isValid() || object.isNull()) {
        return v;
    } else if (object.canConvert<float>()) {
        v = glm::vec4(object.toFloat());
        valid = true;
    } else if (object.canConvert<QVector4D>()) {
        auto qvec4 = qvariant_cast<QVector4D>(object);
        v.x = qvec4.x();
        v.y = qvec4.y();
        v.z = qvec4.z();
        v.w = qvec4.w();
        valid = true;
    } else {
        auto map = object.toMap();
        auto x = map["x"];
        auto y = map["y"];
        auto z = map["z"];
        auto w = map["w"];
        if (x.canConvert<float>() && y.canConvert<float>() &&
            z.canConvert<float>() && w.canConvert<float>()) {
            v.x = x.toFloat();
            v.y = y.toFloat();
            v.z = z.toFloat();
            v.w = w.toFloat();
            valid = true;
        }
    }
    return v;
}
v8::Handle<v8::Value> V8WebKitPoint::constructorCallback(const v8::Arguments& args)
{
    INC_STATS("DOM.WebKitPoint.Constructor");

    if (!args.IsConstructCall())
        return throwError("DOM object constructor cannot be called as a function.", V8Proxy::TypeError);

    if (ConstructorMode::current() == ConstructorMode::WrapExistingObject)
        return args.Holder();

    float x = 0;
    float y = 0;
    if (args.Length() > 1) {
        if (!args[0]->IsUndefined()) {
            x = toFloat(args[0]);
            if (isnan(x))
                x = 0;
        }
        if (!args[1]->IsUndefined()) {
            y = toFloat(args[1]);
            if (isnan(y))
                y = 0;
        }
    }

    RefPtr<WebKitPoint> point = WebKitPoint::create(x, y);
    V8DOMWrapper::setDOMWrapper(args.Holder(), &info, point.get());
    V8DOMWrapper::setJSWrapperForDOMObject(point.release(), v8::Persistent<v8::Object>::New(args.Holder()));
    return args.Holder();
}
v8::Handle<v8::Value> V8CanvasRenderingContext2D::strokeTextCallback(const v8::Arguments& args)
{
    INC_STATS("DOM.CanvasRenderingContext2D.strokeText()");
    CanvasRenderingContext2D* context = V8CanvasRenderingContext2D::toNative(args.Holder());

    // Two forms:
    // * strokeText(text, x, y)
    // * strokeText(text, x, y, maxWidth)
    if (args.Length() < 3 || args.Length() > 4) {
        V8Proxy::setDOMException(SYNTAX_ERR);
        return notHandledByInterceptor();
    }

    String text = toWebCoreString(args[0]);
    float x = toFloat(args[1]);
    float y = toFloat(args[2]);

    if (args.Length() == 4) {
        float maxWidth = toFloat(args[3]);
        context->strokeText(text, x, y, maxWidth);
    } else
        context->strokeText(text, x, y);

    return v8::Undefined();
}
bool VRGuiVectorEntry::proxy2D(GdkEventFocus* focus, sigc::slot<void, OSG::Vec2f&> sig, Gtk::Entry* ex, Gtk::Entry* ey) {
    OSG::Vec2f res;
    res[0] = toFloat( ex->get_text() );
    res[1] = toFloat( ey->get_text() );
    sig(res);
    return true;
}
glm::vec2 vec2FromVariant(const QVariant &object, bool& isValid) {
    isValid = false;
    glm::vec2 result;
    if (object.canConvert<float>()) {
        result = glm::vec2(object.toFloat());
    } else if (object.canConvert<QVector2D>()) {
        auto qvec2 = qvariant_cast<QVector2D>(object);
        result.x = qvec2.x();
        result.y = qvec2.y();
    } else {
        auto map = object.toMap();
        auto x = map["x"];
        if (!x.isValid()) {
            x = map["width"];
        }
        auto y = map["y"];
        if (!y.isValid()) {
            y = map["height"];
        }
        if (x.isValid() && y.isValid()) {
            result.x = x.toFloat(&isValid);
            if (isValid) {
                result.y = y.toFloat(&isValid);
            }
        }
    }
    return result;
}
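// Minimal usage sketch for vec2FromVariant / vec4FromVariant above (assumes the Qt and glm
// headers are included; the "resolution" property name is made up for the example).
glm::vec2 readResolution(const QVariantMap& properties) {
    bool valid = false;
    // Accepts a plain number, a QVector2D, or a map with x/y (or width/height) keys.
    glm::vec2 res = vec2FromVariant(properties.value("resolution"), valid);
    return valid ? res : glm::vec2(1.0f);
}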
// TODO: SetStrokeColor and SetFillColor are identical except for the function names;
// consolidate them into one.
v8::Handle<v8::Value> V8CanvasRenderingContext2D::setStrokeColorCallback(const v8::Arguments& args)
{
    INC_STATS("DOM.CanvasRenderingContext2D.setStrokeColor()");
    CanvasRenderingContext2D* context = V8CanvasRenderingContext2D::toNative(args.Holder());
    switch (args.Length()) {
    case 1:
        if (args[0]->IsString())
            context->setStrokeColor(toWebCoreString(args[0]));
        else
            context->setStrokeColor(toFloat(args[0]));
        break;
    case 2:
        if (args[0]->IsString())
            context->setStrokeColor(toWebCoreString(args[0]), toFloat(args[1]));
        else
            context->setStrokeColor(toFloat(args[0]), toFloat(args[1]));
        break;
    case 4:
        context->setStrokeColor(toFloat(args[0]), toFloat(args[1]), toFloat(args[2]), toFloat(args[3]));
        break;
    case 5:
        context->setStrokeColor(toFloat(args[0]), toFloat(args[1]), toFloat(args[2]), toFloat(args[3]), toFloat(args[4]));
        break;
    default:
        V8Proxy::throwError(V8Proxy::SyntaxError, "setStrokeColor: Invalid number of arguments");
        break;
    }
    return v8::Undefined();
}
/* Here is the definition of the block processing function */
void process_block(short *clean, short *echo, short *out, int size)
{
    int i;
    for (i = 0; i < size; i++) {
        out[i] = toShort(process_sample(toFloat(clean[i]), toFloat(echo[i])));
    }
}
bool setBrush(const GiContext* ctx)
{
    bool changed = !_ctxused[1];

    if (ctx && ctx->hasFillColor()) {
        if (_gictx.getFillColor() != ctx->getFillColor()) {
            _gictx.setFillColor(ctx->getFillColor());
            changed = true;
        }
    }
    if (!ctx) ctx = &_gictx;
    if (ctx->hasFillColor() && changed) {
        _ctxused[1] = true;
        GiColor color = ctx->getFillColor();
        if (gs())
            color = gs()->calcPenColor(color);
        CGContextSetRGBFillColor(getContext(),
                                 toFloat(color.r), toFloat(color.g),
                                 toFloat(color.b), toFloat(color.a));
    }

    return ctx->hasFillColor();
}
void ViFann::runTrain(const qreal *input, qreal *output, const qreal *desiredOutput)
{
    toFloat(input, mInput, mInputCount);
    toFloat(output, mOutput, mOutputCount);
    run(mInput, mOutput);
    toFloat(desiredOutput, mOutput, mOutputCount);
    train(mInput, mOutput);
}
Point parsePoint(std::string &centerString)
{
    Point centerPoint;
    centerString.resize(centerString.length() - 1);
    size_t leftParen = centerString.find_first_of("(");
    std::vector<std::string> points = tokenizeString(centerString.substr(leftParen + 1), ",");
    centerPoint.x = toFloat(points.at(0));
    centerPoint.y = toFloat(points.at(1));
    return centerPoint;
}
void conv_Short2ToByte1(void* dst, const void* s, s32 numSamples)
{
    LSbyte* d = reinterpret_cast<LSbyte*>(dst);
    const LSshort* src = reinterpret_cast<const LSshort*>(s);

    for(s32 i=0; i<numSamples; ++i){
        s32 j = i<<1;
        // average the stereo pair, then narrow the result to one byte sample
        f32 v = 0.5f*(toFloat(src[j+0]) + toFloat(src[j+1]));
        d[i] = toByte(v);
    }
}
bool setPen(const GiContext* ctx)
{
    bool changed = !_ctxused[0];

    if (ctx && !ctx->isNullLine()) {
        if (_gictx.getLineColor() != ctx->getLineColor()) {
            _gictx.setLineColor(ctx->getLineColor());
            changed = true;
        }
        if (_gictx.getLineWidth() != ctx->getLineWidth()) {
            _gictx.setLineWidth(ctx->getLineWidth(), ctx->isAutoScale());
            changed = true;
        }
        if (_gictx.getLineStyle() != ctx->getLineStyle()) {
            _gictx.setLineStyle(ctx->getLineStyle());
            changed = true;
        }
    }
    if (!ctx) ctx = &_gictx;
    if (!ctx->isNullLine() && changed) {
        _ctxused[0] = true;
        GiColor color = ctx->getLineColor();
        if (gs())
            color = gs()->calcPenColor(color);
        CGContextSetRGBStrokeColor(getContext(),
                                   toFloat(color.r), toFloat(color.g),
                                   toFloat(color.b), toFloat(color.a));

        float w = ctx->getLineWidth();
        w = gs() ? gs()->calcPenWidth(w, ctx->isAutoScale()) : (w < 0 ? -w : 1);
        CGContextSetLineWidth(getContext(), _fast && w > 1 ? w - 1 : w);    // draw slightly thinner when not antialiasing

        int style = ctx->getLineStyle();
        CGFloat pattern[6];

        if (style >= 0 && style < sizeof(lpats)/sizeof(lpats[0])) {
            if (lpats[style].arr && !_fast) {                               // skip dash patterns in fast mode
                makeLinePattern(pattern, lpats[style].arr, lpats[style].n, w);
                CGContextSetLineDash(getContext(), 0, pattern, lpats[style].n);
            } else {
                CGContextSetLineDash(getContext(), 0, NULL, 0);
            }
            CGContextSetLineCap(getContext(), style > 0 ? kCGLineCapButt : kCGLineCapRound);
        }
    }

    return !ctx->isNullLine();
}
v8::Handle<v8::Value> V8CanvasRenderingContext2D::drawImageFromRectCallback(const v8::Arguments& args)
{
    INC_STATS("DOM.CanvasRenderingContext2D.drawImageFromRect()");
    CanvasRenderingContext2D* context = V8CanvasRenderingContext2D::toNative(args.Holder());

    v8::Handle<v8::Value> arg = args[0];
    if (V8HTMLImageElement::HasInstance(arg)) {
        HTMLImageElement* imageElement = V8HTMLImageElement::toNative(v8::Handle<v8::Object>::Cast(arg));
        context->drawImageFromRect(imageElement,
                                   toFloat(args[1]), toFloat(args[2]),
                                   toFloat(args[3]), toFloat(args[4]),
                                   toFloat(args[5]), toFloat(args[6]),
                                   toFloat(args[7]), toFloat(args[8]),
                                   toWebCoreString(args[9]));
    } else
        V8Proxy::throwError(V8Proxy::TypeError, "drawImageFromRect: Invalid type of arguments");

    return v8::Undefined();
}
void conv_Short1ToFloat2(void* dst, const void* s, s32 numSamples)
{
    LSfloat* d = reinterpret_cast<LSfloat*>(dst);
    const LSshort* src = reinterpret_cast<const LSshort*>(s);

    s32 num = numSamples >> 3;  // process 8 shorts at a time
    s32 offset = num << 3;
    s32 rem = numSamples - offset;

    const __m128i izero = _mm_setzero_si128();
    const __m128 fcoff = _mm_set1_ps(1.0f/32767.0f);

    const LSshort* p = src;
    LSfloat* q = d;
    for(s32 i=0; i<num; ++i){
        __m128i t = _mm_loadu_si128((const __m128i*)p);

        __m128i s16_0 = _mm_unpackhi_epi16(t, t);
        __m128i s16_1 = _mm_unpacklo_epi16(t, t);

        __m128i t1 = _mm_cmpgt_epi16(izero, s16_0);
        __m128i t2 = _mm_cmpgt_epi16(izero, s16_1);

        __m128i s32_0 = _mm_unpackhi_epi16(s16_0, t1);
        __m128i s32_1 = _mm_unpacklo_epi16(s16_0, t1);
        __m128i s32_2 = _mm_unpackhi_epi16(s16_1, t2);
        __m128i s32_3 = _mm_unpacklo_epi16(s16_1, t2);

        // convert to 32-bit floating point
        __m128 f32_0 = _mm_mul_ps(_mm_cvtepi32_ps(s32_0), fcoff);
        __m128 f32_1 = _mm_mul_ps(_mm_cvtepi32_ps(s32_1), fcoff);
        __m128 f32_2 = _mm_mul_ps(_mm_cvtepi32_ps(s32_2), fcoff);
        __m128 f32_3 = _mm_mul_ps(_mm_cvtepi32_ps(s32_3), fcoff);

        _mm_storeu_ps((q+0), f32_3);
        _mm_storeu_ps((q+4), f32_2);
        _mm_storeu_ps((q+8), f32_1);
        _mm_storeu_ps((q+12), f32_0);

        p += 8;
        q += 16;
    }

    for(s32 i=0; i<rem; ++i){
        s32 j = i<<1;
        q[j+0] = toFloat(p[i]);
        q[j+1] = toFloat(p[i]);
    }
}
NodeRendererResult SurfelRenderer::displayNode(FrameContext & context, Node * node, const RenderParam & /*rp*/){
    static const Util::StringIdentifier SURFEL_ATTRIBUTE("surfels");
    auto surfelAttribute = dynamic_cast<Util::ReferenceAttribute<Rendering::Mesh>*>(node->findAttribute( SURFEL_ATTRIBUTE ));

    if( !surfelAttribute || !surfelAttribute->get())
        return NodeRendererResult::PASS_ON;

    Rendering::Mesh& surfelMesh = *surfelAttribute->get();

    const Geometry::Rect projection = context.getProjectedRect(node);
    const float approxProjectedSideLength = std::sqrt(projection.getHeight() * projection.getWidth());
    // const auto& worldBB = node->getWorldBB();
    // const float approxProjectedSideLength = projectionScale * worldBB.getDiameter() / (worldBB.getCenter()-cameraOrigin).length();

    if(approxProjectedSideLength > maxSideLength)
        return NodeRendererResult::PASS_ON;

    static const Util::StringIdentifier REL_COVERING_ATTRIBUTE("surfelRelCovering");
    auto surfelCoverageAttr = node->findAttribute(REL_COVERING_ATTRIBUTE);
    const float relCovering = surfelCoverageAttr ? surfelCoverageAttr->toFloat() : 0.5;

    const float approxProjectedArea = approxProjectedSideLength * approxProjectedSideLength * relCovering;

    uint32_t surfelCount = std::min( surfelMesh.isUsingIndexData() ? surfelMesh.getIndexCount() : surfelMesh.getVertexCount(),
                                     static_cast<uint32_t>(approxProjectedArea * countFactor) + 1);

    float surfelSize = std::min(sizeFactor * approxProjectedArea / surfelCount, maxSurfelSize);

    bool handled = true;
    if(approxProjectedSideLength > minSideLength && minSideLength<maxSideLength){
        const float f = 1.0f - (approxProjectedSideLength-minSideLength) / (maxSideLength-minSideLength);
        surfelCount = std::min( surfelMesh.isUsingIndexData() ? surfelMesh.getIndexCount() : surfelMesh.getVertexCount(),
                                static_cast<uint32_t>(f * surfelCount) + 1);
        surfelSize *= f;
        handled = false;
        // std::cout << approxProjectedSideLength<<"\t"<<f<<"\n";
    }
    // std::cout << surfelSize<<"\t"<<"\n";
    // if( node->getRenderingLayers()&0x02 )
    //     std::cout << "pSize"<<approxProjectedSideLength << "\t#:"<<surfelCount<<"\ts:"<<surfelSize<<"\n";

    auto& renderingContext = context.getRenderingContext();

    static Rendering::Uniform enableSurfels("renderSurfels", true);
    static Rendering::Uniform disableSurfels("renderSurfels", false);

    renderingContext.setGlobalUniform(enableSurfels);
    renderingContext.pushAndSetPointParameters( Rendering::PointParameters(std::min(surfelSize,32.0f) ));
    renderingContext.pushAndSetMatrix_modelToCamera( renderingContext.getMatrix_worldToCamera() );
    renderingContext.multMatrix_modelToCamera(node->getWorldTransformationMatrix());
    context.displayMesh(&surfelMesh, 0, surfelCount );
    renderingContext.popMatrix_modelToCamera();
    renderingContext.popPointParameters();
    renderingContext.setGlobalUniform(disableSurfels);

    return handled ? NodeRendererResult::NODE_HANDLED : NodeRendererResult::PASS_ON;
}
//----------------------------------------------------------------------------
void conv_Short1ToFloat1(void* dst, const void* s, s32 numSamples)
{
    LSfloat* d = reinterpret_cast<LSfloat*>(dst);
    const LSshort* src = reinterpret_cast<const LSshort*>(s);

    s32 num = numSamples >> 3;  // process 8 shorts at a time
    s32 offset = num << 3;
    s32 rem = numSamples - offset;

    const __m128i izero = _mm_setzero_si128();
    const __m128 fcoff = _mm_set1_ps(1.0f/32767.0f);

    const LSshort* p = src;
    LSfloat* q = d;
    for(s32 i=0; i<num; ++i){
        // convert to 32-bit floats r0, r1
        __m128i t0 = _mm_loadu_si128((const __m128i*)p);
        __m128i t1 = _mm_cmpgt_epi16(izero, t0);

        __m128 r0 = _mm_cvtepi32_ps(_mm_unpackhi_epi16(t0, t1));
        __m128 r1 = _mm_cvtepi32_ps(_mm_unpacklo_epi16(t0, t1));

        r0 = _mm_mul_ps(r0, fcoff);
        r1 = _mm_mul_ps(r1, fcoff);

        _mm_storeu_ps((q+0), r1);
        _mm_storeu_ps((q+4), r0);

        p += 8;
        q += 8;
    }

    for(s32 i=0; i<rem; ++i){
        q[i] = toFloat(p[i]);
    }
}
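// The scalar toFloat/toByte helpers used by the conversion routines above are not shown
// here. A minimal sketch, consistent with the 1/32767 scale used by the SIMD paths, could
// look like the following; names and the toByte clamping/rounding behavior are assumptions,
// not the original implementation.
inline f32 toFloat(LSshort s)
{
    return static_cast<f32>(s) * (1.0f/32767.0f);   // map roughly to [-1, 1]
}

inline LSbyte toByte(f32 v)
{
    // clamp to [-1, 1], then rescale to the signed 8-bit range
    if (v >  1.0f) v =  1.0f;
    if (v < -1.0f) v = -1.0f;
    return static_cast<LSbyte>(v * 127.0f);
}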
void Web3DOverlay::setProperties(const QVariantMap& properties) {
    Billboard3DOverlay::setProperties(properties);

    auto urlValue = properties["url"];
    if (urlValue.isValid()) {
        QString newURL = urlValue.toString();
        if (newURL != _url) {
            setURL(newURL);
        }
    }

    auto resolution = properties["resolution"];
    if (resolution.isValid()) {
        bool valid;
        auto res = vec2FromVariant(resolution, valid);
        if (valid) {
            _resolution = res;
        }
    }

    auto dpi = properties["dpi"];
    if (dpi.isValid()) {
        _dpi = dpi.toFloat();
    }
}
void extractBackgroundColor( Array2DReadView< uint8x4 > composite,
    Array2DReadView< uint8x4 > foreground,
    Array2DWriteView< uint8x4 > background )
{
    for( int y = 0; y < composite.height(); ++y )
    {
        for( int x = 0; x < composite.width(); ++x )
        {
            Vector4f cRGBA = toFloat( composite[ { x, y } ] );
            Vector4f fRGBA = toFloat( foreground[ { x, y } ] );
            Vector4f bRGBA = extractBackgroundColor( cRGBA, fRGBA );
            background[ { x, y } ] = toUInt8( bRGBA );
        }
    }
}
// auto-adjust the received vector to correct for latency
void in1klap::compensate(vector<pair<int, string> >& received, const vector<pair<int, string> >& expected){
    vector<double> differences;

    vector<pair<int, string> >::const_iterator expectedIt, expectedIt2;
    vector<pair<int, string> >::size_type expectedSize=expected.size();

    vector<pair<int, string> >::iterator receivedIt;
    vector<pair<int, string> >::size_type receivedSize=received.size();

    // for each item in expected, find the closest item in received (that's not closer to another item in expected).
    // Calculate the position difference and store that.
    for(expectedIt=expected.begin();expectedIt!=expected.end();expectedIt++){
        double sourcepos=expectedIt->first;
        vector<string> r=getPosOfNearestReceived(sourcepos, received, expected);
        bool match=(r[0]=="true");
        double matchpos=toFloat(r[1]);
        string matchkey=r[2];
        if(match){
            differences.push_back(matchpos-sourcepos);
        }
    }

    //std::cout << "differences size" << differences.size() << std::endl;

    if(differences.size() > 0){
        // get the mean difference
        double meanDeviationFromExpected=std::accumulate(differences.begin(),differences.end(), 0.0)/differences.size();
        std::cout << "meanDeviationFromExpected: " << ofToString(meanDeviationFromExpected) << endl;

        // transform each pos in received events, trying to compensate for system latency
        for(receivedIt=received.begin();receivedIt!=received.end();receivedIt++){
            receivedIt->first = receivedIt->first - meanDeviationFromExpected;
        }
    }
}
void BluetoothDeviceListItem::paintButton(Graphics &g, bool isMouseOverButton, bool isButtonDown) {
    auto bounds = getLocalBounds();
    auto inset = bounds.reduced(6, 4);
    auto w = bounds.getWidth(), h = bounds.getHeight();
    auto iconBounds = Rectangle<float>(w - h, h/5.0, h*0.6, h*0.6);

    auto listOutline = Path();
    listOutline.addRoundedRectangle(inset.toFloat(), 10.0f);
    g.setColour(findColour(ListBox::ColourIds::backgroundColourId));
    g.fillPath(listOutline);

    if (device->connected) {
        icons->checkIcon->setSize(h, h);
        icons->checkIcon->drawWithin(g, iconBounds, RectanglePlacement::fillDestination, 1.0f);
    }

    // icons->arrowIcon->setSize(h, h);
    // icons->arrowIcon->drawWithin(g, Rectangle<float>(w - (h/8), contentHeight + 8, contentHeight, contentHeight),
    //                              RectanglePlacement::fillDestination, 1.0f);

    g.setFont(Font(getLookAndFeel().getTypefaceForFont(Font())));
    g.setFont(h * 0.5);
    g.setColour(findColour(ListBox::ColourIds::textColourId));
    g.drawText(getName(), inset.reduced(h * 0.2, 0), Justification::centredLeft, true);
}
v8::Handle<v8::Value> V8AudioContext::constructorCallback(const v8::Arguments& args)
{
    INC_STATS("DOM.AudioContext.Constructor");

    if (!args.IsConstructCall())
        return throwError("AudioContext constructor cannot be called as a function.", V8Proxy::TypeError);

    if (ConstructorMode::current() == ConstructorMode::WrapExistingObject)
        return args.Holder();

    Frame* frame = V8Proxy::retrieveFrameForCurrentContext();
    if (!frame)
        return throwError("AudioContext constructor associated frame is unavailable", V8Proxy::ReferenceError);

    Document* document = frame->document();
    if (!document)
        return throwError("AudioContext constructor associated document is unavailable", V8Proxy::ReferenceError);

    RefPtr<AudioContext> audioContext;

    if (!args.Length()) {
        // Constructor for default AudioContext which talks to audio hardware.
        audioContext = AudioContext::create(document);
        if (!audioContext.get())
            return throwError("audio resources unavailable for AudioContext construction", V8Proxy::SyntaxError);
    } else {
        // Constructor for offline (render-target) AudioContext which renders into an AudioBuffer.
        // new AudioContext(in unsigned long numberOfChannels, in unsigned long numberOfFrames, in float sampleRate);
        if (args.Length() < 3)
            return throwError("Not enough arguments", V8Proxy::SyntaxError);

        bool ok = false;

        int32_t numberOfChannels = toInt32(args[0], ok);
        if (!ok || numberOfChannels <= 0 || numberOfChannels > 10)
            return throwError("Invalid number of channels", V8Proxy::SyntaxError);

        int32_t numberOfFrames = toInt32(args[1], ok);
        if (!ok || numberOfFrames <= 0)
            return throwError("Invalid number of frames", V8Proxy::SyntaxError);

        float sampleRate = toFloat(args[2]);
        if (sampleRate <= 0)
            return throwError("Invalid sample rate", V8Proxy::SyntaxError);

        ExceptionCode ec = 0;
        audioContext = AudioContext::createOfflineContext(document, numberOfChannels, numberOfFrames, sampleRate, ec);
        if (ec)
            return throwError(ec);
    }

    if (!audioContext.get())
        return throwError("Error creating AudioContext", V8Proxy::SyntaxError);

    // Transform the holder into a wrapper object for the audio context.
    V8DOMWrapper::setDOMWrapper(args.Holder(), &info, audioContext.get());
    audioContext->ref();
    return args.Holder();
}
void Profiler::Profile()
{
    timespec stat_time;
    clock_gettime( CLOCK_MONOTONIC, &stat_time);

    //
    // Calculate:
    //
    double seconds = toFloat(stat_time) - toFloat(ms_stat_time);
    double per_second = ms_stat_count / seconds;

    double prep = 0.0;
    double proc = 0.0;
    double post = 0.0;
    for( int i = 0; i < ms_profiles.size(); i++)
    {
        prep += toFloat( ms_profiles[i]->m_tstart   ) - toFloat( ms_profiles[i]->m_tinit   );  // preparation
        proc += toFloat( ms_profiles[i]->m_tfinish  ) - toFloat( ms_profiles[i]->m_tstart  );  // processing
        post += toFloat( ms_profiles[i]->m_tcollect ) - toFloat( ms_profiles[i]->m_tfinish );  // post-processing
    }
    prep /= double( ms_stat_period ) / 1000.0;
    proc /= double( ms_stat_period ) / 1000.0;
    post /= double( ms_stat_period ) / 1000.0;

    //
    // Reset:
    //
    for( int i = 0; i < ms_profiles.size(); i++)
        delete ms_profiles[i];
    ms_profiles.clear();

    ms_stat_count = 0;
    ms_stat_time = stat_time;

    if( seconds < 10.0 )
        ms_stat_period *= 10;
    else if(( seconds > 100.0 ) && ( ms_stat_period > 100 ))
        ms_stat_period /= 10;

    //
    // Print:
    //
    static char buffer[1024];
    std::string log;
    sprintf( buffer, "Server load profiling:\n");
    log += buffer;
    sprintf( buffer, "Clients per second: %4.2f, Now: %d\n", per_second, ms_meter);
    log += buffer;
    sprintf( buffer, "Prep: %4.2f, Proc: %4.2f, Post: %4.2f, Total: %4.2f ms.\n", prep, proc, post, (prep + proc + post));
    log += buffer;

    AFCommon::QueueLog( log);
}
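// The toFloat(timespec) helper used by Profiler::Profile() is not shown here. A minimal
// sketch of what it presumably does (convert a timespec to seconds as a double) follows;
// the name and exact behavior are assumptions, not the project's implementation.
static double toFloat( const timespec & ts)
{
    return double( ts.tv_sec ) + double( ts.tv_nsec ) * 1e-9;
}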
JNIEXPORT jint JNICALL Java_edu_berkeley_bid_CUMAT_toFloat
  (JNIEnv *env, jobject obj, jobject jA, jobject jB, jint N)
{
    int *A = (int*)getPointer(env, jA);
    float *B = (float*)getPointer(env, jB);

    return toFloat(A, B, N);
}
void ViFann::runTrain(const qreal *input, qreal &output, const qreal &desiredOutput)
{
    toFloat(input, mInput, mInputCount);
    run(mInput, mSingle);
    output = mSingle;
    mSingle = desiredOutput;
    train(mInput, mSingle);
}
bool operator>(const Data& other) const
{
    if (isNumberInt() && other.isNumberInt()) {
        return toInt() > other.toInt();
    } else if (isNumber() && other.isNumber()) {
        return toFloat() > other.toFloat();
    }
    return false;
}
void Text3DOverlay::setProperties(const QVariantMap& properties) {
    Billboard3DOverlay::setProperties(properties);

    auto text = properties["text"];
    if (text.isValid()) {
        setText(text.toString());
    }

    auto textAlpha = properties["textAlpha"];
    if (textAlpha.isValid()) {
        float prevTextAlpha = getTextAlpha();
        setTextAlpha(textAlpha.toFloat());
        // Update our payload key if necessary to handle transparency
        if ((prevTextAlpha < 1.0f && _textAlpha >= 1.0f) ||
            (prevTextAlpha >= 1.0f && _textAlpha < 1.0f)) {
            auto itemID = getRenderItemID();
            if (render::Item::isValidID(itemID)) {
                render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
                render::Transaction transaction;
                transaction.updateItem(itemID);
                scene->enqueueTransaction(transaction);
            }
        }
    }

    bool valid;
    auto backgroundColor = properties["backgroundColor"];
    if (backgroundColor.isValid()) {
        auto color = u8vec3FromVariant(backgroundColor, valid);
        if (valid) {
            _backgroundColor = color;
        }
    }

    if (properties["backgroundAlpha"].isValid()) {
        setAlpha(properties["backgroundAlpha"].toFloat());
    }

    if (properties["lineHeight"].isValid()) {
        setLineHeight(properties["lineHeight"].toFloat());
    }

    if (properties["leftMargin"].isValid()) {
        setLeftMargin(properties["leftMargin"].toFloat());
    }

    if (properties["topMargin"].isValid()) {
        setTopMargin(properties["topMargin"].toFloat());
    }

    if (properties["rightMargin"].isValid()) {
        setRightMargin(properties["rightMargin"].toFloat());
    }

    if (properties["bottomMargin"].isValid()) {
        setBottomMargin(properties["bottomMargin"].toFloat());
    }
}
AgentInitializer::ParseResult AgentInitializer::constFloat( float & numValue, const ::std::string & valueStr, float scale )
{
    try {
        float f = toFloat( valueStr );
        numValue = f * scale;
        return ACCEPTED;
    } catch ( UtilException ) {
        return FAILURE;
    }
}
void CheatsManager::OnWatchItemChanged(QTableWidgetItem* item)
{
    if (m_updating)
        return;

    int index = item->data(INDEX_ROLE).toInt();
    int column = item->data(COLUMN_ROLE).toInt();

    switch (column)
    {
    case 0:
        m_watch[index].name = item->text();
        break;
    case 2:
        m_watch[index].locked = item->checkState() == Qt::Checked;

        if (m_watch[index].locked)
            m_watch[index].locked_value = GetResultValue(m_results[index]);

        UpdatePatch(m_watch[index]);
        break;
    case 3:
    {
        const auto text = item->text();
        u32 value = 0;

        switch (m_watch[index].type)
        {
        case DataType::Byte:
            value = text.toUShort(nullptr, 16) & 0xFF;
            break;
        case DataType::Short:
            value = text.toUShort(nullptr, 16);
            break;
        case DataType::Int:
            value = text.toUInt(nullptr, 16);
            break;
        case DataType::Float:
        {
            float f = text.toFloat();
            std::memcpy(&value, &f, sizeof(float));
            break;
        }
        default:
            break;
        }

        m_watch[index].locked_value = value;
        UpdatePatch(m_watch[index]);
        break;
    }
    }

    Update();
}
float getValueFloat(tinyxml2::XMLElement* _parentElement, const std::string& _name)
{
    assert(_parentElement != nullptr);
    assert(!_name.empty());

    std::string str = _parentElement->FirstChildElement(_name.c_str())->GetText();
    return toFloat(str);
}
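// Illustrative usage sketch for getValueFloat above. The tinyxml2 XMLDocument API is real,
// but the XML content and element names here are made up for the example, and the
// toFloat(std::string) helper is assumed to be available as in the function above.
float readRadiusExample()
{
    tinyxml2::XMLDocument doc;
    doc.Parse("<shape><radius>2.5</radius></shape>");
    return getValueFloat(doc.FirstChildElement("shape"), "radius");
}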
bool OsmAnd::MapStyleEvaluationResult::getFloatValue(const int valueDefId, float& value) const
{
    const auto itValue = _d->_values.constFind(valueDefId);
    if(itValue == _d->_values.cend())
        return false;

    value = itValue->toFloat();
    return true;
}