static int previewrange_define_exec(bContext *C, wmOperator *op)
{
    Scene *scene = CTX_data_scene(C);
    ARegion *ar = CTX_wm_region(C);
    float sfra, efra;
    rcti rect;

    /* get min/max values from border select rect (already in region coordinates, not screen) */
    WM_operator_properties_border_to_rcti(op, &rect);

    /* convert min/max values to frames (i.e. region to 'tot' rect) */
    sfra = UI_view2d_region_to_view_x(&ar->v2d, rect.xmin);
    efra = UI_view2d_region_to_view_x(&ar->v2d, rect.xmax);

    /* set start/end frames for preview-range
     * - must clamp within allowable limits
     * - end must not be before start (though this won't occur most of the time)
     */
    FRAMENUMBER_MIN_CLAMP(sfra);
    FRAMENUMBER_MIN_CLAMP(efra);
    if (efra < sfra) efra = sfra;

    scene->r.flag |= SCER_PRV_RANGE;
    scene->r.psfra = iroundf(sfra);
    scene->r.pefra = iroundf(efra);

    /* send notifiers */
    WM_event_add_notifier(C, NC_SCENE | ND_FRAME, scene);

    return OPERATOR_FINISHED;
}
QPointF OverlayUser::alignedPosition(const QRectF &box, const QRectF &item, Qt::Alignment a) {
    qreal boxw = box.width();
    qreal boxh = box.height();

    qreal itemw = item.width();
    qreal itemh = item.height();

    qreal wdiff = boxw - itemw;
    qreal hdiff = boxh - itemh;

    qreal xofs = box.x() - item.x();
    qreal yofs = box.y() - item.y();

    if (a & Qt::AlignRight)
        xofs += wdiff;
    else if (a & Qt::AlignHCenter)
        xofs += wdiff * 0.5f;

    if (a & Qt::AlignBottom)
        yofs += hdiff;
    else if (a & Qt::AlignVCenter)
        yofs += hdiff * 0.5f;

    return QPointF(iroundf(xofs + 0.5f), iroundf(yofs + 0.5f));
}
/* draw grease-pencil sketches to specified 3d-view assuming that matrices are already set correctly
 * Note: this gets called twice - first time with only3d=1 to draw 3d-strokes,
 * second time with only3d=0 for screen-aligned strokes
 */
void ED_gpencil_draw_view3d(wmWindowManager *wm, Scene *scene, View3D *v3d, ARegion *ar, bool only3d)
{
    bGPdata *gpd;
    int dflag = 0;
    RegionView3D *rv3d = ar->regiondata;
    int offsx, offsy, winx, winy;

    /* check that we have grease-pencil stuff to draw */
    gpd = ED_gpencil_data_get_active_v3d(scene, v3d);
    if (gpd == NULL) return;

    /* when rendering to the offscreen buffer we don't want to
     * deal with the camera border, otherwise map the coords to the camera border. */
    if ((rv3d->persp == RV3D_CAMOB) && !(G.f & G_RENDER_OGL)) {
        rctf rectf;
        ED_view3d_calc_camera_border(scene, ar, v3d, rv3d, &rectf, true); /* no shift */

        offsx = iroundf(rectf.xmin);
        offsy = iroundf(rectf.ymin);
        winx  = iroundf(rectf.xmax - rectf.xmin);
        winy  = iroundf(rectf.ymax - rectf.ymin);
    }
    else {
        offsx = 0;
        offsy = 0;
        winx  = ar->winx;
        winy  = ar->winy;
    }

    /* set flags */
    if (only3d) {
        /* 3D strokes/3D space:
         * - only 3D space points
         * - don't draw status text either (as it's the wrong space)
         */
        dflag |= (GP_DRAWDATA_ONLY3D | GP_DRAWDATA_NOSTATUS);
    }

    if (v3d->flag2 & V3D_RENDER_OVERRIDE) {
        /* don't draw status text when "only render" flag is set */
        dflag |= GP_DRAWDATA_NOSTATUS;
    }

    if ((wm == NULL) || ED_screen_animation_playing(wm)) {
        /* don't show onionskins during animation playback/scrub (i.e. it obscures the poses)
         * OpenGL Renders (i.e. final output), or depth buffer (i.e. not real strokes)
         */
        dflag |= GP_DRAWDATA_NO_ONIONS;
    }

    /* draw it! */
    gp_draw_data_all(scene, gpd, offsx, offsy, winx, winy, CFRA, dflag, v3d->spacetype);
}
/* tweak and line gestures */
int wm_gesture_evaluate(wmGesture *gesture)
{
    if (gesture->type == WM_GESTURE_TWEAK) {
        rcti *rect = gesture->customdata;
        int dx = BLI_rcti_size_x(rect);
        int dy = BLI_rcti_size_y(rect);
        if (abs(dx) + abs(dy) > U.tweak_threshold) {
            int theta = iroundf(4.0f * atan2f((float)dy, (float)dx) / (float)M_PI);
            int val = EVT_GESTURE_W;

            if (theta == 0) val = EVT_GESTURE_E;
            else if (theta == 1) val = EVT_GESTURE_NE;
            else if (theta == 2) val = EVT_GESTURE_N;
            else if (theta == 3) val = EVT_GESTURE_NW;
            else if (theta == -1) val = EVT_GESTURE_SE;
            else if (theta == -2) val = EVT_GESTURE_S;
            else if (theta == -3) val = EVT_GESTURE_SW;

#if 0
            /* debug */
            if (val == 1) printf("tweak north\n");
            if (val == 2) printf("tweak north-east\n");
            if (val == 3) printf("tweak east\n");
            if (val == 4) printf("tweak south-east\n");
            if (val == 5) printf("tweak south\n");
            if (val == 6) printf("tweak south-west\n");
            if (val == 7) printf("tweak west\n");
            if (val == 8) printf("tweak north-west\n");
#endif
            return val;
        }
    }
    return 0;
}
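/*
 * Worked example for the octant mapping above (illustrative only, not part of
 * the original source): a drag of dx = -10, dy = 10 gives atan2f(10, -10) = 3*pi/4,
 * so theta = iroundf(4 * (3*pi/4) / pi) = 3 and the gesture is EVT_GESTURE_NW.
 * A drag of dx = 10, dy = 0 gives theta = 0 -> EVT_GESTURE_E, while a drag
 * straight to the left yields theta = +/-4, which matches none of the branches
 * and falls through to the EVT_GESTURE_W default.
 */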
/* set the operator properties from the initial event */
static void graphview_cursor_setprops(bContext *C, wmOperator *op, const wmEvent *event)
{
    Scene *scene = CTX_data_scene(C);
    ARegion *ar = CTX_wm_region(C);
    float viewx, viewy;
    int frame;

    /* abort if not active region (should not really be possible) */
    if (ar == NULL)
        return;

    /* convert from region coordinates to View2D 'tot' space */
    UI_view2d_region_to_view(&ar->v2d, event->mval[0], event->mval[1], &viewx, &viewy);

    /* frame is rounded to the nearest int, since frames are ints */
    frame = iroundf(viewx);

    if (scene->r.flag & SCER_LOCK_FRAME_SELECTION) {
        CLAMP(frame, PSFRA, PEFRA);
    }

    /* store the values in the operator properties */
    RNA_int_set(op->ptr, "frame", frame);
    RNA_float_set(op->ptr, "value", viewy);
}
/**
 * Apply some transformation to markers after the fact
 *
 * \param markers List of markers to affect - this may or may not be the scene markers list,
 *                so don't assume anything
 * \param scene Current scene (for getting current frame)
 * \param mode (TfmMode) transform mode that this transform is for
 * \param value From the transform code, this is ``t->vec[0]``
 *              (which is delta transform for grab/extend, and scale factor for scale)
 * \param side (B/L/R) for 'extend' functionality, which side of current frame to use
 */
int ED_markers_post_apply_transform(ListBase *markers, Scene *scene, int mode, float value, char side)
{
    TimeMarker *marker;
    float cfra = (float)CFRA;
    int changed_tot = 0;

    /* sanity check */
    if (markers == NULL)
        return changed_tot;

    /* affect selected markers - it's unlikely that we will want to affect all in this way? */
    for (marker = markers->first; marker; marker = marker->next) {
        if (marker->flag & SELECT) {
            switch (mode) {
                case TFM_TIME_TRANSLATE:
                case TFM_TIME_EXTEND:
                {
                    /* apply delta if marker is on the right side of the current frame */
                    if ((side == 'B') ||
                        (side == 'L' && marker->frame < cfra) ||
                        (side == 'R' && marker->frame >= cfra))
                    {
                        marker->frame += iroundf(value);
                        changed_tot++;
                    }
                    break;
                }
                case TFM_TIME_SCALE:
                {
                    /* rescale the distance between the marker and the current frame */
                    marker->frame = cfra + iroundf((float)(marker->frame - cfra) * value);
                    changed_tot++;
                    break;
                }
            }
        }
    }

    return changed_tot;
}
void AudioBar::paintEvent(QPaintEvent *) {
    QPainter p(this);

    if (isEnabled()) {
        qcBelow.setAlphaF(1.0f);
        qcAbove.setAlphaF(1.0f);
        qcInside.setAlphaF(1.0f);
    } else {
        qcBelow.setAlphaF(0.5f);
        qcAbove.setAlphaF(0.5f);
        qcInside.setAlphaF(0.5f);
    }

    if (iBelow > iAbove)
        iBelow = iAbove;

    if (iValue < iMin)
        iValue = iMin;
    else if (iValue > iMax)
        iValue = iMax;

    float scale = static_cast<float>(width()) / static_cast<float>(iMax - iMin);
    int h = height();

    int val = iroundf(static_cast<float>(iValue) * scale);
    int below = iroundf(static_cast<float>(iBelow) * scale);
    int above = iroundf(static_cast<float>(iAbove) * scale);
    int max = iroundf(static_cast<float>(iMax) * scale);
    int min = iroundf(static_cast<float>(iMin) * scale);
    int peak = iroundf(static_cast<float>(iPeak) * scale);

    if (val <= below) {
        p.fillRect(0, 0, val, h, qcBelow);
        p.fillRect(val, 0, below - val, h, qcBelow.darker(300));
        p.fillRect(below, 0, above - below, h, qcInside.darker(300));
        p.fillRect(above, 0, max - above, h, qcAbove.darker(300));
    } else if (val <= above) {
        p.fillRect(0, 0, below, h, qcBelow);
        p.fillRect(below, 0, val - below, h, qcInside);
        p.fillRect(val, 0, above - val, h, qcInside.darker(300));
        p.fillRect(above, 0, max - above, h, qcAbove.darker(300));
    } else {
        p.fillRect(0, 0, below, h, qcBelow);
        p.fillRect(below, 0, above - below, h, qcInside);
        p.fillRect(above, 0, val - above, h, qcAbove);
        p.fillRect(val, 0, max - val, h, qcAbove.darker(300));
    }

    if ((peak >= min) && (peak <= max)) {
        if (peak <= below)
            p.setPen(qcBelow.lighter(150));
        else if (peak <= above)
            p.setPen(qcInside.lighter(150));
        else
            p.setPen(qcAbove.lighter(150));
        p.drawLine(peak, 0, peak, h);
    }
}
void OverlayEditorScene::drawBackground(QPainter *p, const QRectF &rect) {
    p->setBrushOrigin(0, 0);
    p->fillRect(rect, backgroundBrush());

    QRectF upscaled = OverlayUser::scaledRect(rect, 128.f / static_cast<float>(uiSize * uiZoom));

    {
        int min = iroundf(upscaled.left());
        int max = iroundf(ceil(upscaled.right()));

        for (int i = min; i <= max; ++i) {
            qreal v = (i / 128) * static_cast<qreal>(uiSize * uiZoom);

            if (i != 0)
                p->setPen(QPen(QColor(128, 128, 128, 255), 0.0f));
            else
                p->setPen(QPen(QColor(0, 0, 0, 255), 2.0f));

            p->drawLine(QPointF(v, rect.top()), QPointF(v, rect.bottom()));
        }
    }

    {
        int min = iroundf(upscaled.top());
        int max = iroundf(ceil(upscaled.bottom()));

        for (int i = min; i <= max; ++i) {
            qreal v = (i / 128) * static_cast<qreal>(uiSize * uiZoom);

            if (i != 0)
                p->setPen(QPen(QColor(128, 128, 128, 255), 0.0f));
            else
                p->setPen(QPen(QColor(0, 0, 0, 255), 2.0f));

            p->drawLine(QPointF(rect.left(), v), QPointF(rect.right(), v));
        }
    }
}
/**
 * Generate time string and store in \a str
 *
 * \param str destination string
 * \param maxncpy maximum number of characters to copy ``sizeof(str)``
 * \param power special setting for #View2D grid drawing,
 *        used to specify how detailed we need to be
 * \param time_seconds time in seconds
 * \return length of \a str
 *
 * \note in some cases this is used to print non-seconds values.
 */
size_t BLI_timecode_string_from_time_simple(
        char *str, const size_t maxncpy, const int power, const float time_seconds)
{
    size_t rlen;

    /* round to whole numbers if power is >= 1 (i.e. scale is coarse) */
    if (power <= 0) {
        rlen = BLI_snprintf_rlen(str, maxncpy, "%.*f", 1 - power, time_seconds);
    }
    else {
        rlen = BLI_snprintf_rlen(str, maxncpy, "%d", iroundf(time_seconds));
    }

    return rlen;
}
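/*
 * Usage sketch (illustrative only, not from the original source): with a
 * coarse grid (power = 1) a value of 12.3 seconds takes the iroundf() branch
 * and is printed as "12", while a fine grid (power = -1) takes the "%.*f"
 * branch with 1 - (-1) = 2 decimals and prints "12.30".
 */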
BasepointPixmap OverlayTextLine::createPixmap(QColor col) {
    if (qsText.isEmpty()) {
        return BasepointPixmap();
    }

    QRectF qr;
    if (qpp.isEmpty()) {
        qpp.addText(0.0f, fAscent, qfFont, qsText);
        qr = qpp.controlPointRect();

        // fit into (0,0)-based coordinates
        fXCorrection = 0.0f;
        fYCorrection = 0.0f;

        if (qr.left() < fEdge) {
            fXCorrection = fEdge - static_cast<float>(qr.left());
        }
        if (qr.top() < fEdge) {
            fYCorrection = fEdge - static_cast<float>(qr.top());
        }

        QMatrix correction;
        correction.translate(fXCorrection, fYCorrection);
        qpp = correction.map(qpp);
    }

    qr = qpp.controlPointRect();
    return render(
               iroundf(qr.right() + 2.0f * fEdge + 0.5f),
               iroundf(qr.bottom() + 2.0f * fEdge + 0.5f),
               col,
               QPoint(iroundf(fXCorrection + 0.5f), iroundf(fYCorrection + fAscent + 0.5f))
           );
}
BasepointPixmap OverlayTextLine::render(int w, int h, const QColor& col, const QPoint& bp) const {
    BasepointPixmap img(w, h, bp);
    img.fill(Qt::transparent);

    QPainter imgp(&img);
    imgp.setRenderHint(QPainter::Antialiasing);
    imgp.setRenderHint(QPainter::TextAntialiasing);
    imgp.setBackground(QColor(0, 0, 0, 0));
    imgp.setCompositionMode(QPainter::CompositionMode_SourceOver);

    QColor qc(col);
    qc.setAlpha(255);

    imgp.setBrush(qc);
    imgp.setPen(QPen(Qt::black, fEdge, Qt::SolidLine, Qt::RoundCap, Qt::RoundJoin));
    imgp.drawPath(qpp);

    imgp.setPen(Qt::NoPen);
    imgp.drawPath(qpp);

    img.iAscent = iroundf(fAscent + 0.5f);
    img.iDescent = iroundf(fDescent + 0.5f);
    return img;
}
OverlayEditor::OverlayEditor(QWidget *p, QGraphicsItem *qgi, OverlaySettings *osptr) :
        QDialog(p),
        qgiPromote(qgi),
        oes(g.s.os) {
    setupUi(this);
    os = osptr ? osptr : &g.s.os;

    connect(qdbbBox->button(QDialogButtonBox::Apply), SIGNAL(clicked()), this, SLOT(apply()));
    connect(qdbbBox->button(QDialogButtonBox::Reset), SIGNAL(clicked()), this, SLOT(reset()));

    QGraphicsProxyWidget *qgpw = graphicsProxyWidget();
    if (qgpw) {
        qgpw->setFlag(QGraphicsItem::ItemIgnoresParentOpacity);
        if (g.ocIntercept) {
            qgpw->setPos(iroundf(g.ocIntercept->uiWidth / 16.0f + 0.5f),
                         iroundf(g.ocIntercept->uiHeight / 16.0f + 0.5f));
            qgpw->resize(iroundf(g.ocIntercept->uiWidth * 14.0f / 16.0f + 0.5f),
                         iroundf(g.ocIntercept->uiHeight * 14.0f / 16.0f + 0.5f));
        }
    }

    qgvView->setScene(&oes);

    reset();
}
void AudioInput::resetAudioProcessor() {
    if (!bResetProcessor)
        return;

    int iArg;

    if (sppPreprocess)
        speex_preprocess_state_destroy(sppPreprocess);
    if (sesEcho)
        speex_echo_state_destroy(sesEcho);

    sppPreprocess = speex_preprocess_state_init(iFrameSize, iSampleRate);

    iArg = 1;
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_VAD, &iArg);
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_AGC, &iArg);
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_DENOISE, &iArg);
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_DEREVERB, &iArg);

    iArg = 30000;
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_AGC_TARGET, &iArg);

    float v = 30000.0f / static_cast<float>(g.s.iMinLoudness);
    iArg = iroundf(floorf(20.0f * log10f(v)));
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_AGC_MAX_GAIN, &iArg);

    iArg = -60;
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_AGC_DECREMENT, &iArg);

    iArg = g.s.iNoiseSuppress;
    speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_NOISE_SUPPRESS, &iArg);

    if (iEchoChannels > 0) {
        sesEcho = speex_echo_state_init_mc(iFrameSize, iFrameSize * 10, 1, bEchoMulti ? iEchoChannels : 1);
        iArg = iSampleRate;
        speex_echo_ctl(sesEcho, SPEEX_ECHO_SET_SAMPLING_RATE, &iArg);
        speex_preprocess_ctl(sppPreprocess, SPEEX_PREPROCESS_SET_ECHO_STATE, sesEcho);

        qWarning("AudioInput: ECHO CANCELLER ACTIVE");
    } else {
        sesEcho = NULL;
    }

    bResetEncoder = true;
    bResetProcessor = false;
}
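/*
 * Illustration of the AGC max-gain computation above (not part of the
 * original source; g.s.iMinLoudness = 1000 is a hypothetical setting value):
 * v = 30000 / 1000 = 30, and 20.0f * log10f(30.0f) is roughly 29.54, so
 * floorf()/iroundf() hand the preprocessor a maximum gain of 29 dB.
 */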
/* set the operator properties from the initial event */
static void graphview_cursor_setprops(bContext *C, wmOperator *op, const wmEvent *event)
{
    ARegion *ar = CTX_wm_region(C);
    float viewx, viewy;

    /* abort if not active region (should not really be possible) */
    if (ar == NULL)
        return;

    /* convert from region coordinates to View2D 'tot' space */
    UI_view2d_region_to_view(&ar->v2d, event->mval[0], event->mval[1], &viewx, &viewy);

    /* store the values in the operator properties */
    /* frame is rounded to the nearest int, since frames are ints */
    RNA_int_set(op->ptr, "frame", iroundf(viewx));
    RNA_float_set(op->ptr, "value", viewy);
}
void bglBegin(int mode)
{
    curmode = mode;

    if (mode == GL_POINTS) {
        float value[4];
        glGetFloatv(GL_POINT_SIZE_RANGE, value);
        if (value[1] < 2.0f) {
            glGetFloatv(GL_POINT_SIZE, value);
            pointhack = iroundf(value[0]);
            if (pointhack > 4) pointhack = 4;
        }
        else {
            glBegin(mode);
        }
    }
}
/* Get frame from mouse coordinates */
static int frame_from_event(bContext *C, const wmEvent *event)
{
    ARegion *region = CTX_wm_region(C);
    Scene *scene = CTX_data_scene(C);
    float viewx;
    int frame;

    /* convert from region coordinates to View2D 'tot' space */
    viewx = UI_view2d_region_to_view_x(&region->v2d, event->mval[0]);

    /* round result to nearest int (frames are ints!) */
    frame = iroundf(viewx);

    if (scene->r.flag & SCER_LOCK_FRAME_SELECTION) {
        CLAMP(frame, PSFRA, PEFRA);
    }

    return frame;
}
/* snap current-frame indicator to 'average time' of selected keyframes */
static int actkeys_framejump_exec(bContext *C, wmOperator *UNUSED(op))
{
    bAnimContext ac;
    ListBase anim_data = {NULL, NULL};
    bAnimListElem *ale;
    int filter;
    KeyframeEditData ked = {{NULL}};

    /* get editor data */
    if (ANIM_animdata_get_context(C, &ac) == 0)
        return OPERATOR_CANCELLED;

    /* init edit data */
    /* loop over action data, averaging values */
    filter = (ANIMFILTER_DATA_VISIBLE | ANIMFILTER_LIST_VISIBLE /*| ANIMFILTER_CURVESONLY */ | ANIMFILTER_NODUPLIS);
    ANIM_animdata_filter(&ac, &anim_data, filter, ac.data, ac.datatype);

    for (ale = anim_data.first; ale; ale = ale->next) {
        AnimData *adt = ANIM_nla_mapping_get(&ac, ale);
        if (adt) {
            ANIM_nla_mapping_apply_fcurve(adt, ale->key_data, 0, 1);
            ANIM_fcurve_keyframes_loop(&ked, ale->key_data, NULL, bezt_calc_average, NULL);
            ANIM_nla_mapping_apply_fcurve(adt, ale->key_data, 1, 1);
        }
        else
            ANIM_fcurve_keyframes_loop(&ked, ale->key_data, NULL, bezt_calc_average, NULL);
    }

    ANIM_animdata_freelist(&anim_data);

    /* set the new current frame value, based on the average time */
    if (ked.i1) {
        Scene *scene = ac.scene;
        CFRA = iroundf(ked.f1 / ked.i1);
        SUBFRA = 0.f;
    }

    /* set notifier that things have changed */
    WM_event_add_notifier(C, NC_SCENE | ND_FRAME, ac.scene);

    return OPERATOR_FINISHED;
}
/* Set the new frame number */
static void graphview_cursor_apply(bContext *C, wmOperator *op)
{
    Main *bmain = CTX_data_main(C);
    Scene *scene = CTX_data_scene(C);
    SpaceIpo *sipo = CTX_wm_space_graph(C);
    float frame = RNA_float_get(op->ptr, "frame"); /* this isn't technically "frame", but it'll do... */

    /* adjust the frame or the cursor x-value */
    if (sipo->mode == SIPO_MODE_DRIVERS) {
        /* adjust cursor x-value */
        sipo->cursorTime = frame;
    }
    else {
        /* adjust the frame
         * NOTE: sync this part of the code with ANIM_OT_change_frame
         */
        /* 1) frame is rounded to the nearest int, since frames are ints */
        CFRA = iroundf(frame);

        if (scene->r.flag & SCER_LOCK_FRAME_SELECTION) {
            /* Clip to preview range
             * NOTE: Preview range won't go into negative values,
             *       so only clamping once should be fine.
             */
            CLAMP(CFRA, PSFRA, PEFRA);
        }
        else {
            /* Prevent negative frames */
            FRAMENUMBER_MIN_CLAMP(CFRA);
        }

        SUBFRA = 0.0f;
        BKE_sound_seek_scene(bmain, scene);
    }

    /* set the cursor value */
    sipo->cursorVal = RNA_float_get(op->ptr, "value");

    /* send notifiers - notifiers for frame should force an update for both vars ok... */
    WM_event_add_notifier(C, NC_SCENE | ND_FRAME, scene);
}
static int frame_from_event(bContext *C, const wmEvent *event)
{
    ARegion *ar = CTX_wm_region(C);
    Scene *scene = CTX_data_scene(C);
    int framenr = 0;

    if (ar->regiontype == RGN_TYPE_WINDOW) {
        float sfra = SFRA, efra = EFRA, framelen = ar->winx / (efra - sfra + 1);

        framenr = sfra + event->mval[0] / framelen;
    }
    else {
        float viewx, viewy;

        UI_view2d_region_to_view(&ar->v2d, event->mval[0], event->mval[1], &viewx, &viewy);

        framenr = iroundf(viewx);
    }

    return framenr;
}
void MOD_meshcache_calc_range(const float frame, const char interp,
                              const int frame_tot, int r_index_range[2], float *r_factor)
{
    if (interp == MOD_MESHCACHE_INTERP_NONE) {
        r_index_range[0] = r_index_range[1] = max_ii(0, min_ii(frame_tot - 1, iroundf(frame)));
        *r_factor = 1.0f; /* dummy */
    }
    else {
        const float tframe = floorf(frame);
        const float range  = frame - tframe;
        r_index_range[0] = (int)tframe;
        if (range <= FRAME_SNAP_EPS) {
            /* we're close enough not to need blending */
            r_index_range[1] = r_index_range[0];
            *r_factor = 1.0f; /* dummy */
        }
        else {
            /* blend between 2 frames */
            r_index_range[1] = r_index_range[0] + 1;
            *r_factor = range;
        }

        /* clamp */
        if ((r_index_range[0] >= frame_tot) ||
            (r_index_range[1] >= frame_tot))
        {
            r_index_range[0] = r_index_range[1] = frame_tot - 1;
            *r_factor = 1.0f; /* dummy */
        }
        else if ((r_index_range[0] < 0) ||
                 (r_index_range[1] < 0))
        {
            r_index_range[0] = r_index_range[1] = 0;
            *r_factor = 1.0f; /* dummy */
        }
    }
}
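/*
 * Worked example (illustrative only, not from the original source): with
 * frame = 2.25, frame_tot = 10 and any interpolation other than
 * MOD_MESHCACHE_INTERP_NONE, tframe = 2.0 and range = 0.25, so r_index_range
 * becomes {2, 3} and *r_factor = 0.25 (blend a quarter of the way from cached
 * frame 2 to frame 3). With MOD_MESHCACHE_INTERP_NONE the same input snaps to
 * r_index_range = {2, 2} via iroundf(), leaving the factor as a dummy 1.0.
 */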
void AudioInputConfig::on_Tick_timeout() {
    if (!inputProcessor) {
        inputProcessor = new QtSpeex::SpeexInputProcessor();
        inputProcessor->open(QIODevice::WriteOnly | QIODevice::Unbuffered);

        if (!inputDevice) {
            inputDevice = AudioDeviceHelper::getPreferedInputDevice();
        }
        inputDevice->start(inputProcessor);
        connect(inputProcessor, SIGNAL(networkPacketReady()), this, SLOT(emptyBuffer()));
    }

    abSpeech->iBelow = ui.qsTransmitMin->value();
    abSpeech->iAbove = ui.qsTransmitMax->value();

    if (loaded) {
        rsVoip->setVoipfVADmin(ui.qsTransmitMin->value());
        rsVoip->setVoipfVADmax(ui.qsTransmitMax->value());
    }

    abSpeech->iValue = iroundf(inputProcessor->dVoiceAcivityLevel * 32767.0f + 0.5f);
    abSpeech->update();
}
void AudioBar::paintEvent(QPaintEvent *) {
    QPainter p(this);

    if (isEnabled()) {
        qcBelow.setAlphaF(1.0f);
        qcAbove.setAlphaF(1.0f);
        qcInside.setAlphaF(1.0f);
    } else {
        qcBelow.setAlphaF(0.5f);
        qcAbove.setAlphaF(0.5f);
        qcInside.setAlphaF(0.5f);
    }

    if (iBelow > iAbove)
        iBelow = iAbove;

    if (iValue < iMin)
        iValue = iMin;
    else if (iValue > iMax)
        iValue = iMax;

    float scale = static_cast<float>(width()) / static_cast<float>(iMax - iMin);
    int h = height();

    int val = iroundf(static_cast<float>(iValue) * scale + 0.5f);
    int below = iroundf(static_cast<float>(iBelow) * scale + 0.5f);
    int above = iroundf(static_cast<float>(iAbove) * scale + 0.5f);
    int max = iroundf(static_cast<float>(iMax) * scale + 0.5f);
    int min = iroundf(static_cast<float>(iMin) * scale + 0.5f);
    int peak = iroundf(static_cast<float>(iPeak) * scale + 0.5f);

    if (highContrast) {
        // Draw monochrome representation
        QColor fg = QPalette().foreground().color();

        p.fillRect(0, 0, below, h,
                   QBrush(fg, qlReplacementBrushes.value(qlReplacableColors.indexOf(qcBelow), Qt::CrossPattern)));
        p.fillRect(below, 0, above - below, h,
                   QBrush(fg, qlReplacementBrushes.value(qlReplacableColors.indexOf(qcInside), Qt::NoBrush)));
        p.fillRect(above, 0, max - above, h,
                   QBrush(fg, qlReplacementBrushes.value(qlReplacableColors.indexOf(qcAbove), Qt::CrossPattern)));

        p.fillRect(0, 0, val, h, QBrush(fg, Qt::SolidPattern));

        p.drawRect(0, 0, max - 1, h - 1);
        p.drawLine(below, 0, below, h);
        p.drawLine(above, 0, above, h);
    } else {
        if (val <= below) {
            p.fillRect(0, 0, val, h, qcBelow);
            p.fillRect(val, 0, below - val, h, qcBelow.darker(300));
            p.fillRect(below, 0, above - below, h, qcInside.darker(300));
            p.fillRect(above, 0, max - above, h, qcAbove.darker(300));
        } else if (val <= above) {
            p.fillRect(0, 0, below, h, qcBelow);
            p.fillRect(below, 0, val - below, h, qcInside);
            p.fillRect(val, 0, above - val, h, qcInside.darker(300));
            p.fillRect(above, 0, max - above, h, qcAbove.darker(300));
        } else {
            p.fillRect(0, 0, below, h, qcBelow);
            p.fillRect(below, 0, above - below, h, qcInside);
            p.fillRect(above, 0, val - above, h, qcAbove);
            p.fillRect(val, 0, max - val, h, qcAbove.darker(300));
        }
    }

    if ((peak >= min) && (peak <= max)) {
        if (peak <= below)
            p.setPen(qcBelow.lighter(150));
        else if (peak <= above)
            p.setPen(qcInside.lighter(150));
        else
            p.setPen(qcAbove.lighter(150));
        p.drawLine(peak, 0, peak, h);
    }
}
/* Return the time of the marker that occurs on a frame closest to the given time */
int ED_markers_find_nearest_marker_time(ListBase *markers, float x)
{
    TimeMarker *nearest = ED_markers_find_nearest_marker(markers, x);
    return (nearest) ? (nearest->frame) : iroundf(x);
}
/* propagate just works along each F-Curve in turn */
static void pose_propagate_fcurve(wmOperator *op, Object *ob, FCurve *fcu,
                                  float startFrame, tPosePropagate_ModeData modeData)
{
    const int mode = RNA_enum_get(op->ptr, "mode");

    BezTriple *bezt;
    float refVal = 0.0f;
    bool keyExists;
    int i, match;
    short first = 1;

    /* skip if no keyframes to edit */
    if ((fcu->bezt == NULL) || (fcu->totvert < 2))
        return;

    /* find the reference value from bones directly, which means that the user
     * doesn't need to firstly keyframe the pose (though this doesn't mean that
     * they can't either)
     */
    if (!pose_propagate_get_refVal(ob, fcu, &refVal))
        return;

    /* find the first keyframe to start propagating from
     * - if there's a keyframe on the current frame, we probably want to save this value there too
     *   since it may be as of yet unkeyed
     * - if starting before the starting frame, don't touch the key, as it may have had some valid
     *   values
     */
    match = binarysearch_bezt_index(fcu->bezt, startFrame, fcu->totvert, &keyExists);

    if (fcu->bezt[match].vec[1][0] < startFrame)
        i = match + 1;
    else
        i = match;

    for (bezt = &fcu->bezt[i]; i < fcu->totvert; i++, bezt++) {
        /* additional termination conditions based on the operator 'mode' property go here... */
        if (ELEM(mode, POSE_PROPAGATE_BEFORE_FRAME, POSE_PROPAGATE_SMART_HOLDS)) {
            /* stop if keyframe is outside the accepted range */
            if (bezt->vec[1][0] > modeData.end_frame)
                break;
        }
        else if (mode == POSE_PROPAGATE_NEXT_KEY) {
            /* stop after the first keyframe has been processed */
            if (first == 0)
                break;
        }
        else if (mode == POSE_PROPAGATE_LAST_KEY) {
            /* only affect this frame if it will be the last one */
            if (i != (fcu->totvert - 1))
                continue;
        }
        else if (mode == POSE_PROPAGATE_SELECTED_MARKERS) {
            /* only allow if there's a marker on this frame */
            CfraElem *ce = NULL;

            /* stop on matching marker if there is one */
            for (ce = modeData.sel_markers.first; ce; ce = ce->next) {
                if (ce->cfra == iroundf(bezt->vec[1][0]))
                    break;
            }

            /* skip this keyframe if no marker */
            if (ce == NULL)
                continue;
        }

        /* just flatten handles, since values will now be the same either side... */
        /* TODO: perhaps a fade-out modulation of the value is required here (optional once again)? */
        bezt->vec[0][1] = bezt->vec[1][1] = bezt->vec[2][1] = refVal;

        /* select keyframe to indicate that it's been changed */
        bezt->f2 |= SELECT;

        first = 0;
    }
}
void OverlayUser::updateLayout() {
    QPixmap pm;

    if (scene())
        uiSize = iroundf(scene()->sceneRect().height() + 0.5);

    prepareGeometryChange();

    for (int i = 0; i < 4; ++i)
        qgpiName[i]->setPixmap(pm);

    qgpiAvatar->setPixmap(pm);
    qgpiChannel->setPixmap(pm);

    {
        QImageReader qir(QLatin1String("skin:muted_self.svg"));
        QSize sz = qir.size();
        sz.scale(SCALESIZE(MutedDeafened), Qt::KeepAspectRatio);
        qir.setScaledSize(sz);
        qgpiMuted->setPixmap(QPixmap::fromImage(qir.read()));
    }

    {
        QImageReader qir(QLatin1String("skin:deafened_self.svg"));
        QSize sz = qir.size();
        sz.scale(SCALESIZE(MutedDeafened), Qt::KeepAspectRatio);
        qir.setScaledSize(sz);
        qgpiDeafened->setPixmap(QPixmap::fromImage(qir.read()));
    }

    qgpiMuted->setPos(alignedPosition(scaledRect(os->qrfMutedDeafened, uiSize * os->fZoom), qgpiMuted->boundingRect(), os->qaMutedDeafened));
    qgpiMuted->setZValue(1.0f);
    qgpiMuted->setOpacity(os->fMutedDeafened);

    qgpiDeafened->setPos(alignedPosition(scaledRect(os->qrfMutedDeafened, uiSize * os->fZoom), qgpiDeafened->boundingRect(), os->qaMutedDeafened));
    qgpiDeafened->setZValue(1.0f);
    qgpiDeafened->setOpacity(os->fMutedDeafened);

    qgpiAvatar->setPos(0.0f, 0.0f);
    qgpiAvatar->setOpacity(os->fAvatar);

    for (int i = 0; i < 4; ++i) {
        qgpiName[i]->setPos(0.0f, 0.0f);
        qgpiName[i]->setZValue(2.0f);
        qgpiName[i]->setOpacity(os->fUserName);
    }

    qgpiChannel->setPos(0.0f, 0.0f);
    qgpiChannel->setZValue(3.0f);
    qgpiChannel->setOpacity(os->fChannel);

    QRectF childrenBounds = os->qrfAvatar | os->qrfChannel | os->qrfMutedDeafened | os->qrfUserName;

    bool haspen = (os->qcBoxPen != os->qcBoxFill) && (! qFuzzyCompare(os->qcBoxPen.alphaF(), static_cast<qreal>(0.0f)));
    qreal pw = haspen ? qMax<qreal>(1.0f, os->fBoxPenWidth * uiSize * os->fZoom) : 0.0f;
    qreal pad = os->fBoxPad * uiSize * os->fZoom;

    QPainterPath pp;
    pp.addRoundedRect(childrenBounds.x() * uiSize * os->fZoom + -pw / 2.0f - pad,
                      childrenBounds.y() * uiSize * os->fZoom + -pw / 2.0f - pad,
                      childrenBounds.width() * uiSize * os->fZoom + pw + 2.0f * pad,
                      childrenBounds.height() * uiSize * os->fZoom + pw + 2.0f * pad,
                      2.0f * pw, 2.0f * pw);

    qgpiBox->setPath(pp);
    qgpiBox->setPos(0.0f, 0.0f);
    qgpiBox->setZValue(-1.0f);
    qgpiBox->setPen(haspen ? QPen(os->qcBoxPen, pw) : Qt::NoPen);
    qgpiBox->setBrush(qFuzzyCompare(os->qcBoxFill.alphaF(), static_cast<qreal>(0.0f)) ? Qt::NoBrush : os->qcBoxFill);
    qgpiBox->setOpacity(1.0f);

    if (! cuUser) {
        switch (tsColor) {
            case Settings::Passive:
                qsName = Overlay::tr("Silent");
                break;
            case Settings::Talking:
                qsName = Overlay::tr("Talking");
                break;
            case Settings::Whispering:
                qsName = Overlay::tr("Whisper");
                break;
            case Settings::Shouting:
                qsName = Overlay::tr("Shout");
                break;
        }
    }
}
void OverlayUserGroup::updateUsers() {
    const QRectF &sr = scene()->sceneRect();

    unsigned int uiHeight = iroundf(sr.height() + 0.5f);

    QList<QGraphicsItem *> items;
    foreach(QGraphicsItem *qgi, childItems())
        items << qgi;

    QList<OverlayUser *> users;
    if (bShowExamples) {
        if (qlExampleUsers.isEmpty()) {
            qlExampleUsers << new OverlayUser(Settings::Passive, uiHeight, os);
            qlExampleUsers << new OverlayUser(Settings::Talking, uiHeight, os);
            qlExampleUsers << new OverlayUser(Settings::Whispering, uiHeight, os);
            qlExampleUsers << new OverlayUser(Settings::Shouting, uiHeight, os);
        }

        users = qlExampleUsers;
        foreach(OverlayUser *ou, users)
            items.removeAll(ou);

        if (! qgeiHandle) {
            qgeiHandle = new QGraphicsEllipseItem(QRectF(-4.0f, -4.0f, 8.0f, 8.0f));
            qgeiHandle->setPen(QPen(Qt::darkRed, 0.0f));
            qgeiHandle->setBrush(Qt::red);
            qgeiHandle->setZValue(0.5f);
            qgeiHandle->setFlag(QGraphicsItem::ItemIsMovable);
            qgeiHandle->setFlag(QGraphicsItem::ItemIsSelectable);
            qgeiHandle->setPos(sr.width() * os->fX, sr.height() * os->fY);
            scene()->addItem(qgeiHandle);
            qgeiHandle->show();
            qgeiHandle->installSceneEventFilter(this);
        }
    } else {
        delete qgeiHandle;
        qgeiHandle = NULL;
    }

    ClientUser *self = ClientUser::get(g.uiSession);
    if (self) {
        QList<ClientUser *> showusers;
        Channel *home = ClientUser::get(g.uiSession)->cChannel;

        switch (os->osShow) {
            case OverlaySettings::LinkedChannels:
                foreach(Channel *c, home->allLinks())
                    foreach(User *p, c->qlUsers)
                        showusers << static_cast<ClientUser *>(p);
                foreach(ClientUser *cu, ClientUser::getTalking())
                    if (! showusers.contains(cu))
                        showusers << cu;
                break;
            case OverlaySettings::HomeChannel:
                foreach(User *p, home->qlUsers)
                    showusers << static_cast<ClientUser *>(p);
                foreach(ClientUser *cu, ClientUser::getTalking())
                    if (! showusers.contains(cu))
                        showusers << cu;
                break;
            case OverlaySettings::Active:
                showusers = ClientUser::getActive();
                if (os->bAlwaysSelf && !showusers.contains(self))
                    showusers << self;
                break;
            default:
                showusers = ClientUser::getTalking();
                if (os->bAlwaysSelf && (self->tsState == Settings::Passive))
                    showusers << self;
                break;
        }
        ClientUser::sortUsersOverlay(showusers);

        foreach(ClientUser *cu, showusers) {
            OverlayUser *ou = qmUsers.value(cu);
            if (! ou) {
                ou = new OverlayUser(cu, uiHeight, os);
                connect(cu, SIGNAL(destroyed(QObject *)), this, SLOT(userDestroyed(QObject *)));
                qmUsers.insert(cu, ou);
                ou->hide();
            } else {
bool AudioOutputSpeech::needSamples(unsigned int snum) {
    for (unsigned int i = iLastConsume; i < iBufferFilled; ++i)
        pfBuffer[i - iLastConsume] = pfBuffer[i];
    iBufferFilled -= iLastConsume;

    iLastConsume = snum;

    if (iBufferFilled >= snum)
        return bLastAlive;

    float *pOut;
    STACKVAR(float, fOut, iFrameSize + 4096);

    bool nextalive = bLastAlive;

    while (iBufferFilled < snum) {
        resizeBuffer(iBufferFilled + iOutputSize);

        pOut = (srs) ? fOut : (pfBuffer + iBufferFilled);

        if (! bLastAlive) {
            memset(pOut, 0, iFrameSize * sizeof(float));
        } else {
            if (p == LoopUser::lpLoopy) {
                LoopUser::lpLoopy->fetchFrames();
            }

            int avail = 0;
            int ts = jitter_buffer_get_pointer_timestamp(jbJitter);
            jitter_buffer_ctl(jbJitter, JITTER_BUFFER_GET_AVAILABLE_COUNT, &avail);

            if (p && (ts == 0)) {
                int want = iroundf(p->fAverageAvailable);
                if (avail < want) {
                    ++iMissCount;
                    if (iMissCount < 20) {
                        memset(pOut, 0, iFrameSize * sizeof(float));
                        goto nextframe;
                    }
                }
            }

            if (qlFrames.isEmpty()) {
                QMutexLocker lock(&qmJitter);

                char data[4096];
                JitterBufferPacket jbp;
                jbp.data = data;
                jbp.len = 4096;

                spx_int32_t startofs = 0;

                if (jitter_buffer_get(jbJitter, &jbp, iFrameSize, &startofs) == JITTER_BUFFER_OK) {
                    PacketDataStream pds(jbp.data, jbp.len);

                    iMissCount = 0;
                    ucFlags = static_cast<unsigned char>(pds.next());

                    bHasTerminator = false;

                    unsigned int header = 0;
                    do {
                        header = static_cast<unsigned int>(pds.next());
                        if (header)
                            qlFrames << pds.dataBlock(header & 0x7f);
                        else
                            bHasTerminator = true;
                    } while ((header & 0x80) && pds.isValid());

                    if (pds.left()) {
                        pds >> fPos[0];
                        pds >> fPos[1];
                        pds >> fPos[2];
                    } else {
                        fPos[0] = fPos[1] = fPos[2] = 0.0f;
                    }

                    if (p) {
                        float a = static_cast<float>(avail);
                        if (avail >= p->fAverageAvailable)
                            p->fAverageAvailable = a;
                        else
                            p->fAverageAvailable *= 0.99f;
                    }
                } else {
void AudioStats::on_Tick_timeout() {
    AudioInputPtr ai = g.ai;

    if (ai.get() == NULL || ! ai->sppPreprocess)
        return;

    bool nTalking = ai->isTransmitting();

    QString txt;

    txt.sprintf("%06.2f dB", ai->dPeakMic);
    qlMicLevel->setText(txt);

    txt.sprintf("%06.2f dB", ai->dPeakSpeaker);
    qlSpeakerLevel->setText(txt);

    txt.sprintf("%06.2f dB", ai->dPeakSignal);
    qlSignalLevel->setText(txt);

    spx_int32_t ps_size = 0;
    speex_preprocess_ctl(ai->sppPreprocess, SPEEX_PREPROCESS_GET_PSD_SIZE, &ps_size);

    STACKVAR(spx_int32_t, noise, ps_size);
    STACKVAR(spx_int32_t, ps, ps_size);

    speex_preprocess_ctl(ai->sppPreprocess, SPEEX_PREPROCESS_GET_PSD, ps);
    speex_preprocess_ctl(ai->sppPreprocess, SPEEX_PREPROCESS_GET_NOISE_PSD, noise);

    float s = 0.0f;
    float n = 0.0001f;

    int start = (ps_size * 300) / SAMPLE_RATE;
    int stop = (ps_size * 2000) / SAMPLE_RATE;

    for (int i = start; i < stop; i++) {
        s += sqrtf(static_cast<float>(ps[i]));
        n += sqrtf(static_cast<float>(noise[i]));
    }

    txt.sprintf("%06.3f", s / n);
    qlMicSNR->setText(txt);

    spx_int32_t v;
    speex_preprocess_ctl(ai->sppPreprocess, SPEEX_PREPROCESS_GET_AGC_GAIN, &v);
    float fv = powf(10.0f, (static_cast<float>(v) / 20.0f));
    txt.sprintf("%03.0f%%", 100.0f / fv);
    qlMicVolume->setText(txt);

    txt.sprintf("%03.0f%%", ai->fSpeechProb * 100.0f);
    qlSpeechProb->setText(txt);

    txt.sprintf("%04.1f kbit/s", static_cast<float>(ai->iBitrate) / 1000.0f);
    qlBitrate->setText(txt);

    if (nTalking != bTalking) {
        bTalking = nTalking;
        QFont f = qlSpeechProb->font();
        f.setBold(bTalking);
        qlSpeechProb->setFont(f);
    }

    if (g.uiDoublePush > 1000000)
        txt = tr(">1000 ms");
    else
        txt.sprintf("%04llu ms", g.uiDoublePush / 1000);

    qlDoublePush->setText(txt);

    abSpeech->iBelow = iroundf(g.s.fVADmin * 32767.0f);
    abSpeech->iAbove = iroundf(g.s.fVADmax * 32767.0f);

    if (g.s.vsVAD == Settings::Amplitude) {
        abSpeech->iValue = iroundf((32767.f / 96.0f) * (96.0f + ai->dPeakMic));
    } else {
        abSpeech->iValue = iroundf(ai->fSpeechProb * 32767.0f);
    }

    abSpeech->update();

    anwNoise->update();
    if (aewEcho)
        aewEcho->updateGL();
}
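/*
 * Illustration (not part of the original source): in Amplitude mode the
 * expression (32767.f / 96.0f) * (96.0f + dPeakMic) maps the peak level from
 * the [-96 dB, 0 dB] range onto the bar's [0, 32767] range, so a silent
 * -96 dB input sits at 0 and a full-scale 0 dB input sits at 32767.
 */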
BasepointPixmap OverlayTextLine::createPixmap(unsigned int maxwidth, unsigned int height, QColor col) {
    float twice_edge = 2.0f * fEdge;

    if (! height || ! maxwidth)
        return BasepointPixmap();

    if (qpp.isEmpty() || iCurWidth > static_cast<int>(maxwidth) ||
        iCurHeight != static_cast<int>(height) ||
        (static_cast<int>(maxwidth) > iCurWidth && bElided)) {
        QFont f = qfFont;
        QFontMetrics fm(f);

        // fit the font into a bounding box with padding
        float ps = static_cast<float>(f.pointSizeF());
        float f_ad = static_cast<float>(fm.ascent() + fm.descent() + 1) / ps;

        float pointsize = static_cast<float>(height) / (f_ad + 2.0f * fEdgeFactor);

        if (fEdgeFactor * ps > 1.0f) {
            pointsize = static_cast<float>(height - 2) / f_ad;
        }

        if (pointsize <= 0.0f) {
            return BasepointPixmap();
        }

        f.setPointSizeF(pointsize);
        setFont(f);
        fm = QFontMetrics(f);

        twice_edge = 2.0f * fEdge;

        if (!qpp.isEmpty()) {
            qpp = QPainterPath();
        }

        // calculate text metrics for eliding and scaling
        QRectF bb;
        qpp.addText(0.0f, 0.0f, f, qsText);
        bb = qpp.controlPointRect();

        qreal effective_ascent = -bb.top();
        qreal effective_descent = bb.bottom();
        float scale = 1.0f;
        bool keep_baseline = true;
        if (effective_descent > fDescent || effective_ascent > fAscent) {
            qreal scale_ascent = effective_ascent > 0.0f ? fAscent / effective_ascent : 1.0f;
            qreal scale_descent = effective_descent > 0.0f ? fDescent / effective_descent : 1.0f;
            scale = static_cast<float>(qMin(scale_ascent, scale_descent));

            if (scale < fBaseliningThreshold) {
                float text_height = static_cast<float>(bb.height()) + twice_edge;
                scale = static_cast<float>(height) / text_height;
                keep_baseline = false;
            }

            qWarning() << QString(QLatin1String("Text \"%1\" did not fit (+%2/-%3): (+%4/-%5). Scaling to %6.")).arg(qsText).arg(fAscent).arg(fDescent).arg(effective_ascent).arg(effective_descent).arg(scale);
        }

        // eliding by previously calculated width
        if ((bb.width() * scale) + twice_edge > maxwidth) {
            int eliding_width = iroundf((static_cast<float>(maxwidth) / scale) - twice_edge + 0.5f);
            QString str = fm.elidedText(qsText, Qt::ElideRight, eliding_width);

            // use ellipsis as shortest possible string
            if (str.trimmed().isEmpty()) {
                str = QString(QChar(0x2026));
            }

            qpp = QPainterPath();
            qpp.addText(0.0f, 0.0f, f, str);
            bb = qpp.controlPointRect();
            bElided = true;
        } else {
            bElided = false;
        }

        // translation to "pixmap space":
        QMatrix correction;
        // * adjust left edge
        correction.translate(-bb.x() + fEdge, 0.0f);
        // * scale overly high text (still on baseline)
        correction.scale(scale, scale);
        if (keep_baseline) {
            // * translate down to baseline
            correction.translate(0.0f, (fAscent + fEdge) / scale);
        } else {
            // * translate into bounding box
            correction.translate(0.0f, -bb.top() + fEdge);
        }

        qpp = correction.map(qpp);

        iCurWidth = iroundf(bb.width() * scale + 0.5f);
        iCurHeight = height;
    }

    QRectF qr = qpp.controlPointRect();

    return render(
               iroundf(qr.width() + twice_edge + 0.5f),
               iroundf(fAscent + fDescent + twice_edge + 0.5f),
               col,
               QPoint(0, iroundf(fAscent + fEdge + 0.5f))
           );
}
size_t BLI_timecode_string_from_time(
        char *str, const size_t maxncpy, const int power, const float time_seconds,
        const double fps, const short timecode_style)
{
    int hours = 0, minutes = 0, seconds = 0, frames = 0;
    float time = time_seconds;
    char neg[2] = {'\0'};
    size_t rlen;

    /* get cframes */
    if (time < 0) {
        /* correction for negative values */
        neg[0] = '-';
        time = -time;
    }

    if (time >= 3600) {
        /* hours */
        /* XXX should we only display a single digit for hours since clips are
         * VERY UNLIKELY to be more than 1-2 hours max? However, that would
         * go against conventions...
         */
        hours = (int)time / 3600;
        time = (float)fmod(time, 3600);
    }

    if (time >= 60) {
        /* minutes */
        minutes = (int)time / 60;
        time = (float)fmod(time, 60);
    }

    if (power <= 0) {
        /* seconds + frames
         * Frames are derived from 'fraction' of second. We need to perform some additional rounding
         * to cope with 'half' frames, etc., which should be fine in most cases
         */
        seconds = (int)time;
        frames = iroundf((float)(((double)time - (double)seconds) * fps));
    }
    else {
        /* seconds (with pixel offset rounding) */
        seconds = iroundf(time);
    }

    switch (timecode_style) {
        case USER_TIMECODE_MINIMAL:
        {
            /* - In general, minutes and seconds should be shown, as most clips will be
             *   within this length. Hours will only be included if relevant.
             * - Only show frames when zoomed in enough for them to be relevant
             *   (using separator of '+' for frames).
             *   When showing frames, use slightly different display to avoid confusion with mm:ss format
             */
            if (power <= 0) {
                /* include "frames" in display */
                if (hours) {
                    rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d:%02d+%02d", neg, hours, minutes, seconds, frames);
                }
                else if (minutes) {
                    rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d+%02d", neg, minutes, seconds, frames);
                }
                else {
                    rlen = BLI_snprintf_rlen(str, maxncpy, "%s%d+%02d", neg, seconds, frames);
                }
            }
            else {
                /* don't include 'frames' in display */
                if (hours) {
                    rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d:%02d", neg, hours, minutes, seconds);
                }
                else {
                    rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d", neg, minutes, seconds);
                }
            }
            break;
        }
        case USER_TIMECODE_SMPTE_MSF:
        {
            /* reduced SMPTE format that always shows minutes, seconds, frames. Hours only shown as needed. */
            if (hours) {
                rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d:%02d:%02d", neg, hours, minutes, seconds, frames);
            }
            else {
                rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d:%02d", neg, minutes, seconds, frames);
            }
            break;
        }
        case USER_TIMECODE_MILLISECONDS:
        {
            /* reduced SMPTE. Instead of frames, milliseconds are shown */

            /* precision of decimal part */
            const int ms_dp = (power <= 0) ? (1 - power) : 1;

            /* to get 2 digit whole-number part for seconds display
             * (i.e. 3 is for 2 digits + radix, on top of full length)
             */
            const int s_pad = ms_dp + 3;

            if (hours) {
                rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d:%0*.*f", neg, hours, minutes, s_pad, ms_dp, time);
            }
            else {
                rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%0*.*f", neg, minutes, s_pad, ms_dp, time);
            }
            break;
        }
        case USER_TIMECODE_SECONDS_ONLY:
        {
            /* only show the original seconds display */
            /* round to whole numbers if power is >= 1 (i.e. scale is coarse) */
            if (power <= 0) {
                rlen = BLI_snprintf_rlen(str, maxncpy, "%.*f", 1 - power, time_seconds);
            }
            else {
                rlen = BLI_snprintf_rlen(str, maxncpy, "%d", iroundf(time_seconds));
            }
            break;
        }
        case USER_TIMECODE_SMPTE_FULL:
        default:
        {
            /* full SMPTE format */
            rlen = BLI_snprintf_rlen(str, maxncpy, "%s%02d:%02d:%02d:%02d", neg, hours, minutes, seconds, frames);
            break;
        }
    }

    return rlen;
}
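/*
 * Worked example for USER_TIMECODE_MINIMAL (illustrative only, not from the
 * original source): time_seconds = 90.5, fps = 24.0 and power = 0 give
 * minutes = 1, seconds = 30 and frames = iroundf(0.5 * 24.0) = 12, so the
 * "%s%02d:%02d+%02d" branch prints "01:30+12".
 */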