void MotionGraphController::update(float _time) { // if time has not advanced, there's nothing to do if ((_time-status.current_time) < 0.0001f) return; // store the current time status.current_time = _time; // determine position in active motion sequence computeCurrentFrame(); // check for transitions to a new motion sequence if (status.active_frame >= status.transition_trigger_frame) { // make the transition status.active_seqID = status.transition_seqID; status.active_frame = status.transition_frame; status.frame_zero_time = status.current_time - status.active_frame/frame_rate; computeCurrentFrame(); // set up next transition setupNextTransition(); } }
bool MotionGraphController::timeToTransition(float time) {// need to find out how to figure out when the time matches with the frame number. MotionSequence *MS; MS = returnMotionSequenceContainerFromID(status.SeqID).MS; int currentFrame = computeCurrentFrame(time); if (currentFrame >= status.FrameNumberTransition) return(true); else return(false); }
void Player::play() { computeCurrentFrame(); if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation if (_loop) { loopRecording(); } else { stopPlaying(); } return; } const RecordingContext* context = &_recording->getContext(); if (_playFromCurrentPosition) { context = &_currentContext; } const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame); const RecordingFrame& nextFrame = _recording->getFrame(_currentFrame + 1); glm::vec3 translation = glm::mix(currentFrame.getTranslation(), nextFrame.getTranslation(), _frameInterpolationFactor); _avatar->setPosition(context->position + context->orientation * translation); glm::quat rotation = safeMix(currentFrame.getRotation(), nextFrame.getRotation(), _frameInterpolationFactor); _avatar->setOrientation(context->orientation * rotation); float scale = glm::mix(currentFrame.getScale(), nextFrame.getScale(), _frameInterpolationFactor); _avatar->setTargetScale(context->scale * scale); QVector<glm::quat> jointRotations(currentFrame.getJointRotations().size()); for (int i = 0; i < currentFrame.getJointRotations().size(); ++i) { jointRotations[i] = safeMix(currentFrame.getJointRotations()[i], nextFrame.getJointRotations()[i], _frameInterpolationFactor); } _avatar->setJointRotations(jointRotations); HeadData* head = const_cast<HeadData*>(_avatar->getHeadData()); if (head) { // Make sure fake face tracker connection doesn't get turned off _avatar->setForceFaceTrackerConnected(true); QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size()); for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) { blendCoef[i] = glm::mix(currentFrame.getBlendshapeCoefficients()[i], nextFrame.getBlendshapeCoefficients()[i], _frameInterpolationFactor); } head->setBlendshapeCoefficients(blendCoef); float leanSideways = glm::mix(currentFrame.getLeanSideways(), nextFrame.getLeanSideways(), _frameInterpolationFactor); 
head->setLeanSideways(leanSideways); float leanForward = glm::mix(currentFrame.getLeanForward(), nextFrame.getLeanForward(), _frameInterpolationFactor); head->setLeanForward(leanForward); glm::quat headRotation = safeMix(currentFrame.getHeadRotation(), nextFrame.getHeadRotation(), _frameInterpolationFactor); glm::vec3 eulers = glm::degrees(safeEulerAngles(headRotation)); head->setFinalPitch(eulers.x); head->setFinalYaw(eulers.y); head->setFinalRoll(eulers.z); glm::vec3 lookAt = glm::mix(currentFrame.getLookAtPosition(), nextFrame.getLookAtPosition(), _frameInterpolationFactor); head->setLookAtPosition(context->position + context->orientation * lookAt); } else { qCDebug(avatars) << "WARNING: Player couldn't find head data."; } _options.position = _avatar->getPosition(); _options.orientation = _avatar->getOrientation(); _injector->setOptions(_options); }
void Player::play() { computeCurrentFrame(); if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation if (_loop) { loopRecording(); } else { stopPlaying(); } return; } const RecordingContext* context = &_recording->getContext(); if (_playFromCurrentPosition) { context = &_currentContext; } const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame); const RecordingFrame& nextFrame = _recording->getFrame(_currentFrame + 1); glm::vec3 translation = glm::mix(currentFrame.getTranslation(), nextFrame.getTranslation(), _frameInterpolationFactor); _avatar->setPosition(context->position + context->orientation * translation); glm::quat rotation = safeMix(currentFrame.getRotation(), nextFrame.getRotation(), _frameInterpolationFactor); _avatar->setOrientation(context->orientation * rotation); float scale = glm::mix(currentFrame.getScale(), nextFrame.getScale(), _frameInterpolationFactor); _avatar->setTargetScale(context->scale * scale); // Joint array playback // FIXME: THis is still using a deprecated path to assign the joint orientation since setting the full RawJointData array doesn't // work for Avatar. 
We need to fix this working with the animation team const auto& prevJointArray = currentFrame.getJointArray(); const auto& nextJointArray = currentFrame.getJointArray(); QVector<JointData> jointArray(prevJointArray.size()); QVector<glm::quat> jointRotations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed QVector<glm::vec3> jointTranslations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed for (int i = 0; i < jointArray.size(); i++) { const auto& prevJoint = prevJointArray[i]; const auto& nextJoint = nextJointArray[i]; auto& joint = jointArray[i]; // Rotation joint.rotationSet = prevJoint.rotationSet || nextJoint.rotationSet; if (joint.rotationSet) { joint.rotation = safeMix(prevJoint.rotation, nextJoint.rotation, _frameInterpolationFactor); jointRotations[i] = joint.rotation; // FIXME: remove once the setRawJointData is fixed } joint.translationSet = prevJoint.translationSet || nextJoint.translationSet; if (joint.translationSet) { joint.translation = glm::mix(prevJoint.translation, nextJoint.translation, _frameInterpolationFactor); jointTranslations[i] = joint.translation; // FIXME: remove once the setRawJointData is fixed } } // _avatar->setRawJointData(jointArray); // FIXME: Enable once the setRawJointData is fixed _avatar->setJointRotations(jointRotations); // FIXME: remove once the setRawJointData is fixed // _avatar->setJointTranslations(jointTranslations); // FIXME: remove once the setRawJointData is fixed HeadData* head = const_cast<HeadData*>(_avatar->getHeadData()); if (head) { // Make sure fake face tracker connection doesn't get turned off _avatar->setForceFaceTrackerConnected(true); QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size()); for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) { blendCoef[i] = glm::mix(currentFrame.getBlendshapeCoefficients()[i], nextFrame.getBlendshapeCoefficients()[i], _frameInterpolationFactor); } 
head->setBlendshapeCoefficients(blendCoef); float leanSideways = glm::mix(currentFrame.getLeanSideways(), nextFrame.getLeanSideways(), _frameInterpolationFactor); head->setLeanSideways(leanSideways); float leanForward = glm::mix(currentFrame.getLeanForward(), nextFrame.getLeanForward(), _frameInterpolationFactor); head->setLeanForward(leanForward); glm::quat headRotation = safeMix(currentFrame.getHeadRotation(), nextFrame.getHeadRotation(), _frameInterpolationFactor); glm::vec3 eulers = glm::degrees(safeEulerAngles(headRotation)); head->setFinalPitch(eulers.x); head->setFinalYaw(eulers.y); head->setFinalRoll(eulers.z); glm::vec3 lookAt = glm::mix(currentFrame.getLookAtPosition(), nextFrame.getLookAtPosition(), _frameInterpolationFactor); head->setLookAtPosition(context->position + context->orientation * lookAt); } else { qCDebug(avatars) << "WARNING: Player couldn't find head data."; } _options.position = _avatar->getPosition(); _options.orientation = _avatar->getOrientation(); _injector->setOptions(_options); }
// Returns the value of one animation channel at the given time, handling
// motion-graph transitions as a side effect: when the current time reaches
// the scheduled transition frame, playback state (status, last_transition_*)
// is mutated to jump to the transition target before sampling.
// Throws AnimationException when no MotionSequence is attached or the
// requested channel is invalid.
// NOTE(review): computeCurrentFrame(_time) is called repeatedly; it
// presumably has side effects on controller state — confirm before
// deduplicating the calls.
float MotionGraphController::getValue(CHANNEL_ID _channel, float _time){
    // Case 1: not currently transitioning, but it is time to transition
    // (i.e. time to loop / jump per the graph schedule).
    if (!status.isTransitioning && timeToTransition(_time)) {
        //iterateStatus();
        // NOTE(review): frame3 is unused in this branch; the call may be
        // kept only for its side effects — TODO confirm.
        int frame3 = computeCurrentFrame(_time);
        // Sample from the currently active sequence.
        MotionSequence *motion_sequence = returnMotionSequenceContainerFromID(status.SeqID).MS;
        if (motion_sequence == NULL)
            throw AnimationException("MotionSequenceController has no attached MotionSequence");
        if (!isValidChannel(_channel, _time)) {
            string s = string("MotionSequenceController received request for invalid channel ") + " bone: " + toString(_channel.bone_id) + " dof: " + toString(_channel.channel_type);
            throw AnimationException(s.c_str());
        }
        // int frame3 = computeCurrentFrame(_time);
        // Record when/where this transition happened, used by subsequent
        // current-frame calculations.
        last_transition_time = _time;
        last_transition_frame = status.FrameNumberTransitionTo;
        // set the frame number to the frame we transition to.
        status.FrameNumber = status.FrameNumberTransitionTo;
        // int frame3 = computeCurrentFrame(_time);
        float value = motion_sequence->getValue(_channel, computeCurrentFrame(_time));
        // transition the graph using status information
        // only use when the names of Motion sequences match the filenames of
        // the frames on the graph; in this case we need to put the vertex
        // descriptor back to its original spot
        //transitionGraph();
        printStatus();
        // update the status
        iterateStatus();
        printStatus();
        return(value);
    }
    // Case 2: already transitioning and it is time to transition — sample
    // from the transition-target sequence instead of the active one.
    else if (status.isTransitioning && timeToTransition(_time)) {
        MotionSequence *motion_sequence = returnMotionSequenceContainerFromID(status.TransitionToSeqId).MS;
        if (motion_sequence == NULL)
            throw AnimationException("MotionSequenceController has no attached MotionSequence");
        if (!isValidChannel(_channel, _time)) {
            string s = string("MotionSequenceController received request for invalid channel ") + " bone: " + toString(_channel.bone_id) + " dof: " + toString(_channel.channel_type);
            throw AnimationException(s.c_str());
        }
        // set this for currentFrameCalculations
        last_transition_time = _time;
        last_transition_frame = status.FrameNumberTransitionTo;
        // set the frame number to the frame we transition to.
        status.FrameNumber = status.FrameNumberTransitionTo;
        // NOTE(review): frame3 is unused in this branch as well.
        int frame3 = computeCurrentFrame(_time);
        float value = motion_sequence->getValue(_channel, computeCurrentFrame(_time));
        // transition the graph using status information
        // only use when the names of Motion sequences match the filenames of
        // the frames on the graph
        //transitionGraph();
        printStatus();
        // update the status
        iterateStatus();
        printStatus();
        return(value);
    }
    // Case 3: not at the right time to transition — plain sampling of the
    // active sequence, just advancing status.FrameNumber.
    else{
        MotionSequence *motion_sequence = returnMotionSequenceContainerFromID(status.SeqID).MS;
        if (motion_sequence == NULL)
            throw AnimationException("MotionSequenceController has no attached MotionSequence");
        if (!isValidChannel(_channel, _time)) {
            string s = string("MotionSequenceController received request for invalid channel ") + " bone: " + toString(_channel.bone_id) + " dof: " + toString(_channel.channel_type);
            throw AnimationException(s.c_str());
        }
        int frame3 = computeCurrentFrame(_time);
        float value = motion_sequence->getValue(_channel, computeCurrentFrame(_time));
        if (frame3 > status.FrameNumber) {
            //printStatus();
        }
        // update status to the frame we just sampled
        status.FrameNumber = frame3;
        // only use when the names of Motion sequences match the filenames of
        // the frames on the graph
        // iterate graph
        //iterateMotionGraph();
        return(value);
    }
}