Example #1
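    // This variant takes no output stream: the TIME input is written through
    // setupAndWriteAnimationParameter, and each animation class below feeds its
    // OUTPUT keyframes into the per-target GLTFAnimationFlattener for later flattening.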
    bool writeAnimation(shared_ptr <GLTFAnimation> cvtAnimation,
                        const COLLADAFW::AnimationList::AnimationClass animationClass,
                        AnimatedTargetsSharedPtr animatedTargets,
                        GLTF::GLTFConverterContext &converterContext) {
        
        shared_ptr<JSONObject> samplers = cvtAnimation->samplers();
        shared_ptr<JSONArray> channels = cvtAnimation->channels();
        GLTFAnimation::Parameter *timeParameter = cvtAnimation->getParameterNamed("TIME");
        shared_ptr<GLTFBufferView> timeBufferView = timeParameter->getBufferView();
        std::string name = "TIME";
        std::string samplerID = cvtAnimation->getSamplerIDForName(name);
                
        cvtAnimation->removeParameterNamed("TIME");
        
        setupAndWriteAnimationParameter(cvtAnimation,
                                             "TIME",
                                             "FLOAT",
                                             (unsigned char*)timeBufferView->getBufferDataByApplyingOffset(), timeBufferView->getByteLength(),
                                             converterContext);
        
        //timeParameter->setByteOffset(outputStream->length());
        //outputStream->write(timeBufferView);
        
        //printf("time bufferLength: %d\n",(int)timeBufferView->getByteLength());
        
        switch (animationClass) {
            case COLLADAFW::AnimationList::TIME:
            {
                //In the current OpenCOLLADA implementation this case is never reached; only classes mapping to OUTPUT are, so INPUT (i.e. time) is handled when we enter this function.
            }
                break;
            case COLLADAFW::AnimationList::AXISANGLE: {
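                // Each key holds 4 floats (axis x, y, z and an angle); wrap them in a
                // COLLADAFW::Rotate and hand them to the target's animation flattener.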
                GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
                if (parameter) {
                    shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                    //the angle-to-radians conversion is done within the animationFlattener,
                    //but it might be better to do it beforehand...
                    for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                        shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                        std::string targetID = animatedTarget->getString("target");
                        if (converterContext._uniqueIDToTrackedObject.count(targetID) != 0) {
                            cvtAnimation->targets()->setValue(targetID, animatedTarget);
                            
                            shared_ptr<JSONObject> targetObject = converterContext._uniqueIDToTrackedObject[targetID];
                            std::string path = animatedTarget->getString("path");
                            if (path == "rotation") {
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                
                                float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                                float* rotations = (float*)bufferView->getBufferDataByApplyingOffset();
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    size_t offset = k * 4;
                                    
                                    shared_ptr <COLLADAFW::Rotate> rotate(new COLLADAFW::Rotate(rotations[offset + 0],
                                                                                                rotations[offset + 1],
                                                                                                rotations[offset + 2],
                                                                                                rotations[offset + 3]));
                                    animationFlattener->insertTransformAtTime(transformID, rotate, timeValues[k]);
                                }
                            }
                        }
                    }
                }
                cvtAnimation->removeParameterNamed("OUTPUT");
            }
                break;
            case COLLADAFW::AnimationList::MATRIX4X4: {
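                // Each key holds a 16-float matrix; rebuild a COLLADABU Matrix4 per key
                // and insert it into the flattener as a COLLADAFW::Matrix transform.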
                GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
                if (parameter) {
                    std::vector< shared_ptr <GLTFBufferView> > TRSBufferViews;
                    //FIXME: we assume float here, might be double
                    shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                    float* matrices = (float*)bufferView->getBufferDataByApplyingOffset();
                    float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                    
                    for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                        shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                        if (animatedTarget->getString("path") == "MATRIX") {
                            std::string targetID = animatedTarget->getString("target");
                            if (converterContext._uniqueIDToTrackedObject.count(targetID) != 0) {
                                cvtAnimation->targets()->setValue(targetID, animatedTarget);
                                
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    size_t offset = k * 16;
                                    float *m = matrices + offset;
                                    
                                    COLLADABU::Math::Matrix4 mat;
                                    mat.setAllElements(m[0], m[1], m[2], m[3],
                                                       m[4], m[5], m[6], m[7],
                                                       m[8], m[9], m[10], m[11],
                                                       m[12], m[13], m[14], m[15] );
                                    
                                    shared_ptr <COLLADAFW::Matrix> matTr(new COLLADAFW::Matrix(mat));
                                    animationFlattener->insertTransformAtTime(transformID, matTr, timeValues[k]);
                                }
                            }
                        }
                    }
                    cvtAnimation->removeParameterNamed("OUTPUT");

                } else {
                    //FIXME: report error
                    printf("WARNING: cannot find intermediate parameter named OUTPUT\n");
                }
            }
                return true;
            case COLLADAFW::AnimationList::POSITION_XYZ: {
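                // Each key holds 3 floats; depending on the animated path they become
                // either a COLLADAFW::Translate or a COLLADAFW::Scale transform.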
                GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
                if (parameter) {
                    shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                    //the angle-to-radians conversion is done within the animationFlattener,
                    //but it might be better to do it beforehand...
                    for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                        shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                        std::string targetID = animatedTarget->getString("target");
                        if (converterContext._uniqueIDToTrackedObject.count(targetID) != 0) {
                            cvtAnimation->targets()->setValue(targetID, animatedTarget);
                            
                            shared_ptr<JSONObject> targetObject = converterContext._uniqueIDToTrackedObject[targetID];
                            std::string path = animatedTarget->getString("path");
                            if (path == "translation") {
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                
                                float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                                float* translations = (float*)bufferView->getBufferDataByApplyingOffset();
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    size_t offset = k * 3;
                                    shared_ptr <COLLADAFW::Translate> translate(new COLLADAFW::Translate(translations[offset + 0],
                                                                                                         translations[offset + 1],
                                                                                                         translations[offset + 2]));
                                    animationFlattener->insertTransformAtTime(transformID, translate, timeValues[k]);
                                }
                            } else if (path == "scale") {
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                                float* scales = (float*)bufferView->getBufferDataByApplyingOffset();
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    size_t offset = k * 3;
                                    shared_ptr <COLLADAFW::Scale> scale(new COLLADAFW::Scale(scales[offset + 0],
                                                                                              scales[offset + 1],
                                                                                              scales[offset + 2]));
                                    animationFlattener->insertTransformAtTime(transformID, scale, timeValues[k]);
                                }
                            }
                        }
                    }
                }
                cvtAnimation->removeParameterNamed("OUTPUT");
            }
                return true;
            case COLLADAFW::AnimationList::ANGLE: {
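                // One float per key (the rotation angle); component index 3 of the
                // rotate transform (x, y, z, angle) is the slot updated in the flattener.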
                GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
                if (parameter) {
                    shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                    //the angle-to-radians conversion is done within the animationFlattener,
                    //but it might be better to do it beforehand...
                    for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                        shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                        std::string targetID = animatedTarget->getString("target");
                        if (converterContext._uniqueIDToTrackedObject.count(targetID) != 0) {
                            cvtAnimation->targets()->setValue(targetID, animatedTarget);
                            
                            shared_ptr<JSONObject> targetObject = converterContext._uniqueIDToTrackedObject[targetID];
                            std::string path = animatedTarget->getString("path");
                            if (path == "rotation") {
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                
                                float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                                float* rotations = (float*)bufferView->getBufferDataByApplyingOffset();
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    animationFlattener->insertValueAtTime(transformID, rotations[k], 3, timeValues[k]);
                                }
                            }                             
                        }
                    }
                }
                cvtAnimation->removeParameterNamed("OUTPUT");
            }
                return true;
            case COLLADAFW::AnimationList::POSITION_X:
            case COLLADAFW::AnimationList::POSITION_Y:
            case COLLADAFW::AnimationList::POSITION_Z:
            {
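                // index selects the vector component to update: 0 = X, 1 = Y, 2 = Z.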
                int index = animationClass - COLLADAFW::AnimationList::POSITION_X;
                GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
                if (parameter) {
                    shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                    for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                        shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                        std::string targetID = animatedTarget->getString("target");
                        
                        if (converterContext._uniqueIDToTrackedObject.count(targetID) != 0) {
                            cvtAnimation->targets()->setValue(targetID, animatedTarget);
                            shared_ptr<JSONObject> targetObject = converterContext._uniqueIDToTrackedObject[targetID];
                            std::string path = animatedTarget->getString("path");
                            if (path == "translation") {
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                
                                float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                                float* translations = (float*)bufferView->getBufferDataByApplyingOffset();
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    animationFlattener->insertValueAtTime(transformID, translations[k], index, timeValues[k]);
                                }
                                
                            } else if (path == "scale") {
                                std::string transformID = animatedTarget->getString("transformId");
                                shared_ptr<GLTFAnimationFlattener> animationFlattener = converterContext._uniqueIDToAnimationFlattener[targetID];
                                
                                float* timeValues = (float*)timeBufferView->getBufferDataByApplyingOffset();
                                float* scales = (float*)bufferView->getBufferDataByApplyingOffset();
                                for (size_t k = 0 ; k < cvtAnimation->getCount() ; k++) {
                                    animationFlattener->insertValueAtTime(transformID, scales[k], index, timeValues[k]);
                                }
                            }
                            else {
                                assert(0 && "unknown path name");
                            }
                        }
                    }
                }
                cvtAnimation->removeParameterNamed("OUTPUT");
            }
                return true;
                
                
            case COLLADAFW::AnimationList::COLOR_RGB:
            case COLLADAFW::AnimationList::COLOR_RGBA:
            case COLLADAFW::AnimationList::COLOR_R:
            case COLLADAFW::AnimationList::COLOR_G:
            case COLLADAFW::AnimationList::COLOR_B:
            case COLLADAFW::AnimationList::COLOR_A:
            case COLLADAFW::AnimationList::ARRAY_ELEMENT_1D:
            case COLLADAFW::AnimationList::ARRAY_ELEMENT_2D:
            case COLLADAFW::AnimationList::FLOAT:
            default:
                static bool printedOnce = false;
                if (!printedOnce) {
                    printf("WARNING: unhandled transform type\n");
                    printedOnce = true;
                }
                break;
        }
        
        return false;
    }
Example #2
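 // This variant writes the raw keyframe buffers straight into animationsOutputStream
 // via __SetupAndWriteAnimationParameter and registers channels with __AddChannel;
 // MATRIX4X4 outputs are first decomposed into translation/rotation/scale buffer views.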
 bool writeAnimation(shared_ptr <GLTFAnimation> cvtAnimation,
                     const COLLADAFW::AnimationList::AnimationClass animationClass,
                     AnimatedTargetsSharedPtr animatedTargets,
                     std::ofstream &animationsOutputStream,
                     GLTF::GLTFConverterContext &converterContext) {

     std::string samplerID;
     std::string name;
     shared_ptr<JSONObject> samplers = cvtAnimation->samplers();
     shared_ptr<JSONArray> channels = cvtAnimation->channels();
     size_t keyCount = cvtAnimation->getCount();
     GLTFAnimation::Parameter *timeParameter = cvtAnimation->getParameterNamed("TIME");
     if (timeParameter) {
         shared_ptr<GLTFBufferView> timeBufferView = timeParameter->getBufferView();
          name = "TIME";
          samplerID = cvtAnimation->getSamplerIDForName(name);
         
         timeParameter->setByteOffset(static_cast<size_t>(animationsOutputStream.tellp()));
         animationsOutputStream.write((const char*)( timeBufferView->getBufferDataByApplyingOffset()),
                                      timeBufferView->getByteLength());
         
         //printf("time bufferLength: %d\n",(int)timeBufferView->getByteLength());
     }
     
     switch (animationClass) {
         case COLLADAFW::AnimationList::TIME:
         {
              //In the current COLLADA implementation this case is never reached; only classes mapping to OUTPUT are, so INPUT (time) is handled when we enter this function.
         }
             break;
         case COLLADAFW::AnimationList::AXISANGLE:
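              // Axis-angle outputs are not converted in this variant; nothing is written.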
             break;
         case COLLADAFW::AnimationList::MATRIX4X4: {
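              // Decompose each 4x4 matrix key into TRS buffer views:
              // index 0 = translation (VEC3), 1 = rotation (VEC4), 2 = scale (VEC3).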
             GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
             if (parameter) {
                 std::vector< shared_ptr <GLTFBufferView> > TRSBufferViews;
                 //FIXME: we assume float here, might be double
                 shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                 float* matrices = (float*)bufferView->getBufferDataByApplyingOffset();
                 __DecomposeMatrices(matrices, cvtAnimation->getCount(), TRSBufferViews);
                 cvtAnimation->removeParameterNamed("OUTPUT");
                 
                 //Translation
                 __SetupAndWriteAnimationParameter(cvtAnimation,
                                                   "translation",
                                                   "FLOAT_VEC3",
                                                   TRSBufferViews[0],
                                                   animationsOutputStream);
                 
                 //Rotation
                 __SetupAndWriteAnimationParameter(cvtAnimation,
                                                   "rotation",
                                                   "FLOAT_VEC4",
                                                   TRSBufferViews[1],
                                                   animationsOutputStream);
                 
                 //Scale
                 __SetupAndWriteAnimationParameter(cvtAnimation,
                                                   "scale",
                                                   "FLOAT_VEC3",
                                                   TRSBufferViews[2],
                                                   animationsOutputStream);
                 
                 for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                     shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                     if (animatedTarget->getString("path") == "MATRIX") {
                         std::string targetID = animatedTarget->getString("target");
                         __AddChannel(cvtAnimation, targetID, "translation");
                         __AddChannel(cvtAnimation, targetID, "rotation");
                         __AddChannel(cvtAnimation, targetID, "scale");
                     }
                 }
                 
             } else {
                 //FIXME: report error
                 printf("WARNING: cannot find intermediate parameter named OUTPUT\n");
             }
         }
              return true;
         case COLLADAFW::AnimationList::POSITION_XYZ: {
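              // OUTPUT already holds one VEC3 translation per key; write it as-is and
              // add a "translation" channel for every matching animated target.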
             GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
             if (parameter) {
                 shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                 
                 __SetupAndWriteAnimationParameter(cvtAnimation,
                                                   "translation",
                                                   "FLOAT_VEC3",
                                                   bufferView,
                                                   animationsOutputStream);
                 
                 for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                     shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                     
                     if (animatedTarget->getString("path") == "translation") {
                         std::string targetID = animatedTarget->getString("target");
                         __AddChannel(cvtAnimation, targetID, "translation");
                     }
                 }
                 cvtAnimation->removeParameterNamed("OUTPUT");
             }
         }
             
              return true;
         case COLLADAFW::AnimationList::ANGLE: {
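              // Convert the animated angles to radians in place, then splice each one
              // into the target's static rotation (x, y, z, angle) at component 3 to
              // produce per-key FLOAT_VEC4 rotations.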
             GLTFAnimation::Parameter *parameter = cvtAnimation->getParameterNamed("OUTPUT");
             if (parameter) {
                 shared_ptr<GLTFBufferView> bufferView = parameter->getBufferView();
                 //Convert angles to radians
                 float *angles = (float*)bufferView->getBufferDataByApplyingOffset();
                 for (size_t i = 0 ; i < keyCount ; i++) {
                      angles[i] = angles[i] * 0.0174532925f; //degrees to radians (pi / 180)
                 }
                 
                 for (size_t animatedTargetIndex = 0 ; animatedTargetIndex < animatedTargets->size() ; animatedTargetIndex++) {
                     shared_ptr<JSONObject> animatedTarget = (*animatedTargets)[animatedTargetIndex];
                     std::string targetID = animatedTarget->getString("target");
                     
                     if (converterContext._uniqueIDToTrackedObject.count(targetID) != 0) {
                         shared_ptr<JSONObject> targetObject = converterContext._uniqueIDToTrackedObject[targetID];
                         std::string path = animatedTarget->getString("path");
                         if (path == "rotation") {
                             shared_ptr<JSONArray> rotationArray = static_pointer_cast <JSONArray>(targetObject->getValue(path));
                             shared_ptr<GLTFBufferView> adjustedBuffer = __CreateBufferViewByReplicatingArrayAndReplacingValueAtIndex(bufferView, rotationArray, 3, "FLOAT", cvtAnimation->getCount());
                             
                             __SetupAndWriteAnimationParameter(cvtAnimation,
                                                               "rotation",
                                                               "FLOAT_VEC4",
                                                               adjustedBuffer,
                                                               animationsOutputStream);
                             
                             __AddChannel(cvtAnimation, targetID, path);
                         }
                     }
                 }
             }
             cvtAnimation->removeParameterNamed("OUTPUT");
         }
              return true;
         case COLLADAFW::AnimationList::POSITION_X:
         case COLLADAFW::AnimationList::POSITION_Y:
         case COLLADAFW::AnimationList::POSITION_Z:
         case COLLADAFW::AnimationList::COLOR_RGB:
         case COLLADAFW::AnimationList::COLOR_RGBA:
         case COLLADAFW::AnimationList::COLOR_R:
         case COLLADAFW::AnimationList::COLOR_G:
         case COLLADAFW::AnimationList::COLOR_B:
         case COLLADAFW::AnimationList::COLOR_A:
         case COLLADAFW::AnimationList::ARRAY_ELEMENT_1D:
         case COLLADAFW::AnimationList::ARRAY_ELEMENT_2D:
         case COLLADAFW::AnimationList::FLOAT: 
         default:
             break;
     }
     
     return false;
 }
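
Note: the helper __CreateBufferViewByReplicatingArrayAndReplacingValueAtIndex used by the ANGLE case in Example #2 is not shown here. As a rough illustration of the replicate-and-replace idea it is used for, the following standalone sketch works on plain vectors instead of GLTFBufferView and assumes the base rotation is stored as [x, y, z, angle]; the function name and signature are illustrative, not the converter's actual API.

 #include <cstddef>
 #include <vector>

 // Illustrative sketch only: replicate a static 4-component rotation (x, y, z, angle)
 // once per keyframe, replacing the component at replacedIndex (3 = angle) with the
 // animated, already-converted-to-radians value for that key.
 static std::vector<float> replicateRotationReplacingComponent(const float baseRotation[4],
                                                               const float* animatedValues,
                                                               std::size_t keyCount,
                                                               std::size_t replacedIndex) {
     std::vector<float> out(keyCount * 4);
     for (std::size_t k = 0; k < keyCount; ++k) {
         for (std::size_t c = 0; c < 4; ++c) {
             out[(k * 4) + c] = (c == replacedIndex) ? animatedValues[k] : baseRotation[c];
         }
     }
     return out;
 }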