void EditorSceneImporterFBXConv::_detect_bones_in_nodes(State& state,const Array& p_nodes) {

	// Walk the node array: register every node flagged "isBone", pick up
	// authoritative rest transforms from mesh part "bones" lists, and recurse
	// into children so the whole subtree is covered.
	for(int node_idx=0; node_idx<p_nodes.size(); node_idx++) {

		Dictionary node = p_nodes[node_idx];

		if (node.has("isBone") && bool(node["isBone"])) {

			String bone_name=_id(node["id"]);
			print_line("IS BONE: "+bone_name);

			if (!state.bones.has(bone_name)) {
				state.bones.insert(bone_name,BoneInfo());
			}

			// Derive the rest pose from the node transform only when a mesh
			// part has not already supplied an explicit one.
			if (!state.bones[bone_name].has_rest) {
				state.bones[bone_name].rest=_get_transform(node).affine_inverse();
			}

			state.bones[bone_name].node=node;
		}

		if (node.has("parts")) {

			Array part_list=node["parts"];
			for(int part_idx=0; part_idx<part_list.size(); part_idx++) {

				Dictionary part=part_list[part_idx];
				if (!part.has("bones"))
					continue;

				Array bone_list=part["bones"];
				for(int bone_idx=0; bone_idx<bone_list.size(); bone_idx++) {

					Dictionary bone=bone_list[bone_idx];
					if (!bone.has("node"))
						continue;

					String name = _id(bone["node"]);
					if (!state.bones.has(name)) {
						state.bones.insert(name,BoneInfo());
					}

					// Part-level entries carry the authoritative rest transform.
					state.bones[name].rest=_get_transform(bone);
					state.bones[name].has_rest=true;
				}
			}
		}

		if (node.has("children")) {
			_detect_bones_in_nodes(state,node["children"]);
		}
	}
}
//--------------------------------------- void Mesh::LoadBoneData( unsigned index, const aiMesh* mesh, std::vector< Vertex >& verts ) { for ( unsigned i = 0; i < mesh->mNumBones; ++i ) { unsigned boneIndex = 0; const aiBone* bone = mesh->mBones[i]; std::string boneName = bone->mName.data; auto itr = mBoneMap.find( boneName ); if ( itr != mBoneMap.end() ) { boneIndex = itr->second; } else { boneIndex = mNumBones++; mBoneInfo.push_back( BoneInfo() ); mBoneInfo[ boneIndex ].boneOffset = glmMat4FromaiMat4( bone->mOffsetMatrix ); itr->second = boneIndex; DebugPrintf( "Adding Bone '%s' : %u\n", itr->first.c_str(), itr->second ); } unsigned nWeights = bone->mNumWeights; assert( nWeights <= 4U ); for ( unsigned j = 0; j < nWeights; ++j ) { const unsigned vertexId = bone->mWeights[j].mVertexId; verts[ vertexId ].boneWeights[j] = bone->mWeights[j].mWeight; verts[ vertexId ].boneIds[j] = boneIndex; } } }
void EditorSceneImporterFBXConv::_detect_bones(State& state) {

	// Unlike most formats, this one explicitly marks bone usage, so detection
	// is two simple passes rather than heuristic guessing.

	// Pass 1: every bone referenced by an animation channel gets an entry with
	// has_anim_chan set, even before its node shows up in the scene graph.
	for(int anim_idx=0; anim_idx<state.animations.size(); anim_idx++) {

		Dictionary anim = state.animations[anim_idx];
		if (!anim.has("bones"))
			continue;

		Array anim_bones=anim["bones"];
		for(int bone_idx=0; bone_idx<anim_bones.size(); bone_idx++) {

			Dictionary anim_bone=anim_bones[bone_idx];
			if (!anim_bone.has("boneId"))
				continue;

			String bone_id = anim_bone["boneId"];
			if (!state.bones.has(bone_id)) {
				state.bones.insert(bone_id,BoneInfo());
			}
			state.bones[bone_id].has_anim_chan=true; //used in anim
		}
	}

	// Pass 2: scan the node tree for nodes flagged as bones, then assemble
	// the skeletons from what was found.
	_detect_bones_in_nodes(state,state.nodes);
	_parse_skeletons("",state,state.nodes,NULL,-1);

	print_line("found bones: "+itos(state.bones.size()));
	print_line("found skeletons: "+itos(state.skeletons.size()));
}
/** Remove this function when VER_UE4_REFERENCE_SKELETON_REFACTOR is removed. */ void USkeleton::ConvertToFReferenceSkeleton() { check( BoneTree.Num() == RefLocalPoses_DEPRECATED.Num() ); const int32 NumRefBones = RefLocalPoses_DEPRECATED.Num(); ReferenceSkeleton.Empty(); for(int32 BoneIndex=0; BoneIndex<NumRefBones; BoneIndex++) { const FBoneNode& BoneNode = BoneTree[BoneIndex]; FMeshBoneInfo BoneInfo(BoneNode.Name_DEPRECATED, BoneNode.Name_DEPRECATED.ToString(), BoneNode.ParentIndex_DEPRECATED); const FTransform& BoneTransform = RefLocalPoses_DEPRECATED[BoneIndex]; // All should be good. Parents before children, no duplicate bones? ReferenceSkeleton.Add(BoneInfo, BoneTransform); } // technically here we should call RefershAllRetargetSources(); but this is added after // VER_UE4_REFERENCE_SKELETON_REFACTOR, this shouldn't be needed. It shouldn't have any // AnimatedRetargetSources ensure (AnimRetargetSources.Num() == 0); }