/// Resize the map vertically to h rows, preserving existing tile indices.
/// Rows kept: min(h, mHeight). Rows added beyond the old height are filled
/// with tile index 0; rows beyond the new height are discarded. Applies to
/// every layer, then records the new height in mHeight.
void EditorMap::setHeight(I32 h)
{
    // Number of rows whose contents survive the resize.
    const I32 rowsToCopy = (h < mHeight) ? h : mHeight;
    for (auto it = mLayers.begin(); it != mLayers.end(); ++it)
    {
        // The fill constructor value-initializes every element to 0, so any
        // rows past the old height are already zeroed — no explicit fill
        // loop is needed (the original wrote the zeros element by element
        // and carried an unused `zeroOverflow` local).
        std::vector<I32> newIndices(mWidth * h);
        for (I32 y = 0; y < rowsToCopy; ++y)
            for (I32 x = 0; x < mWidth; ++x)
                newIndices[y * mWidth + x] = (*it)[y * mWidth + x];
        // Move instead of copying the freshly built vector into the layer.
        it->mIndices = std::move(newIndices);
    }
    mHeight = h;
}
/// Resize the map horizontally to w columns, preserving existing tile
/// indices. Columns kept: min(w, mWidth). Columns added beyond the old
/// width are filled with tile index 0; columns beyond the new width are
/// discarded. Applies to every layer, then records the new width in mWidth.
/// Mirrors setHeight (the original used `>=` here but `>` in setHeight; the
/// equal case copies everything either way, so a single unified copy loop
/// is equivalent).
void EditorMap::setWidth(I32 w)
{
    // Number of columns whose contents survive the resize.
    const I32 colsToCopy = (w < mWidth) ? w : mWidth;
    for (auto it = mLayers.begin(); it != mLayers.end(); ++it)
    {
        // Value-initialized to 0 by the fill constructor, so any columns
        // past the old width are already zeroed (the original wrote the
        // zeros explicitly and carried an unused `zeroOverflow` local).
        std::vector<I32> newIndices(w * mHeight);
        for (I32 y = 0; y < mHeight; ++y)
            for (I32 x = 0; x < colsToCopy; ++x)
                newIndices[y * w + x] = (*it)[y * mWidth + x];
        // Move instead of copying the freshly built vector into the layer.
        it->mIndices = std::move(newIndices);
    }
    mWidth = w;
}
/// Reorders `bones` into a canonical hierarchy order — parents before their
/// children, grouped by hierarchy depth, alphabetical within each depth —
/// and remaps every bone index stored in the model's meshes and in the
/// per-bone animation keys to match the new ordering.
void Exporter::buildBoneHierarchy() {
    // Nothing to do for a model without bones.
    if(bones.size() == 0)
        return;

    // Hierarchy level (depth) for each bone, starting from 0 at the roots.
    std::vector<int> boneLevels(bones.size());
    // Work on a copy; `bones` is only replaced at the very end.
    std::vector<Bone> newBones = bones;

    // Phase 1: order parents before their children.
    // Bubble-sort style: the outer k loop repeats the pass size() times so
    // that every bone eventually migrates in front of all of its children.
    {
        for(unsigned int k = 0; k < newBones.size(); ++k)
        for(unsigned int i = 1; i < newBones.size(); ++i)
        {
            for(unsigned int j = 0; j < i; ++j)
            {
                // If bone i is the parent of an earlier bone j, swap them
                // so the parent comes first.
                if(newBones[i].getName() == newBones[j].getParentName())
                {
                    std::swap(newBones[i], newBones[j]);
                }
            }
        }
    }

    // Phase 2: compute the hierarchy level of each bone by walking up the
    // parent chain. Because of phase 1, every ancestor of bone i sits at an
    // index j < i, so the search range [0, i) is sufficient.
    for(unsigned int i = 0; i < bones.size(); ++i)
    {
        Bone &bone = newBones[i];
        std::string parent = bone.getParentName();
        int level = 0;
        while(!parent.empty())
        {
            // Find parent among the bones already known to precede i.
            for(unsigned int j = 0; j < i; ++j)
            {
                Bone &b = newBones[j];
                if(newBones[j].getName() == parent)
                {
                    ++level;
                    parent = newBones[j].getParentName();
                    break;
                }
            }
            boneLevels[i] = level;
            // Loop-exit guard: if `parent` still equals bone i's own parent
            // name, the lookup above made no progress (parent not found),
            // so bail out instead of spinning forever.
            // NOTE(review): this also exits early if a grandparent happens
            // to share the parent's name — assumes bone names are unique;
            // verify against the asset pipeline.
            if(parent == newBones[i].getParentName())
                break;
        }
    }

    // Phase 3: stable-ish reorder so that lower levels come first.
    for(unsigned int i = 0; i < newBones.size(); ++i) // Max amount of levels
    {
        // Search for level i
        for(unsigned int j = 0; j < newBones.size(); ++j)
        {
            if(boneLevels[j] == static_cast<int> (i))
            {
                // Can we move this bone in front of a deeper-level bone?
                for(unsigned int k = 0; k < j; ++k)
                {
                    if(boneLevels[k] > static_cast<int> (i))
                    {
                        std::swap(boneLevels[k], boneLevels[j]);
                        std::swap(newBones[k], newBones[j]);
                        break;
                    }
                }
            }
        }
    }

    // Phase 4: alphabetical sort inside each contiguous level range.
    unsigned int sortStart = 0;
    unsigned int sortEnd = bones.size();
    int sortLevel = 0;
    for(;;)
    {
        // Find the range for sorting: scan until the level changes.
        for(unsigned int i = sortStart; i < bones.size(); ++i)
        {
            // This gives us the end of the current level's run.
            if(boneLevels[i] != sortLevel)
            {
                sortEnd = i;
                break;
            }
        }
        // End of list: no level change found, the run extends to the end.
        if(sortStart == sortEnd)
            sortEnd = bones.size();
        // Sort this level's bones alphabetically.
        std::sort(newBones.begin() + sortStart, newBones.begin() + sortEnd, &BoneAlphaSort);
        // All done, exit
        if(sortEnd == bones.size())
            break;
        // Advance to the next level's run.
        sortStart = sortEnd;
        ++sortLevel;
        // There can be at most bones.size() distinct levels.
        assert(sortLevel < int(bones.size()));
    }

    // Phase 5: build the permutation table: newIndices[old index] = new index.
    std::vector<int> newIndices(bones.size());
    for(unsigned int i = 0; i < bones.size(); ++i) // old index
    for(unsigned int j = 0; j < bones.size(); ++j) // new index
    {
        if(bones[i].getName() == newBones[j].getName())
        {
            newIndices[i] = j;
            break;
        }
    }

    // Correct bone indices stored in the meshes.
    std::vector<boost::shared_ptr<Object> > &objects = model.getObjects();
    for(unsigned int j = 0; j < objects.size(); ++j)
        objects[j]->correctBoneIndices(newIndices);

    // Correct bone indices in the animation: permute the per-bone key lists.
    std::vector<std::vector<AnimationKey> > newBoneKeys(boneKeys.size());
    for(unsigned int k = 0; k < boneKeys.size(); ++k)
        newBoneKeys[newIndices[k]] = boneKeys[k];

    // Commit the reordered data.
    bones = newBones;
    boneKeys = newBoneKeys;
    printInfo("Resorted bone hierarchy");
}
/// Produce a copy of dist_ whose axes have been shrunk by the configured
/// buffer bin counts (fBuffers), redistributing the content of the old
/// distribution into the new, smaller binning. Content that falls in a
/// buffer region is either folded into the nearest edge bin of the fit
/// region (when fUsingOverflows is set) or dropped.
BinnedED BinnedEDShrinker::ShrinkDist(const BinnedED& dist_) const{
    // No buffer no problem. FIXME: what about if all the values are zero?
    if (!fBuffers.size())
        return dist_;

    size_t nDims = dist_.GetNDims();
    // FIXME Add a check to see if the non zero entries of fBuffers are in the pdf and give warning

    // 1. Build new axes. ShrinkPdf method just makes a copy if buffer size is zero.
    // fBuffers is keyed by observable data index, so translate each axis
    // position i to its data index before the lookup.
    AxisCollection newAxes;
    const std::vector<size_t> distDataIndices = dist_.GetObservables().GetIndices();
    size_t dataIndex = 0;
    for(size_t i = 0; i < nDims; i++){
        dataIndex = distDataIndices.at(i);
        if (!fBuffers.count(dataIndex))
            // No buffer configured for this observable: keep the axis as-is.
            newAxes.AddAxis(dist_.GetAxes().GetAxis(i));
        else
            // Trim (lower, upper) buffer bins off this axis.
            newAxes.AddAxis(ShrinkAxis(dist_.GetAxes().GetAxis(i),
                                       fBuffers.at(dataIndex).first,
                                       fBuffers.at(dataIndex).second));
    }

    // 2. Initialise the new pdf with same data rep
    BinnedED newDist(dist_.GetName() + "_shrunk", newAxes);
    newDist.SetObservables(dist_.GetObservables());

    // 3. Fill the axes
    std::vector<size_t> newIndices(dist_.GetNDims()); // same as old, just corrected for overflow
    // Deliberately signed: holds the difference of two unsigneds so that a
    // bin inside the lower buffer shows up as a negative index.
    int offsetIndex = 0; // note taking difference of two unsigneds
    size_t newBin = 0; // will loop over dims and use this to assign bin # corrected for overflow
    const AxisCollection& axes = dist_.GetAxes();
    double content = 0;

    // bin by bin of old pdf
    for(size_t i = 0; i < dist_.GetNBins(); i++){
        content = dist_.GetBinContent(i);
        if(!content) // no content no problem
            continue;

        // work out the index of this bin in the new shrunk pdf.
        for(size_t j = 0; j < nDims; j++){
            offsetIndex = axes.UnflattenIndex(i, j); // the index in old pdf
            if (fBuffers.count(distDataIndices.at(j))) // offset by lower buffer if nonzero
                offsetIndex -= fBuffers.at(distDataIndices.at(j)).first;

            // Correct the ones that fall in the buffer regions
            // bins in the lower buffer have negative index. Put in first bin in fit region or ignore
            if (offsetIndex < 0){
                offsetIndex = 0;
                if(!fUsingOverflows)
                    content = 0; // drop the content instead of folding it in
            }

            // bins in the upper buffer have i > number of bins in axis j. Do the same
            // NOTE(review): offsetIndex is non-negative here (clamped above),
            // so the signed/unsigned comparison against GetNBins() is safe.
            if (offsetIndex >= newAxes.GetAxis(j).GetNBins()){
                offsetIndex = newAxes.GetAxis(j).GetNBins() - 1;
                if (!fUsingOverflows)
                    content = 0;
            }

            newIndices[j] = offsetIndex;
        }
        // Fill: flatten the per-dimension indices back into a single bin
        // number of the shrunk pdf and accumulate the content there.
        newBin = newAxes.FlattenIndices(newIndices);
        newDist.AddBinContent(newBin, content);
    }
    return newDist;
}