bool MeshPyramidReader::trackFrame(int nFrame, unsigned char* pColorImageRGB,
                                    TrackerOutputInfo** pOutputInfo)
{
    if(!setCurrentFrame(nFrame))
        return false;

    // set up the color images used later: normalize the raw RGB frame to [0,1] doubles and split channels
    memcpy(pCurrentColorImageRGB, pColorImageRGB, 3*m_nWidth*m_nHeight);
    cv::Mat tempColorImageRGB(m_nHeight, m_nWidth, CV_8UC3, pCurrentColorImageRGB);
    tempColorImageRGB.convertTo(colorImage, cv::DataType<Vec3d>::type, 1./255);
    cv::split(colorImage, colorImageSplit);
    
    setMeshPyramid();

    // hand the caller a pointer into the output pyramid once, on the first tracked frame
    if(!trackerInitialized)
    {
        *pOutputInfo = &outputInfoPyramid[0];
        trackerInitialized = true;
    }

    return true;
}
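// The color-handling pattern above (copy the raw RGB buffer, wrap it in a
// cv::Mat, convert to a normalized double image, split channels) recurs in
// MeshBufferReader::trackFrame and MeshSequenceReader::trackFrame below.
// A minimal helper sketch that factors it out, relying on the OpenCV headers
// this file already uses; the name prepareColorImage and the
// std::vector<cv::Mat> channel container are hypothetical, not part of the
// existing interface:
static void prepareColorImage(const unsigned char* pColorImageRGB,
                              unsigned char* pCurrentColorImageRGB,
                              int height, int width,
                              cv::Mat& colorImage,
                              std::vector<cv::Mat>& colorImageSplit)
{
    // keep an internal copy so the caller's buffer can be reused immediately
    memcpy(pCurrentColorImageRGB, pColorImageRGB, 3 * width * height);

    // wrap the copy without reallocating, then normalize to doubles in [0,1]
    cv::Mat tempColorImageRGB(height, width, CV_8UC3, pCurrentColorImageRGB);
    tempColorImageRGB.convertTo(colorImage, cv::DataType<cv::Vec3d>::type, 1. / 255);

    // split into one single-channel image per color channel
    cv::split(colorImage, colorImageSplit);
}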
// load mesh from uImage, vImage, dImage and maskImage
// only for the first frame
void MainEngine::LoadInitialMeshUVD()
{
  cout << "load mesh from uvd images" << endl;

  DepthImageType uImage(m_nHeight,m_nWidth);
  DepthImageType vImage(m_nHeight,m_nWidth);
  DepthImageType dImage(m_nHeight,m_nWidth);
  InternalIntensityImageType maskImage;

  m_pImageSourceEngine->readUVDImage(uImage,vImage,dImage,maskImage);

  // we need to compute normals
  // specify the depth scale for the level we want to do optimization on
  if(!trackerSettings.useDepthPyramid)
    {
      PangaeaMeshIO::createMeshFromDepth(templateMesh, m_pColorImageRGB,
                                         uImage, vImage, dImage, maskImage, m_nHeight, m_nWidth,
                                         trackerSettings.depth2MeshScale);
      templateMeshPyramid = PangaeaMeshPyramid(templateMesh);
    }
  else
    {
      int numMeshLevels = trackerSettings.imagePyramidSamplingFactors.size();
      templateMeshPyramid.numLevels = numMeshLevels;
      templateMeshPyramid.levels.resize(numMeshLevels);
      templateMeshPyramid.meshPyramidVertexNum.resize(numMeshLevels);
      // in this case, the shape and image are subsampled using the same factor
      for(int i = 0; i < numMeshLevels; ++i)
        {

          // convert the full-resolution color image to normalized doubles
          InternalColorImageType colorImage;
          cv::Mat tempColorImageRGB(m_nHeight, m_nWidth, CV_8UC3, m_pColorImageRGB);
          tempColorImageRGB.convertTo(colorImage, cv::DataType<Vec3d>::type, 1./255);

          // per-level Gaussian blur of the color image;
          // cv::GaussianBlur expects a positive odd kernel size, 0 skips the blur
          int blurSize = trackerSettings.blurFilterSizes[i];
          if(blurSize > 0)
            cv::GaussianBlur(colorImage, colorImage, cv::Size(blurSize, blurSize), 3);

          PangaeaMeshIO::createMeshFromDepth(templateMesh, colorImage,
                                             uImage, vImage, dImage, maskImage, m_nHeight, m_nWidth,
                                             1.0/trackerSettings.imagePyramidSamplingFactors[i]);

          templateMeshPyramid.meshPyramidVertexNum[i] = templateMesh.numVertices;
          templateMeshPyramid.levels[i] = std::move(templateMesh);
          templateMesh.clear();
        }
    }

}
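// In the pyramid branch above, the 8-bit-to-double conversion of
// m_pColorImageRGB does not depend on the pyramid level, so it could be
// hoisted out of the per-level loop. A minimal refactor sketch under that
// assumption; buildBlurredColorPyramid is a hypothetical helper and the
// std::vector containers stand in for the project's own image types:
static void buildBlurredColorPyramid(unsigned char* pColorImageRGB,
                                     int height, int width,
                                     const std::vector<int>& blurFilterSizes,
                                     std::vector<cv::Mat>& blurredLevels)
{
  // convert once: 8-bit RGB to a double image in [0,1]
  cv::Mat baseColorImage;
  cv::Mat tempColorImageRGB(height, width, CV_8UC3, pColorImageRGB);
  tempColorImageRGB.convertTo(baseColorImage, cv::DataType<cv::Vec3d>::type, 1. / 255);

  blurredLevels.resize(blurFilterSizes.size());
  for(size_t i = 0; i < blurFilterSizes.size(); ++i)
    {
      int blurSize = blurFilterSizes[i];
      // cv::GaussianBlur needs a positive odd kernel size; 0 means "no blur"
      if(blurSize > 0)
        cv::GaussianBlur(baseColorImage, blurredLevels[i],
                         cv::Size(blurSize, blurSize), 3);
      else
        blurredLevels[i] = baseColorImage.clone();
    }
}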
bool MeshBufferReader::trackFrame(int nFrame, unsigned char* pColorImageRGB,
                                  TrackerOutputInfo** pOutputInfo)
{
  memcpy(pCurrentColorImageRGB, pColorImageRGB, 3*m_nWidth*m_nHeight);
  cv::Mat tempColorImageRGB(m_nHeight, m_nWidth, CV_8UC3, pCurrentColorImageRGB);
  tempColorImageRGB.convertTo(colorImage, cv::DataType<Vec3d>::type, 1./255);
  cv::split(colorImage, colorImageSplit);

  if(!setCurrentFrame(nFrame))
    return false;

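  // map the absolute frame number to a slot in the preloaded output buffer;
  // e.g. with startFrameNo = 1 and nFrameStep = 2 (hypothetical values),
  // frames 1, 3, 5, ... land in buffer positions 0, 1, 2, ...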
  int bufferPos = (currentFrameNo - startFrameNo) / nFrameStep;
  *pOutputInfo = &outputInfoPyramidBuffer[ bufferPos ][ nRenderingLevel ];

  return true;
}
bool MeshSequenceReader::trackFrame(int nFrame, unsigned char* pColorImageRGB,
                                    TrackerOutputInfo** pOutputInfo)
{
    *pOutputInfo = &outputInfo;

    if(!setCurrentFrame(nFrame))
        return false;

    // set up the color images used later: normalize the raw RGB frame to [0,1] doubles and split channels
    memcpy(pCurrentColorImageRGB, pColorImageRGB, 3*m_nWidth*m_nHeight);
    cv::Mat tempColorImageRGB(m_nHeight, m_nWidth, CV_8UC3, pCurrentColorImageRGB);
    tempColorImageRGB.convertTo(colorImage, cv::DataType<Vec3d>::type, 1./255);
    cv::split(colorImage, colorImageSplit);

    if(!trackerInitialized)
        trackerInitSetup(outputInfo);
    else
        trackerUpdate(outputInfo);

    return true;
}
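// All three readers implement the same trackFrame interface: the caller passes
// the frame number and the raw RGB frame, and gets back a pointer into
// reader-owned TrackerOutputInfo storage for rendering. A minimal usage sketch,
// kept as a comment because reader, getColorFrame and renderOutput are
// hypothetical names, not part of the existing code:
//
//   TrackerOutputInfo* pOutputInfo = NULL;
//   for(int nFrame = startFrame; nFrame <= endFrame; nFrame += frameStep)
//   {
//       unsigned char* pColorImageRGB = getColorFrame(nFrame);
//       if(!reader.trackFrame(nFrame, pColorImageRGB, &pOutputInfo))
//           break;   // no more frames, or the frame could not be loaded
//       renderOutput(*pOutputInfo);
//   }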