Void TAppRendererTop::xCreateLib()
{
  m_pcRenTop = new TRenTop();

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfInputViews; iViewIdx++)
  {
    TVideoIOYuv* pcVideoInput = new TVideoIOYuv;
    TVideoIOYuv* pcDepthInput = new TVideoIOYuv;

    pcVideoInput->open( m_pchVideoInputFileList[iViewIdx], false, m_inputBitDepthY, m_inputBitDepthC, m_internalBitDepthY, m_internalBitDepthC );  // read mode
    pcDepthInput->open( m_pchDepthInputFileList[iViewIdx], false, m_inputBitDepthY, m_inputBitDepthC, m_internalBitDepthY, m_internalBitDepthC );  // read mode
    pcVideoInput->skipFrames( m_iFrameSkip, m_iSourceWidth, m_iSourceHeight );
    pcDepthInput->skipFrames( m_iFrameSkip, m_iSourceWidth, m_iSourceHeight );

    m_apcTVideoIOYuvVideoInput.push_back( pcVideoInput );
    m_apcTVideoIOYuvDepthInput.push_back( pcDepthInput );
  }

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfOutputViews; iViewIdx++)
  {
    TVideoIOYuv* pcSynthOutput = new TVideoIOYuv;
    pcSynthOutput->open( m_pchSynthOutputFileList[iViewIdx], true, m_outputBitDepthY, m_outputBitDepthC, m_internalBitDepthY, m_internalBitDepthC );  // write mode
    m_apcTVideoIOYuvSynthOutput.push_back( pcSynthOutput );
  }
}
Example #2
Void TAppRendererTop::xCreateLib()
{
  m_pcRenTop = new TRenTop();

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfInputViews; iViewIdx++)
  {
    TVideoIOYuv* pcVideoInput = new TVideoIOYuv;
    TVideoIOYuv* pcDepthInput = new TVideoIOYuv;

//    ( Char* pchFile, Bool bWriteMode, const Int fileBitDepth[MAX_NUM_CHANNEL_TYPE], const Int MSBExtendedBitDepth[MAX_NUM_CHANNEL_TYPE], const Int internalBitDepth[MAX_NUM_CHANNEL_TYPE] )

    pcVideoInput->open( m_pchVideoInputFileList[iViewIdx], false, m_inputBitDepth, m_internalBitDepth, m_internalBitDepth );  // read mode
    pcDepthInput->open( m_pchDepthInputFileList[iViewIdx], false, m_inputBitDepth, m_internalBitDepth, m_internalBitDepth );  // read mode
    pcVideoInput->skipFrames(m_iFrameSkip, m_iSourceWidth, m_iSourceHeight, CHROMA_420 );
    pcDepthInput->skipFrames(m_iFrameSkip, m_iSourceWidth, m_iSourceHeight, CHROMA_420 );

    m_apcTVideoIOYuvVideoInput.push_back( pcVideoInput );
    m_apcTVideoIOYuvDepthInput.push_back( pcDepthInput );
  }

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfOutputViews; iViewIdx++)
  {
    TVideoIOYuv* pcSynthOutput = new TVideoIOYuv;
    pcSynthOutput->open( m_pchSynthOutputFileList[iViewIdx], true, m_outputBitDepth, m_internalBitDepth, m_internalBitDepth );  // write mode
    m_apcTVideoIOYuvSynthOutput.push_back( pcSynthOutput );
  }
}
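Both variants of xCreateLib() allocate one TVideoIOYuv per view (and a TRenTop) without releasing anything, so the application class needs a matching teardown. The following is a minimal sketch of such a cleanup, not taken from the snippets above: it assumes the m_apcTVideoIOYuv... members are std::vector<TVideoIOYuv*>, that m_pcRenTop owns the renderer, and it uses the HM/HTM naming convention xDestroyLib for the counterpart method.

Void TAppRendererTop::xDestroyLib()
{
  // Renderer core allocated in xCreateLib()
  delete m_pcRenTop;
  m_pcRenTop = NULL;

  // Close and free every per-view YUV reader/writer created in xCreateLib()
  for ( UInt i = 0; i < m_apcTVideoIOYuvVideoInput.size(); i++ )
  {
    m_apcTVideoIOYuvVideoInput[i]->close();
    delete m_apcTVideoIOYuvVideoInput[i];
  }
  for ( UInt i = 0; i < m_apcTVideoIOYuvDepthInput.size(); i++ )
  {
    m_apcTVideoIOYuvDepthInput[i]->close();
    delete m_apcTVideoIOYuvDepthInput[i];
  }
  for ( UInt i = 0; i < m_apcTVideoIOYuvSynthOutput.size(); i++ )
  {
    m_apcTVideoIOYuvSynthOutput[i]->close();
    delete m_apcTVideoIOYuvSynthOutput[i];
  }
}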
Example #3
int main(int argc, const char** argv)
{
  bool do_help;
  string filename_in, filename_out;
  unsigned int width, height;
  unsigned int bitdepth_in, bitdepth_out;
  unsigned int num_frames;
  unsigned int num_frames_skip;

  po::Options opts;
  opts.addOptions()
  ("help", do_help, false, "this help text")
  ("InputFile,i", filename_in, string(""), "input file to convert")
  ("OutputFile,o", filename_out, string(""), "output file")
  ("SourceWidth", width, 0u, "source picture width")
  ("SourceHeight", height, 0u, "source picture height")
  ("InputBitDepth", bitdepth_in, 8u, "bit-depth of input file")
  ("OutputBitDepth", bitdepth_out, 8u, "bit-depth of output file")
  ("NumFrames", num_frames, 0xffffffffu, "number of frames to process")
  ("FrameSkip,-fs", num_frames_skip, 0u, "Number of frames to skip at start of input YUV")
  ;

  po::setDefaults(opts);
  po::scanArgv(opts, argc, argv);

  if (argc == 1 || do_help)
  {
    /* argc == 1: no options have been specified */
    po::doHelp(cout, opts);
    return EXIT_FAILURE;
  }

  TVideoIOYuv input;
  TVideoIOYuv output;

  input.open((char*)filename_in.c_str(), false, bitdepth_in, bitdepth_in, bitdepth_out, bitdepth_out);
  output.open((char*)filename_out.c_str(), true, bitdepth_out, bitdepth_out, bitdepth_out, bitdepth_out);

  input.skipFrames(num_frames_skip, width, height);

  TComPicYuv frame;
  frame.create( width, height, 1, 1, 0 );

  int pad[2] = {0, 0};

  unsigned int num_frames_processed = 0;
  while (!input.isEof()) 
  {
    if (! input.read(&frame, pad))
    {
      break;
    }
#if 0
    /* disabled debug code: blanks the luma plane, then marks the first sample */
    Pel* img = frame.getLumaAddr();
    for (int y = 0; y < height; y++)
    {
      for (int x = 0; x < width; x++)
        img[x] = 0;
      img += frame.getStride();
    }
    img = frame.getLumaAddr();
    img[0] = 1;
#endif

    output.write(&frame);
    num_frames_processed++;
    if (num_frames_processed == num_frames)
      break;
  }

  input.close();
  output.close();

  return EXIT_SUCCESS;
}
Int main(Int argc, const char** argv)
{
    Bool do_help;
    string filename_in, filename_out;
    UInt width, height;
    UInt bitdepth_in, bitdepth_out, chromaFormatRaw;
    UInt num_frames;
    UInt num_frames_skip;

    po::Options opts;
    opts.addOptions()
    ("help", do_help, false, "this help text")
    ("InputFile,i", filename_in, string(""), "input file to convert")
    ("OutputFile,o", filename_out, string(""), "output file")
    ("SourceWidth", width, 0u, "source picture width")
    ("SourceHeight", height, 0u, "source picture height")
    ("InputBitDepth", bitdepth_in, 8u, "bit-depth of input file")
    ("OutputBitDepth", bitdepth_out, 8u, "bit-depth of output file")
    ("ChromaFormat", chromaFormatRaw, 420u, "chroma format. 400, 420, 422 or 444 only")
    ("NumFrames", num_frames, 0xffffffffu, "number of frames to process")
    ("FrameSkip,-fs", num_frames_skip, 0u, "Number of frames to skip at start of input YUV")
    ;

    po::setDefaults(opts);
    po::scanArgv(opts, argc, argv);


    if (argc == 1 || do_help)
    {
        /* argc == 1: no options have been specified */
        po::doHelp(cout, opts);
        return EXIT_FAILURE;
    }

    ChromaFormat chromaFormatIDC=CHROMA_420;
    switch (chromaFormatRaw)
    {
    case 400:
        chromaFormatIDC=CHROMA_400;
        break;
    case 420:
        chromaFormatIDC=CHROMA_420;
        break;
    case 422:
        chromaFormatIDC=CHROMA_422;
        break;
    case 444:
        chromaFormatIDC=CHROMA_444;
        break;
    default:
        fprintf(stderr, "Bad chroma format string\n");
        return EXIT_FAILURE;
    }

    TVideoIOYuv input;
    TVideoIOYuv output;

    Int inputBitDepths [MAX_NUM_CHANNEL_TYPE];
    Int outputBitDepths[MAX_NUM_CHANNEL_TYPE];

    for (UInt channelTypeIndex = 0; channelTypeIndex < MAX_NUM_CHANNEL_TYPE; channelTypeIndex++)
    {
        inputBitDepths [channelTypeIndex] = bitdepth_in;
        outputBitDepths[channelTypeIndex] = bitdepth_out;
    }

    input.open((char*)filename_in.c_str(), false, inputBitDepths, inputBitDepths, outputBitDepths);
    output.open((char*)filename_out.c_str(), true, outputBitDepths, outputBitDepths, outputBitDepths);

    input.skipFrames(num_frames_skip, width, height, chromaFormatIDC);

    TComPicYuv frame;
    frame.create( width, height, chromaFormatIDC, 1, 1, 0 );

    Int pad[2] = {0, 0};

    TComPicYuv cPicYuvTrueOrg;
    cPicYuvTrueOrg.create( width, height, chromaFormatIDC, 1, 1, 0 );

    UInt num_frames_processed = 0;
    while (!input.isEof())
    {
        if (! input.read(&frame, &cPicYuvTrueOrg, IPCOLOURSPACE_UNCHANGED, pad))
        {
            break;
        }
#if 0
        /* disabled debug code: blanks the luma plane, then marks the first sample */
        Pel* img = frame.getAddr(COMPONENT_Y);
        for (Int y = 0; y < height; y++)
        {
            for (Int x = 0; x < width; x++)
                img[x] = 0;
            img += frame.getStride(COMPONENT_Y);
        }
        img = frame.getAddr(COMPONENT_Y);
        img[0] = 1;
#endif

        output.write(&frame, IPCOLOURSPACE_UNCHANGED);
        num_frames_processed++;
        if (num_frames_processed == num_frames)
            break;
    }

    input.close();
    output.close();
    frame.destroy();
    cPicYuvTrueOrg.destroy();

    return EXIT_SUCCESS;
}
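For reference, one way to invoke this converter from the command line is shown below. The binary name convertYuv is an assumption, the --Name=value long-option form follows the program_options_lite convention used by HM, and options left at their defaults (e.g. FrameSkip) are omitted.

./convertYuv -i in_8bit.yuv -o out_10bit.yuv --SourceWidth=1920 --SourceHeight=1080 \
    --InputBitDepth=8 --OutputBitDepth=10 --ChromaFormat=420 --NumFrames=100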