Example no. 1
0
/* Represents the main loop of an application which generates one frame per
 * loop: starts the encoder, pushes 100 generated RGB frames through it,
 * then finalizes the stream and releases the pixel buffer. */
static void encode_example(const char *filename, int codec_id, int width, int height) {
    uint8_t *rgb = NULL;
    ffmpeg_encoder_start(filename, codec_id, 25, width, height);
    for (int frame_idx = 0; frame_idx < 100; frame_idx++) {
        frame->pts = frame_idx;
        common_generate_rgb(width, height, frame_idx, &rgb);
        ffmpeg_encoder_encode_frame(rgb);
    }
    ffmpeg_encoder_finish();
    free(rgb);
}
Example no. 2
0
/* Encodes 100 generated RGB frames to an H.264 file at 25 fps. The output
 * filename embeds this source file's name and the given factor
 * ("tmp.<file>_<factor>.h264"). */
static void encode_example(int width, int height, float factor) {
    char filename[255];
    uint8_t *rgb = NULL;
    /* sizeof(filename) instead of a duplicated magic 255 keeps the snprintf
     * bound in sync with the buffer if its size ever changes. */
    snprintf(filename, sizeof(filename), "tmp." __FILE__ "_%.2f.h264", factor);
    ffmpeg_encoder_start(filename, AV_CODEC_ID_H264, 25, width, height, factor);
    for (int pts = 0; pts < 100; pts++) {
        frame->pts = pts;
        common_generate_rgb(width, height, pts, &rgb);
        ffmpeg_encoder_encode_frame(rgb);
    }
    ffmpeg_encoder_finish();
    free(rgb);
}
Example no. 3
0
// ----------------------------------------------------------------
//  renders the frame
// ----------------------------------------------------------------
// Per-frame update: applies the latest HMD tracking pose to the camera node,
// renders one Ogre frame, submits the side-by-side stereo texture to the
// OpenVR compositor, and (while recording) feeds frames to the ffmpeg encoder.
void OgreOpenVR::update()
{
    // update the parent camera node's orientation and position with the new tracking data
    Ogre::Quaternion q = m_orientation;
    // 180-degree rotation about world Y, applied in the waist-tracker branch
    // below -- presumably to compensate a flipped reference frame; TODO confirm.
    Ogre::Quaternion q_hack; q_hack.FromAngleAxis(Ogre::Radian(Ogre::Degree(180)), Ogre::Vector3::UNIT_Y);

    if (!lockToCamera && m_waist != NULL)
    {
        // Waist tracker present: orientation is HMD pose relative to the waist,
        // pre-multiplied by the Y-flip hack.
        q = q_hack * m_mat4HMDPose.extractQuaternion() * m_waist->getOrientation();
    }
    else if (!lockToCamera)
    {
        // No waist tracker: combine the stored orientation with the raw HMD pose.
        q = q * m_mat4HMDPose.extractQuaternion();
    }
    // else: locked to camera, q stays as m_orientation.

    m_cameraNode->setOrientation(q);
        
    //m_position = m_mat4HMDPose.getTrans();
    m_cameraNode->setPosition(m_position); // -m_poseNeutralPosition);

    Ogre::Root::getSingleton().renderOneFrame();

    // Fetch the GL texture id of the render target so it can be handed to OpenVR.
    Ogre::GLTexture* gt = ((Ogre::GLTexture*)m_ogreRenderTexture.get());
    GLuint srcid = gt->getGLID();

    // In OpenGL, the render texture comes out topside-bottomwards.
    // Hence v1=1, v2=0 flips each eye vertically; the left eye samples the
    // left half (u in [0, 0.5]) and the right eye the right half (u in [0.5, 1]).
    //                                       u1    v1    u2    v2
    const vr::VRTextureBounds_t lbounds = { 0.0f, 1.0f, 0.5f, 0.0f };
    const vr::VRTextureBounds_t rbounds = { 0.5f, 1.0f, 1.0f, 0.0f };

    vr::Texture_t stereoTexture = { (void*)srcid, vr::API_OpenGL, vr::ColorSpace_Gamma };

    // Both eyes are submitted from the single side-by-side texture.
    vr::VRCompositor()->Submit(vr::Eye_Left, &stereoTexture, &lbounds);
    vr::VRCompositor()->Submit(vr::Eye_Right, &stereoTexture, &rbounds);

    if (m_recording)
    {
        // Throttle capture: encode at most one frame per 40 ms interval (25 fps).
        static auto pre = boost::posix_time::microsec_clock::local_time();
        static auto now = boost::posix_time::microsec_clock::local_time();

        now = boost::posix_time::microsec_clock::local_time();
        if ((now - pre).total_milliseconds() > 40)
        {
            pre = now;
            // Static buffer pointers persist across frames; presumably the
            // encoder helper allocates them on first use and reuses them --
            // they are intentionally never freed here. TODO confirm ownership.
            static uint8_t *rgb = NULL;
            static GLubyte * pixels = NULL;
            ffmpeg_encoder_glread_rgb(&rgb, &pixels, m_window->getWidth(), m_window->getHeight());
            //ffmpeg_encoder_glgettexture_rgb(&rgb, &pixels, srcid, gt->getWidth(), gt->getHeight());
            ffmpeg_encoder_encode_frame(rgb);
        }
    }

    // NOTE(review): glFinish implies a flush, so the preceding glFlush is
    // redundant; kept as-is to preserve existing behavior.
    glFlush();
    glFinish();

    m_frameIndex++;


    // update the tracked device positions
    updateHMDPos();
}