Example #1
void CMUCamera::setFeature(CameraFeature Feature, int Value, bool bIgnoreOldValue)
{
    if (bIgnoreOldValue || m_Features[Feature] != Value) {
        m_Features[Feature] = Value;
        if (Feature == CAM_FEATURE_STROBE_DURATION) {
            if (m_pCamera->HasStrobe()) {
                C1394CameraControlStrobe* pControl = m_pCamera->GetStrobeControl(0);
                int err = pControl->SetValue(Value);
                checkCMUWarning(err == CAM_SUCCESS, "Error setting camera strobe.");
            } else {
                AVG_TRACE(Logger::WARNING, "Camera does not support strobe.");
            }
        } else {
            CAMERA_FEATURE cmuFeature = getFeatureID(Feature);
            if (m_pCamera->HasFeature(cmuFeature)) {
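                // A value of -1 selects the feature's automatic mode.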
                bool bAuto = (Value == -1);
                
                C1394CameraControl* pControl = m_pCamera->GetCameraControl(cmuFeature);
                int err1 = pControl->SetAutoMode(bAuto);
                int err2 = CAM_SUCCESS;
                if (!bAuto) {
                    err2 = pControl->SetValue(Value);
                }
                checkCMUWarning(err1 == CAM_SUCCESS && err2 == CAM_SUCCESS, 
                        string("Error setting camera feature: ") + 
                        cameraFeatureToString(Feature));
            } else {
                AVG_TRACE(Logger::WARNING, string("Camera does not support feature: ") + 
                        cameraFeatureToString(Feature));
            }
        }
    }
}
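Example #2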
bool TrackerThread::init()
{
    try {
        m_pImagingContext = GLContext::create(
            GLConfig(false, false, true, 1, GLConfig::AUTO, false));
        createBandpassFilter();
        AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
                "Using fragment shaders for imaging operations.");
    } catch (Exception& e) {
        AVG_LOG_WARNING(e.getStr());
        AVG_TRACE(Logger::category::CONFIG, Logger::severity::WARNING,
                "Using CPU for imaging operations (slow and inaccurate).");
        m_pImagingContext = 0;
        m_pBandpassFilter = FilterPtr(new FilterFastBandpass());
    }
    try {
        m_StartTime = TimeSource::get()->getCurrentMillisecs(); 
        m_HistoryDelay = m_pConfig->getIntParam("/tracker/historydelay/@value");
    } catch (Exception& e) {
        AVG_LOG_WARNING(e.getStr());
    }
    
    // Done in TrackerInputDevice::ctor to work around Leopard/libdc1394 threading issue.
    //    m_pCamera->open();
    return true;
}
Example #3
ColorNode::ColorNode(const ArgList& Args)
    : m_sFillColorName("FFFFFF")
{   
    AVG_TRACE(Logger::category::PLUGIN, Logger::severity::INFO,
            "ColorNode c'tor gets Argument fillcolor= "  << 
            Args.getArgVal<string>("fillcolor")); 
    
    Args.setMembers(this);
    AVG_TRACE(Logger::category::PLUGIN, Logger::severity::INFO,
            "ColorNode constructed with " << m_sFillColorName);   
}
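Example #4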
void TrackerThread::deinit()
{
    m_pCamera = CameraPtr();
    AVG_TRACE(Logger::category::PROFILE, Logger::severity::INFO,
            "Total camera frames: " << m_NumFrames);
    AVG_TRACE(Logger::category::PROFILE, Logger::severity::INFO,
            "Camera frames discarded: " << m_NumCamFramesDiscarded);
    if (m_pBandpassFilter) {
        m_pBandpassFilter.reset();
    }
    if (m_pImagingContext) {
        delete m_pImagingContext;
    }
}
Example #5
void AreaNode::maybeRender()
{
    AVG_ASSERT(getState() == NS_CANRENDER);
    if (isVisible()) {
        if (getID() != "") {
            AVG_TRACE(Logger::BLTS, "Rendering " << getTypeStr() << 
                    " with ID " << getID());
        } else {
            AVG_TRACE(Logger::BLTS, "Rendering " << getTypeStr()); 
        }
        m_Transform = getParentTransform()*calcTransform();
        render();
    }
}
Example #6
File: Image.cpp Project: dboesel/libavg
void Image::setFilename(const std::string& sFilename, TextureCompression comp)
{
    assertValid();
    AVG_TRACE(Logger::category::MEMORY, Logger::severity::INFO, "Loading " << sFilename);
    BitmapPtr pBmp = loadBitmap(sFilename);
    if (comp == TEXTURECOMPRESSION_B5G6R5 && pBmp->hasAlpha()) {
        throw Exception(AVG_ERR_UNSUPPORTED, 
                "B5G6R5-compressed textures with an alpha channel are not supported.");
    }
    changeSource(FILE);
    m_pBmp = pBmp;

    m_sFilename = sFilename;

    switch (comp) {
        case TEXTURECOMPRESSION_B5G6R5:
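            // Convert into a 16-bit bitmap, swapping R and B first if the
            // platform's bitmap loader doesn't store blue first.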
            m_pBmp = BitmapPtr(new Bitmap(pBmp->getSize(), B5G6R5, sFilename));
            if (!BitmapLoader::get()->isBlueFirst()) {
                FilterFlipRGB().applyInPlace(pBmp);
            }
            m_pBmp->copyPixels(*pBmp);
            break;
        case TEXTURECOMPRESSION_NONE:
            break;
        default:
            assert(false);
    }

    if (m_State == GPU) {
        m_pSurface->destroy();
        setupSurface();
    }
    assertValid();
}
Example #7
OGLShader::OGLShader(const string& sName, const string& sProgram, const string& sDefines)
    : m_sName(sName),
      m_sProgram(sProgram)
{
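    // The fragment shader source is the defines block followed by the program text.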
    m_hFragmentShader = glproc::CreateShaderObject(GL_FRAGMENT_SHADER);
    const char * pProgramStrs[2];
    pProgramStrs[0] = sDefines.c_str();
    pProgramStrs[1] = m_sProgram.c_str();
    glproc::ShaderSource(m_hFragmentShader, 2, pProgramStrs, 0);
    glproc::CompileShader(m_hFragmentShader);
    GLContext::getCurrent()->checkError("OGLShader::OGLShader: glCompileShader()");
    dumpInfoLog(m_hFragmentShader);

    m_hProgram = glproc::CreateProgramObject();
    glproc::AttachObject(m_hProgram, m_hFragmentShader);
    glproc::LinkProgram(m_hProgram);
    GLContext::getCurrent()->checkError("OGLShader::OGLShader: glLinkProgram()");

    GLint bLinked;
    glproc::GetObjectParameteriv(m_hProgram, GL_OBJECT_LINK_STATUS_ARB, &bLinked);
    dumpInfoLog(m_hProgram);
    if (!bLinked) {
        AVG_TRACE(Logger::ERROR, "Linking shader program '"+sName+"' failed. Aborting.");
        exit(-1);
    }
}
Example #8
void DisplayEngine::checkJitter()
{
    if (m_LastFrameTime == 0) {
        m_EffFramerate = 0;
    } else {
        long long CurIntervalTime = TimeSource::get()->getCurrentMicrosecs()
                -m_LastFrameTime;
        m_EffFramerate = 1000000.0f/CurIntervalTime;
    }

    long long frameTime = TimeSource::get()->getCurrentMicrosecs();
    int maxDelay;
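    // Tolerate a larger delay when vblank sync drives the frame timing.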
    if (m_VBRate == 0) {
        maxDelay = 2;
    } else {
        maxDelay = 6;
    }
    if ((frameTime - m_TargetTime)/1000 > maxDelay || m_bFrameLate) {
        AVG_TRACE (Logger::PROFILE_LATEFRAMES, 
                "DisplayEngine: frame too late by " 
                << (frameTime - m_TargetTime)/1000 << " ms.");
        m_bFrameLate = true;
        m_FramesTooLate++;
    }

    m_LastFrameTime = frameTime;
    m_TimeSpentWaiting += m_LastFrameTime-m_FrameWaitStartTime;
//    cerr << m_LastFrameTime << ", m_FrameWaitStartTime=" << m_FrameWaitStartTime << endl;
//    cerr << m_TimeSpentWaiting << endl;
}
Example #9
AVPacket * FFMpegDemuxer::getPacket(int streamIndex)
{
    // Make sure enableStream was called on streamIndex.
    AVG_ASSERT(m_PacketLists.size() > 0);
    AVG_ASSERT(streamIndex > -1 && streamIndex < 10);

    if (m_PacketLists.find(streamIndex) == m_PacketLists.end()) {
        cerr << this << ": getPacket: Stream " << streamIndex << " not found." << endl;
        dump();
        AVG_ASSERT(false);
    }

    PacketList& curPacketList = m_PacketLists.find(streamIndex)->second;
    AVPacket* pPacket;
    if (!curPacketList.empty()) {
        // The stream has packets queued already.
        pPacket = curPacketList.front();
        curPacketList.pop_front();
    } else {
        // No packets queued for this stream -> read and queue packets until we get one
        // that is meant for this stream.
        do {
            pPacket = new AVPacket;
            memset(pPacket, 0, sizeof(AVPacket));
            int err = av_read_frame(m_pFormatContext, pPacket);
            if (err < 0) {
                // EOF or error
                if (err != int(AVERROR_EOF)) {
                    char sz[256];
                    av_strerror(err, sz, 256);
                    AVG_TRACE(Logger::category::PLAYER, Logger::severity::ERROR,
                            "Error decoding video: " << sz);
                }
                av_free_packet(pPacket);
                delete pPacket;
                pPacket = 0;
                return 0;
            }
            if (pPacket->stream_index != streamIndex) {
                if (m_PacketLists.find(pPacket->stream_index) != m_PacketLists.end()) {
                    // Relevant stream, but not ours
                    av_dup_packet(pPacket);
                    PacketList& otherPacketList = 
                            m_PacketLists.find(pPacket->stream_index)->second;
                    otherPacketList.push_back(pPacket);
                } else {
                    // Disabled stream
                    av_free_packet(pPacket);
                    delete pPacket;
                    pPacket = 0;
                } 
            } else {
                // Our stream
                av_dup_packet(pPacket);
            }
        } while (!pPacket || pPacket->stream_index != streamIndex);
    }

    return pPacket;
}
Example #10
void KeyEvent::trace()
{
    Event::trace();
    AVG_TRACE(Logger::EVENTS2, "Scancode: " << m_ScanCode 
            << ", Keycode: " << m_KeyCode << ", KeyString: " 
            << m_KeyString << ", Modifiers: " << m_Modifiers);
}
Example #11
void RasterNode::checkReload()
{
    string sLastMaskFilename = m_sMaskFilename;
    string sMaskFilename = m_sMaskHref;
    initFilename(sMaskFilename);
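    // Reload the mask bitmap only if the resolved filename actually changed.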
    if (sLastMaskFilename != sMaskFilename) {
        m_sMaskFilename = sMaskFilename;
        try {
            if (m_sMaskFilename != "") {
                AVG_TRACE(Logger::category::MEMORY, Logger::severity::INFO,
                        "Loading " << m_sMaskFilename);
                m_pMaskBmp = loadBitmap(m_sMaskFilename, I8);
                setMaskCoords();
            }
        } catch (Exception & ex) {
            if (ex.getCode() == AVG_ERR_VIDEO_GENERAL) {
                throw;
            }
            m_sMaskFilename = "";
            logFileNotFoundWarning(ex.getStr());
        }
        if (m_sMaskFilename == "") {
            m_pMaskBmp = BitmapPtr();
            getSurface()->setMask(MCTexturePtr());
        }
        if (getState() == Node::NS_CANRENDER && m_pMaskBmp) {
            downloadMask();
        }
    } else {
        setMaskCoords();
    }
}
Example #12
void ColorNode::setFillColor(const string& sFillColor)
{
    AVG_TRACE(Logger::category::PLUGIN,  Logger::severity::INFO,
            "setFillColor called with " << sFillColor);   
    m_sFillColorName = sFillColor;
    m_Color = colorStringToColor(m_sFillColorName);
}
Example #13
void OGLShader::dumpInfoLog(GLuint hObj, long level, bool bIsProgram)
{
    int infoLogLength;
    GLchar * pInfoLog;

    if (!hObj) {
        return;
    }

    if (bIsProgram) {
        glproc::GetProgramiv(hObj, GL_INFO_LOG_LENGTH, &infoLogLength);
    } else {
        glproc::GetShaderiv(hObj, GL_INFO_LOG_LENGTH, &infoLogLength);
    }
    GLContext::checkError("OGLShader::dumpInfoLog: glGetShaderiv()");
    if (infoLogLength > 1) {
        pInfoLog = (GLchar*)malloc(infoLogLength);
        int charsWritten;
        if (bIsProgram) {
            glproc::GetProgramInfoLog(hObj, infoLogLength, &charsWritten, pInfoLog);
        } else {
            glproc::GetShaderInfoLog(hObj, infoLogLength, &charsWritten, pInfoLog);
        }
        string sLog = removeATIInfoLogSpam(pInfoLog);
        GLContext::checkError("OGLShader::dumpInfoLog: glGetShaderInfoLog()");
        if (sLog.size() > 3) {
            AVG_TRACE(Logger::category::SHADER, level, sLog);
        }
        free(pInfoLog);
    }
}
Example #14
void TangibleEvent::trace()
{
    CursorEvent::trace();
    AVG_TRACE(Logger::category::EVENTS, Logger::severity::DEBUG, "pos: " << getPos() 
            << ", ID: " << getCursorID()
            << ", Marker ID: " << m_MarkerID);
}
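Example #15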
void AppleTrackpadInputDevice::start()
{
    MultitouchInputDevice::start();
    m_Device = MTDeviceCreateDefault();
    MTRegisterContactFrameCallback(m_Device, callback);
    MTDeviceStart(m_Device, 0);
    AVG_TRACE(Logger::CONFIG, "Apple Trackpad Multitouch event source created.");
}
Example #16
void TouchEvent::trace()
{
    CursorEvent::trace();
    AVG_TRACE(Logger::EVENTS2, "pos: " << getPos() 
            << ", ID: " << getCursorID()
            << ", Area: " << m_Area
            << ", Eccentricity: " << m_Eccentricity);
}
Example #17
void TouchEvent::trace()
{
    CursorEvent::trace();
    AVG_TRACE(Logger::category::EVENTS, Logger::severity::DEBUG, "pos: " << getPos() 
            << ", ID: " << getCursorID()
            << ", Area: " << m_Area
            << ", Eccentricity: " << m_Eccentricity);
}
Example #18
void FWCamera::startCapture()
{
#ifdef AVG_ENABLE_1394_2
    int err = dc1394_video_set_transmission(m_pCamera, DC1394_ON);
    AVG_ASSERT(err == DC1394_SUCCESS);

    dc1394switch_t status = DC1394_OFF;

    int i = 0;
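    // Poll up to five times, 50 ms apart, until the camera reports transmission on.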
    while (status == DC1394_OFF && i++ < 5) {
        usleep(50000);
        err = dc1394_video_get_transmission(m_pCamera, &status);
        AVG_ASSERT(err == DC1394_SUCCESS);
    }

    if (status == DC1394_OFF) {
        // Transmission never came on within the allotted polls.
        AVG_ASSERT(false);
    }
    // Default to turning off any camera sharpness manipulation.
    setFeature(CAM_FEATURE_SHARPNESS, 0);

    // Turn off possible auto exposure.
    dc1394_feature_set_mode(m_pCamera, DC1394_FEATURE_EXPOSURE, 
            DC1394_FEATURE_MODE_MANUAL);
    dc1394_feature_set_power(m_pCamera, DC1394_FEATURE_EXPOSURE, DC1394_OFF);

    AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "Firewire camera opened.");
    for (FeatureMap::iterator it=m_Features.begin(); it != m_Features.end(); it++) {
        setFeature(it->first, it->second, true);
    }
    setWhitebalance(m_WhitebalanceU, m_WhitebalanceV, true);
    
    if (getCamPF() == BAYER8) {
        if (strcmp(m_pCamera->model, "DFx 31BF03") == 0) {
            AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
                    "Applying bayer pattern fixup for IS DFx31BF03 camera");
            setCamPF(BAYER8_GRBG);
        } else if (strcmp(m_pCamera->vendor, "Point Grey Research") == 0) {
            AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
                    "Applying bayer pattern fixup for PointGrey cameras");
            enablePtGreyBayer();
        }
    }
#endif
}
Example #19
NodePtr Canvas::getElementByID(const std::string& id)
{
    if (m_IDMap.find(id) != m_IDMap.end()) {
        return m_IDMap.find(id)->second;
    } else {
        AVG_TRACE(Logger::WARNING, "getElementByID(\"" << id << "\") failed.");
        return NodePtr();
    }
}
Example #20
FWCamera::~FWCamera()
{
#ifdef AVG_ENABLE_1394_2
    dc1394_video_set_transmission(m_pCamera, DC1394_OFF);
    dc1394_capture_stop(m_pCamera);
    dc1394_camera_free(m_pCamera);
    dc1394_free(m_pDC1394);
#endif
    AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO, "Firewire camera closed.");
}
Example #21
void DisplayEngine::setVBlankRate(int rate)
{
    m_VBRate = rate;
    if (m_bInitialized) {
        bool bOK = GLContext::getCurrent()->initVBlank(rate);
        m_Framerate = getRefreshRate()/m_VBRate;
        if (!bOK || rate == 0) { 
            AVG_TRACE(Logger::WARNING, "Using framerate of " << m_Framerate << 
                    " instead of VBRate of " << m_VBRate);
            m_VBRate = 0;
        }
    }
}
Example #22
void VideoWriterThread::writeFrame(AVFrame* pFrame)
{
    ScopeTimer timer(ProfilingZoneWriteFrame);
    m_FramesWritten++;
    AVCodecContext* pCodecContext = m_pVideoStream->codec;
    AVPacket packet = { 0 };
    int ret;
    bool bGotOutput;

#if LIBAVCODEC_VERSION_INT > AV_VERSION_INT(54, 0, 0)
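    // libavcodec >= 54: avcodec_encode_video2() fills the AVPacket directly.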
    av_init_packet(&packet);
    int got_output = 0;
    ret = avcodec_encode_video2(pCodecContext, &packet, pFrame, &got_output);
    AVG_ASSERT(ret >= 0);
    if ((pCodecContext->coded_frame->pts) != (long long)AV_NOPTS_VALUE) {
        packet.pts = av_rescale_q(pCodecContext->coded_frame->pts,
                pCodecContext->time_base, m_pVideoStream->time_base);
    }
    bGotOutput = (got_output != 0);
#else
    int out_size = avcodec_encode_video(pCodecContext, m_pVideoBuffer,
            VIDEO_BUFFER_SIZE, pFrame);
    if (out_size > 0) {
        av_init_packet(&packet);

        if ((pCodecContext->coded_frame->pts) != (long long)AV_NOPTS_VALUE) {
            packet.pts = av_rescale_q(pCodecContext->coded_frame->pts,
                    pCodecContext->time_base, m_pVideoStream->time_base);
        }

        if (pCodecContext->coded_frame->key_frame) {
            packet.flags |= AV_PKT_FLAG_KEY;
        }
        packet.stream_index = m_pVideoStream->index;
        packet.data = m_pVideoBuffer;
        packet.size = out_size;
    }
    bGotOutput = (out_size > 0);
#endif
    if (bGotOutput) {
        /* write the compressed frame in the media file */
        ret = av_interleaved_write_frame(m_pOutputFormatContext, &packet);
        av_free_packet(&packet);
        if (ret != 0) {
            AVG_TRACE(Logger::category::VIDEO, Logger::severity::ERROR,
                    getAVErrorString(ret));
        }
        AVG_ASSERT(ret == 0);
    }
}
Example #23
void CMUCamera::internalGetFeature(CameraFeature Feature, unsigned short* val1, 
        unsigned short* val2) const
{
    *val1 = -1;
    *val2 = -1;
    CAMERA_FEATURE cmuFeature = getFeatureID(Feature);
    if (m_pCamera->HasFeature(cmuFeature)) {
        C1394CameraControl* pControl = m_pCamera->GetCameraControl(cmuFeature);
        pControl->Status();
        pControl->GetValue(val1, val2);
    } else {
        AVG_TRACE(Logger::WARNING, string("Error reading camera feature: ") + 
                cameraFeatureToString(Feature));
    }
}
Example #24
void CMUCamera::setFeatureOneShot(CameraFeature Feature)
{
    CAMERA_FEATURE cmuFeature = getFeatureID(Feature);
    if (cmuFeature != FEATURE_INVALID_FEATURE && m_pCamera->HasFeature(cmuFeature)) {
        C1394CameraControl* pControl = m_pCamera->GetCameraControl(cmuFeature);
        int err1 = pControl->SetOnOff(false);
        int err2 = pControl->SetAutoMode(false);
        int err3 = pControl->SetOnePush(true);
        checkCMUWarning(err1 == CAM_SUCCESS && err2 == CAM_SUCCESS 
                && err3 == CAM_SUCCESS,
                string("Error setting feature: ") + cameraFeatureToString(Feature));
    } else {
        AVG_TRACE(Logger::WARNING, string("Camera does not support feature: ") + 
                cameraFeatureToString(Feature));
    }
}
Example #25
FakeCamera::FakeCamera(std::vector<std::string>& pictures)
    : Camera(I8, I8, IntPoint(640,480), 60),
      m_pBmpQ(new std::queue<BitmapPtr>()),
      m_bIsOpen(false)
{
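    // Load each picture, convert it to grayscale and queue it as a camera frame.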
    for (vector<string>::iterator it = pictures.begin(); it != pictures.end(); ++it) {
        try {
            BitmapPtr pBmp (new Bitmap(*it));
            FilterGrayscale().applyInPlace(pBmp);
            setImgSize(pBmp->getSize());
            m_pBmpQ->push(pBmp);
        } catch (Exception& ex) {
            AVG_TRACE(Logger::ERROR, ex.getStr());
            throw;
        }
    }
}
Example #26
int CMUCamera::getCamIndex(long long guid)
{
    if (guid == 0) {
        return 0;
    } else {
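        // Search the attached cameras for a matching GUID; fall back to the first.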
        for (int i=0; i<m_pCamera->GetNumberCameras(); ++i) {
            m_pCamera->SelectCamera(i);
            long long camGuid;
            m_pCamera->GetCameraUniqueID((PLARGE_INTEGER)&camGuid);
            if (camGuid == guid) {
                return i;
            }
        }
        AVG_TRACE(Logger::WARNING, string("Camera with guid ") + toString(guid)
                + " not present. Using first camera.");
        return 0;
    }
}
Example #27
void OGLShader::dumpInfoLog(GLhandleARB hObj)
{
    int InfoLogLength;
    GLcharARB * pInfoLog;

    glproc::GetObjectParameteriv(hObj, GL_OBJECT_INFO_LOG_LENGTH_ARB, &InfoLogLength);
    GLContext::getCurrent()->checkError(
            "OGLShader::dumpInfoLog: glGetObjectParameteriv()");
    if (InfoLogLength > 1) {
        pInfoLog = (GLcharARB*)malloc(InfoLogLength);
        int CharsWritten;
        glproc::GetInfoLog(hObj, InfoLogLength, &CharsWritten, pInfoLog);
        string sLog = removeATIInfoLogSpam(pInfoLog);
        GLContext::getCurrent()->checkError("OGLShader::dumpInfoLog: glGetInfoLog()");
        AVG_TRACE(Logger::WARNING, sLog);
        free(pInfoLog);
    }
}
Example #28
void AudioEngine::init(const AudioParams& ap, float volume) 
{
    m_Volume = volume;
    if (!m_bInitialized) {
        m_bInitialized = true;
        m_AP = ap;
        Dynamics<float, 2>* pLimiter = new Dynamics<float, 2>(float(m_AP.m_SampleRate));
        pLimiter->setThreshold(0.f); // in dB
        pLimiter->setAttackTime(0.f); // in seconds
        pLimiter->setReleaseTime(0.05f); // in seconds
        pLimiter->setRmsTime(0.f); // in seconds
        pLimiter->setRatio(std::numeric_limits<float>::infinity());
        pLimiter->setMakeupGain(0.f); // in dB
        m_pLimiter = pLimiter;

        SDL_AudioSpec desired;
        desired.freq = m_AP.m_SampleRate;
        desired.format = AUDIO_S16SYS;
        desired.channels = m_AP.m_Channels;
        desired.silence = 0;
        desired.samples = m_AP.m_OutputBufferSamples;
        desired.callback = audioCallback;
        desired.userdata = this;

        int err = SDL_OpenAudio(&desired, 0);
        if (err < 0) {
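            // No usable sound device: fall back to fake audio and start a gobbler
            // thread that consumes queued buffers in place of the audio callback.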
            AVG_TRACE(Logger::category::CONFIG, Logger::severity::WARNING,
                    "Can't open audio: " << SDL_GetError());
            m_bStopGobbler = false;
            m_bFakeAudio = true;
            m_pGobblerThread = new boost::thread(&AudioEngine::consumeBuffers, this);
        } else {
            m_bFakeAudio = false;
        }

    } else {
        if (m_bFakeAudio) {
            m_bStopGobbler = false;
            m_pGobblerThread = new boost::thread(&AudioEngine::consumeBuffers, this);
        } else {
            SDL_PauseAudio(0);
        }
    }
}
Example #29
void XInputMTInputDevice::findMTDevice()
{
    int ndevices;
    XIDeviceInfo* pDevices;
    XIDeviceInfo* pDevice;

    pDevices = XIQueryDevice(s_pDisplay, XIAllDevices, &ndevices);

    XITouchClassInfo* pTouchClass = 0;
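    // Look for a direct-touch device (touchscreen) among slave and floating
    // pointers; dependent-touch devices such as trackpads are skipped.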
    for (int i = 0; i < ndevices && !pTouchClass; ++i) {
        pDevice = &pDevices[i];
//        cerr << "Device " << pDevice->name << "(id: " << pDevice->deviceid << ")."
//                << endl;
        if (pDevice->use == XISlavePointer || pDevice->use == XIFloatingSlave) {
            for (int j = 0; j < pDevice->num_classes; ++j) {
                XIAnyClassInfo * pClass = pDevice->classes[j];
                if (pClass->type == XITouchClass) {
                    XITouchClassInfo* pTempTouchClass = (XITouchClassInfo *)pClass;
                    if (pTempTouchClass->mode == XIDirectTouch) {
                        pTouchClass = pTempTouchClass;
                        m_sDeviceName = pDevice->name;
                        m_DeviceID = pDevice->deviceid;
                        if (pDevice->use == XISlavePointer) {
                            m_OldMasterDeviceID = pDevice->attachment;
                        } else {
                            m_OldMasterDeviceID = -1;
                        }
                        break;
                    }
                }
            }
        }
    }
    if (pTouchClass) {
        AVG_TRACE(Logger::category::CONFIG,Logger::severity::INFO,
                "Using multitouch input device " << m_sDeviceName << ", max touches: " <<
                pTouchClass->num_touches);
    } else {
        throw Exception(AVG_ERR_MT_INIT, 
                "XInput multitouch event source: No multitouch device found.");
    }
    XIFreeDeviceInfo(pDevices);
}
Example #30
ImageCache::ImageCache()
    : m_CPUCacheUsed(0),
      m_GPUCacheUsed(0)
{
    glm::vec2 sizeOpt = ConfigMgr::get()->getSizeOption("scr", "imgcachesize");
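    // A size of -1 means the option is unset: default to a quarter of physical
    // memory for the CPU cache and 16 MB for the GPU cache.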
    if (sizeOpt[0] == -1) {
        m_CPUCacheCapacity = (long long)(getPhysMemorySize())/4;
    } else {
        m_CPUCacheCapacity = (long long)(sizeOpt[0])*1024*1024;
    }
    if (sizeOpt[1] == -1) {
        m_GPUCacheCapacity = 16*1024*1024;
    } else {
        m_GPUCacheCapacity = (long long)(sizeOpt[1])*1024*1024;
    }
    AVG_TRACE(Logger::category::CONFIG, Logger::severity::INFO,
            "Image cache size: CPU=" << m_CPUCacheCapacity/(1024*1024) <<
            "MB, GPU=" << m_GPUCacheCapacity/(1024*1024) << "MB" << endl);
}