bool GSWndEGL::Create(const string& title, int w, int h) { if(m_NativeWindow) throw GSDXRecoverableError(); if(w <= 0 || h <= 0) { w = theApp.GetConfig("ModeWidth", 640); h = theApp.GetConfig("ModeHeight", 480); } m_managed = true; // note this part must be only executed when replaying .gs debug file m_NativeDisplay = XOpenDisplay(NULL); OpenEGLDisplay(); m_NativeWindow = XCreateSimpleWindow(m_NativeDisplay, DefaultRootWindow(m_NativeDisplay), 0, 0, w, h, 0, 0, 0); XMapWindow (m_NativeDisplay, m_NativeWindow); CreateContext(3, 3); AttachContext(); CheckContext(); PopulateGlFunction(); if (m_NativeWindow == 0) throw GSDXRecoverableError(); return true; }
// Set the stackwalker to the specified CONTEXT. void DacDbiInterfaceImpl::SetStackWalkCurrentContext(VMPTR_Thread vmThread, StackWalkHandle pSFIHandle, CorDebugSetContextFlag flag, DT_CONTEXT * pContext) { DD_ENTER_MAY_THROW; StackFrameIterator * pIter = GetIteratorFromHandle(pSFIHandle); REGDISPLAY * pRD = GetRegDisplayFromHandle(pSFIHandle); #if defined(_DEBUG) // The caller should have checked this already. _ASSERTE(CheckContext(vmThread, pContext) == S_OK); #endif // _DEBUG // DD can't keep pointers back into the RS address space. // Allocate a context in DDImpl's memory space. DDImpl can't contain raw pointers back into // the client space since that may not marshal. T_CONTEXT * pContext2 = GetContextBufferFromHandle(pSFIHandle); *pContext2 = *reinterpret_cast<T_CONTEXT *>(pContext); // memcpy // update the REGDISPLAY with the given CONTEXT. // Be sure that the context is in DDImpl's memory space and not the Right-sides. FillRegDisplay(pRD, pContext2); BOOL fSuccess = pIter->ResetRegDisp(pRD, (flag == SET_CONTEXT_FLAG_ACTIVE_FRAME)); if (!fSuccess) { // ResetRegDisp() may fail for the same reason Init() may fail, i.e. // because the stackwalker tries to unwind one frame ahead of time, // or because the stackwalker needs to filter out some frames based on the stackwalk flags. ThrowHR(E_FAIL); } }
// Create an X11 window of the requested size and attach a GLX GL 3.3 context.
// Falls back to the configured mode dimensions when w/h are not positive
// (this path is only taken when replaying a .gs debug file).
// Throws GSDXRecoverableError on re-creation or window-creation failure.
bool GSWndOGL::Create(const string& title, int w, int h)
{
	// Creating twice over the same object is a caller error.
	if(m_NativeWindow)
		throw GSDXRecoverableError();

	if(w <= 0 || h <= 0) {
		w = theApp.GetConfig("ModeWidth", 640);
		h = theApp.GetConfig("ModeHeight", 480);
	}

	m_managed = true;

	// note this part must be only executed when replaying .gs debug file
	m_NativeDisplay = XOpenDisplay(NULL);

	m_NativeWindow = XCreateSimpleWindow(m_NativeDisplay, DefaultRootWindow(m_NativeDisplay), 0, 0, w, h, 0, 0, 0);
	XMapWindow (m_NativeDisplay, m_NativeWindow);

	// Validate the window before any GL context work is done on it.
	if (m_NativeWindow == 0)
		throw GSDXRecoverableError();

	CreateContext(3, 3);
	AttachContext();
	CheckContext();

	// Resolve the vsync-control extension entry point for this context.
	m_swapinterval = (PFNGLXSWAPINTERVALEXTPROC)glXGetProcAddress((const GLubyte*) "glXSwapIntervalEXT");

	PopulateGlFunction();

	return true;
}
/// Set the effect slot's output gain.
/// Valid range is [0, 1]; anything outside it raises std::runtime_error.
void ALAuxiliaryEffectSlot::setGain(ALfloat gain)
{
    // Validation is written as a negated conjunction on purpose: a NaN gain
    // fails both comparisons and is therefore rejected as well.
    const bool withinRange = (gain >= 0.0f) && (gain <= 1.0f);
    if(!withinRange)
        throw std::runtime_error("Gain out of range");

    CheckContext(mContext);
    mContext->alAuxiliaryEffectSlotf(mId, AL_EFFECTSLOT_GAIN, gain);
}
void ALAuxiliaryEffectSlot::applyEffect(const Effect *effect) { const ALEffect *eff = cast<const ALEffect*>(effect); if(!eff) throw std::runtime_error("Invalid Effect"); CheckContext(mContext); mContext->alAuxiliaryEffectSloti(mId, AL_EFFECTSLOT_EFFECT, eff->getId()); }
// Process a data packet. DVD NAV packets are intercepted here to queue the
// current DVD context and decide when it should become active; all other data
// packets are delegated to the base AvFormatDecoder implementation.
bool AvFormatDecoderDVD::ProcessDataPacket(AVStream *curstream, AVPacket *pkt, DecodeType decodetype)
{
    bool ret = true;

    if (curstream->codec->codec_id == AV_CODEC_ID_DVD_NAV)
    {
        // Queue the context announced by this NAV packet.
        MythDVDContext* context = ringBuffer->DVD()->GetDVDContext();

        if (context)
            m_contextList.append(context);

        if (m_curContext == NULL)
        {
            // If we don't have a current context, use
            // the first in the list
            CheckContext(m_contextList.first()->GetStartPTS());

            if (m_lastVideoPkt && m_curContext)
            {
                // If there was no current context but there was
                // a video packet, we've almost certainly been
                // seeking so set the timestamps of the video
                // packet to the new context to ensure we don't
                // get sync errors.
                m_lastVideoPkt->pts = m_curContext->GetStartPTS();
                m_lastVideoPkt->dts = m_lastVideoPkt->pts;
            }
        }
        else if (m_lastVideoPkt)
        {
            // If we've been generating frames, see whether this
            // new context should be used already (handles
            // situations where a VOBU consists of only a NAV
            // packet and nothing else)
            CheckContext(m_lastVideoPkt->pts);
        }
    }
    else
    {
        // Not a NAV packet; let the generic decoder handle it.
        ret = AvFormatDecoder::ProcessDataPacket(curstream, pkt, decodetype);
    }

    return ret;
}
// //////////////////////////////////////////////////////////////////////////// bool InsContext::CheckContext() const { std::vector<std::string> error_list; if (CheckContext(error_list)) return(true); std::copy(error_list.begin(), error_list.end(), std::ostream_iterator<std::string>(std::cout, "\n")); return(false); }
// Delete the underlying AL auxiliary effect slot and destroy this wrapper.
// Throws std::runtime_error if the slot is still in use or deletion fails.
// NOTE: ends the object's lifetime via `delete this` on success.
void ALAuxiliaryEffectSlot::release()
{
    CheckContext(mContext);
    if(isInUse())
        throw std::runtime_error("AuxiliaryEffectSlot is in use");

    // Clear any stale AL error so the check below reflects only the delete call.
    alGetError();
    mContext->alDeleteAuxiliaryEffectSlots(1, &mId);
    if(alGetError() != AL_NO_ERROR)
        throw std::runtime_error("AuxiliaryEffectSlot failed to delete");
    mId = 0;

    delete this;
}
/* Python method Context.parsefds(bytes) -> None.
 * Parses a serialized FileDescriptorSet into this context.
 * Returns NULL with a Python exception set on parse failure. */
static PyObject *context_parsefds(PyObject *_context, PyObject *args)
{
  PyUpb_Context *context = CheckContext(_context);
  struct upb_string str;

  if(!PyArg_ParseTuple(args, BYTES_FORMAT, &str.ptr, &str.byte_len))
    return NULL;
  str.byte_size = 0;  /* We don't own that mem. */

  if(!upb_context_parsefds(context->context, &str)) {
    /* Removed stray '\' line-continuations (leftover macro residue).
     * TODO: raise a more appropriate exception type than TypeError. */
    PyErr_SetString(PyExc_TypeError, "Failed to parse.");
    return NULL;
  }

  Py_RETURN_NONE;
}
// Attach to an externally created X11 window and build an EGL GL 3.3 context
// on it. 'managed' records whether this object owns the window's lifetime.
bool GSWndEGL::Attach(void* handle, bool managed)
{
	// The handle is a pointer to an X11 Window id.
	m_NativeWindow = *(Window*)handle;
	m_managed = managed;

	m_NativeDisplay = XOpenDisplay(NULL);
	OpenEGLDisplay();

	CreateContext(3, 3);
	AttachContext();
	CheckContext();
	PopulateGlFunction();

	return true;
}
// Attach to an externally created X11 window and build a GLX GL 3.3 context
// on it. 'managed' records whether this object owns the window's lifetime.
bool GSWndOGL::Attach(void* handle, bool managed)
{
	// The handle is a pointer to an X11 Window id.
	m_NativeWindow = *(Window*)handle;
	m_managed = managed;

	m_NativeDisplay = XOpenDisplay(NULL);

	CreateContext(3, 3);
	AttachContext();
	CheckContext();

	// Resolve the vsync-control extension entry point for this context.
	m_swapinterval = (PFNGLXSWAPINTERVALEXTPROC)glXGetProcAddress((const GLubyte*) "glXSwapIntervalEXT");

	PopulateGlFunction();

	return true;
}
// Attach to an externally created X11 window and build a GLX context on it.
// 'managed' records whether this object owns the window's lifetime.
bool GSWndOGL::Attach(void* handle, bool managed)
{
	// The handle is a pointer to an X11 Window id.
	m_NativeWindow = *(Window*)handle;
	m_managed = managed;

	m_NativeDisplay = XOpenDisplay(NULL);

	// Per the config flag's name, free drivers may not provide GL 3.3;
	// request a reduced 3.0 context when the user opts in.
	if (theApp.GetConfig("reduce_gl_requirement_for_free_driver", 0) == 1)
		CreateContext(3, 0);
	else
		CreateContext(3, 3);

	AttachContext();
	CheckContext();

	// Resolve the vsync-control extension entry point for this context.
	m_swapinterval = (PFNGLXSWAPINTERVALEXTPROC)glXGetProcAddress((const GLubyte*) "glXSwapIntervalEXT");

	PopulateGlFunction();

	return true;
}
// Attach to an externally created X11 window and build an EGL context on it
// (GLES 3.0 or desktop GL 3.3 depending on the build).
// 'managed' records whether this object owns the window's lifetime.
bool GSWndEGL::Attach(void* handle, bool managed)
{
	// The handle is a pointer to an X11 Window id.
	m_NativeWindow = *(Window*)handle;
	m_managed = managed;

	m_NativeDisplay = XOpenDisplay(NULL);
	OpenEGLDisplay();

#ifdef ENABLE_GLES
	// FIXME: update it to GLES 3.1 when they support it
	CreateContext(3, 0);
#else
	CreateContext(3, 3);
#endif

	AttachContext();
	CheckContext();
	PopulateGlFunction();

	return true;
}
bool GSWndWGL::Attach(void* handle, bool managed) { m_NativeWindow = (HWND)handle; m_managed = managed; if (!OpenWGLDisplay()) return false; if (!CreateContext(3, 3)) return false; AttachContext(); CheckContext(); // TODO //m_swapinterval = (PFNGLXSWAPINTERVALMESAPROC)glXGetProcAddress((const GLubyte*) "glXSwapIntervalMESA"); //PFNGLXSWAPINTERVALMESAPROC m_swapinterval = (PFNGLXSWAPINTERVALMESAPROC)glXGetProcAddress((const GLubyte*) "glXSwapInterval"); PopulateGlFunction(); UpdateWindow(m_NativeWindow); return true; }
// Decode a DVD video packet. When the packet is the last of its sequence
// (pts + duration reaches the context's sequence-end PTS), a copy is kept in
// m_lastVideoPkt so additional frames can be generated for stills/slideshows.
bool AvFormatDecoderDVD::ProcessVideoPacket(AVStream *stream, AVPacket *pkt)
{
    int64_t pts = pkt->pts;

    // Fall back to the decode timestamp when no presentation timestamp is set.
    if (pts == AV_NOPTS_VALUE)
        pts = pkt->dts;

    // See whether a queued NAV context should become active at this timestamp.
    CheckContext(pts);

    bool ret = AvFormatDecoder::ProcessVideoPacket(stream, pkt);

    if( ret &&
        m_curContext &&
        pts != AV_NOPTS_VALUE &&
        pts + pkt->duration == m_curContext->GetSeqEndPTS())
    {
        // If this video frame is the last in the sequence,
        // make a copy of it so we can 'generate' more
        // to fill in the gaps (e.g. when a single frame
        // should be displayed with audio)
        if (!m_lastVideoPkt)
        {
            m_lastVideoPkt = new AVPacket;
            memset(m_lastVideoPkt, 0, sizeof(AVPacket));
        }
        else
        {
            // Reuse the existing packet buffer after freeing its payload.
            av_free_packet(m_lastVideoPkt);
        }

        av_init_packet(m_lastVideoPkt);
        av_copy_packet(m_lastVideoPkt, pkt);
        m_lbaLastVideoPkt = m_curContext->GetLBA();

        if (m_returnContext)
        {
            // After seeking in a slideshow, we needed to find
            // the previous video frame to display.
            // We've found it now, so we need to jump back to
            // where we originally wanted to be.
            LOG(VB_PLAYBACK, LOG_DEBUG, LOC + QString( "Found video packet, jumping back to sector %1")
                .arg(m_returnContext->GetLBA()));

            ringBuffer->DVD()->SectorSeek(m_returnContext->GetLBA());
            ReleaseContext(m_returnContext);
        }
        else
        {
            // Advance the saved packet's timestamps so any generated copies
            // follow on from the real frame.
            if (m_lastVideoPkt->pts != AV_NOPTS_VALUE)
                m_lastVideoPkt->pts += pkt->duration;

            if (m_lastVideoPkt->dts != AV_NOPTS_VALUE)
                m_lastVideoPkt->dts += pkt->duration;

            // Number of extra frames still owed for this context.
            m_framesReq = m_curContext->GetNumFrames() - m_curContext->GetNumFramesPresent();

            LOG(VB_PLAYBACK, LOG_DEBUG, LOC + QString( "SeqEnd @ %1 - require %2 frame(s)")
                .arg(pkt->pts)
                .arg(m_framesReq));
        }
    }

    return ret;
}
/* Look the current word up in the static dictionary, the user (dynamic)
 * vocabularies and the "art" dictionary, in that order, stopping at the
 * first hit (goto Br). Records the match probability/kind on the word and
 * the maximum matched depth on the object; returns Ok on a hit, else No.
 * NOTE: the `if (x = f(...))` patterns below are intentional assignments
 * inside conditions (legacy style) — do not "fix" them to `==`. */
int16_t check_dict_voc (SOBJ * obj, LTIMG ** wrddef, struct dict_state * dict, user_voc voc_array[], int16_t voc_no )
{
  int16_t wordlth;
  int16_t max_dep = 0;   /* deepest prefix matched by any dictionary */
  int16_t direction;
  SWORD *wrd;

  wrd = obj -> word;
  /* Decide from context which dictionaries may be consulted. */
  direction = CheckContext ( obj, wrddef, wrd -> lth);
  wordlth=wrd -> lth-1;

  if ( direction == All_dicts)
  {
    obj -> voc_kind =2;
    if (wrd -> voc_prob = findstat(&wordlth,wrddef,dict))
      wrd -> voc_kind = 2; /* the word found in static voc */
    if (max_dep < wordlth) max_dep = wordlth;
    if ( wrd -> voc_prob) goto Br;

    /* Retry on the apostrophe-delimited core of the word, if any. */
    if (wrd -> type_sp & T_APF)
    {
      int16_t l, r;
      if (test_apppostrof (obj -> wordchar, wrd, &l,&r))
      {
        wordlth = r-l-1;
        if (wrd -> voc_prob = findstat(&wordlth,wrddef+l,dict))
          wrd -> voc_kind = 2; /* the word found in static voc */
        if (max_dep < wordlth+l) max_dep = wordlth+l;
        if (wrd -> voc_prob) goto Br;
      }
    }
  }
  else /* Not all dicts */
    max_dep = obj -> lthok;

  /* Dynamic (user) vocabularies, tried one by one. */
  if ( voc_no)
    if (!wrd -> voc_prob)
    {
      int16_t i, lth;
      obj ->voc_kind = 1;
      for(i =0; i<voc_no; i++)
      {
        lth = wordlth = wrd -> lth-1;
        if (wrd -> voc_prob = check_and_look(&wordlth,wrddef,&voc_array[i].voc,VOC_R,0))
          wrd -> voc_kind = 1; /* the word found in dynamic voc */
        if ( wordlth > lth) wordlth = lth; /* clamp reported depth to word length */
        if (max_dep < wordlth) max_dep = wordlth;
        if ( wrd -> voc_prob) goto Br;
        // Le 06-02-95 06:52pm
        if (wrd -> type_sp & T_APF)
        {
          int16_t l, r;
          if (test_apppostrof (obj -> wordchar, wrd, &l,&r))
          {
            wordlth = r-l-1;
            if (wrd -> voc_prob = check_and_look(&wordlth,wrddef +l,&voc_array[i].voc,VOC_R,0))
              wrd -> voc_kind = 1; /* the word found in dynamic voc */
            if (max_dep < wordlth+l) max_dep = wordlth+l;
            if (wrd -> voc_prob) goto Br;
          }
        }
        // Le 06-02-95 06:52pm
      }
    }

  /* Last resort: the "art" dictionary, for non-lowercase or digit words. */
  wordlth= wrd->lth - 1;
  /* if((!wordlth) || ((wordlth - obj->part->blank_nmb) >= 1)) */
  if ( (!( wrd -> type & T_LOW )) || ( wrd -> type & T_DIG ))
    if (wrd -> voc_prob = check_art_dict (obj->wordchar, &wordlth, &(wrd -> voc_kind)) )
    {
      if (max_dep < wordlth) max_dep = wordlth;
    }

Br:
  if (wrd -> voc_prob) obj->nmb_wrdfound++;
  if( (obj ->part -> type & T_CHEESE ) && !(wrd -> type_sp & T_BLANK) )
    obj->lthok= wrd -> lth-1;
  else
    obj->lthok = max_dep;
  return (wrd -> voc_prob) ? Ok : No ;
}
void ALAuxiliaryEffectSlot::setSendAuto(bool sendauto) { CheckContext(mContext); mContext->alAuxiliaryEffectSloti(mId, AL_EFFECTSLOT_AUXILIARY_SEND_AUTO, sendauto ? AL_TRUE : AL_FALSE); }