// Invoked just before the underlying socket closes; if a delayed close was
// requested, push any still-buffered data onto the wire, then notify listeners.
void CNetworkConnection::OnAboutToClose()
{
    if (m_bDelayedClose)
    {
        // Flush both the (possibly compressed) front buffer and the raw
        // output buffer before the connection actually goes away.
        if (!GetOutputBuffer()->isEmpty() || !m_pOutput->isEmpty())
        {
            writeToNetwork(m_pOutput->size() + GetOutputBuffer()->size());
        }
    }

    emit aboutToClose();
}
// Socket is about to close: drain whatever is still queued in either
// output buffer so no pending data is silently dropped.
void CNetworkConnection::OnAboutToClose()
{
    qDebug() << "about to close";

    const bool bHavePending = !GetOutputBuffer()->isEmpty() || !m_pOutput->isEmpty();

    if (bHavePending)
    {
        qDebug() << "writing data";
        writeToNetwork(m_pOutput->size() + GetOutputBuffer()->size());
    }
}
/* Send the final output frame downstream with the EOS flag set.
 * @param flags  extra OMX buffer flags OR-ed into the header together
 *               with OMX_BUFFERFLAG_EOS.
 * @return OMX_ErrorNone on success, OMX_ErrorUndefined when the filter
 *         holds no output buffer header at all. */
OMX_ERRORTYPE VideoFilter::HandleLastOutput(OMX_U32 flags)
{
    OMX_PTR pBuffer = NULL;
    OMX_S32 nOutSize=0;
    OMX_BUFFERHEADERTYPE *pBufferHdr = NULL;

    bLastOutput = OMX_TRUE;

    /* Ask the codec for a last decoded buffer; it may legitimately return
     * no buffer when nothing is left to output. */
    GetOutputBuffer(&pBuffer,&nOutSize);
    if(pBuffer != NULL)
        pBufferHdr = GetOutBufferHdrFromList(pBuffer);
    if(pBuffer == NULL || pBufferHdr == NULL) {
        /* No decoded frame (or no matching header): reuse any header the
         * filter still holds just to carry the EOS flag downstream. */
        pBufferHdr = GetFirstOutBufferHdrFromList();
        if(pBufferHdr == NULL) {
            LOG_ERROR("No buffer holded by VideoFilter.\n");
            return OMX_ErrorUndefined;
        }
    }

    pBufferHdr->nFilledLen = nOutSize;//0;
    pBufferHdr->nFlags = OMX_BUFFERFLAG_EOS|flags;
    ReturnOutputBuffer(pBufferHdr,flags);
    LOG_INFO("VideoFilter send last output frame.\n");
    return OMX_ErrorNone;
}
int JackWinMMEDriver::Write() { for (int chan = 0; chan < fPlaybackChannels; chan++) { if (fGraphManager->GetConnectionsNum(fPlaybackPortList[chan]) > 0) { JackMidiBuffer* midi_buffer = GetOutputBuffer(chan); // TODO : use timestamp for (unsigned int j = 0; j < midi_buffer->event_count; j++) { JackMidiEvent* ev = &midi_buffer->events[j]; if (ev->size <= 3) { jack_midi_data_t *d = ev->GetData(midi_buffer); DWORD winev = 0; if (ev->size > 0) winev |= d[0]; if (ev->size > 1) winev |= (d[1] << 8); if (ev->size > 2) winev |= (d[2] << 16); MMRESULT res = midiOutShortMsg((HMIDIOUT)fMidiSource[chan].fHandle, winev); if (res != MMSYSERR_NOERROR) jack_error ("midiOutShortMsg error res %d", res); } else { } } } } return 0; }
/* Drain queued G2 packets into the connection's output buffer.
 * @param bFullFlush  when true and compressed output is active, mark the
 *                    remaining data as pending so the compressor flushes it.
 * NOTE(review): packets enqueued here were AddRef'ed by SendPacket —
 * the Release below balances that; verify against SendPacket if changed. */
void CG2Node::FlushSendQueue(bool bFullFlush)
{
    QByteArray* pOutput = GetOutputBuffer();

    /* While the socket has nothing pending, pack queued packets into the
     * output buffer in ~4 KB chunks, signalling the transfer layer after
     * each chunk. */
    while( bytesToWrite() == 0 && m_lSendQueue.size() )
    {
        while( pOutput->size() < 4096 && m_lSendQueue.size() )
        {
            G2Packet* pPacket = m_lSendQueue.dequeue();
            pPacket->ToBuffer(pOutput);
            pPacket->Release();
        }
        emit readyToTransfer();
    }

    if( bFullFlush )
    {
        /* Compressed stream still holds data: remember to flush it later. */
        if( m_bCompressedOutput && pOutput->size() )
            m_bOutputPending = true;
    }

    if( pOutput->size() )
        emit readyToTransfer();
}
int JackPortAudioDriver::Write() { for (int i = 0; i < fPlaybackChannels; i++) { memcpy(fOutputBuffer[i], GetOutputBuffer(i), sizeof(jack_default_audio_sample_t) * fEngineControl->fBufferSize); } return 0; }
// Serialize a G2 packet straight into the outgoing buffer and kick the
// transfer machinery. Releases the packet when the caller asks us to.
void CChatSessionG2::SendPacket(G2Packet *pPacket, bool bRelease)
{
    qDebug() << "Sending packet" << pPacket->GetType();

    pPacket->ToBuffer(GetOutputBuffer());

    if(bRelease)
    {
        pPacket->Release();
    }

    emit readyToTransfer();
}
// Close the connection. A delayed close first tries to push any buffered
// data onto the wire; an immediate close aborts the socket outright.
void CNetworkConnection::Close(bool bDelayed)
{
    if(!bDelayed)
    {
        // Hard close: drop the connection now, discarding pending data.
        m_pSocket->abort();
        emit disconnected();
        return;
    }

    m_bDelayedClose = true;

    // Best effort: flush both output buffers before closing the socket.
    if(!GetOutputBuffer()->isEmpty() || !m_pOutput->isEmpty())
    {
        writeToNetwork(m_pOutput->size() + GetOutputBuffer()->size());
        m_pSocket->flush();
    }
    m_pSocket->close();

    emit disconnected();
}
/* Capture the current output frame as a PNG, either to a file (when
 * filename is non-empty) or into the provided stream.
 * Applies, in order: screen rotation, the selected scale filter, and the HUD.
 * NOTE(review): rotate/scale filters appear to own the buffers their
 * ApplyFilter returns (only frameBuffer is deleted here) — confirm. */
void BaseVideoFilter::TakeScreenshot(VideoFilterType filterType, string filename, std::stringstream *stream)
{
    uint32_t* pngBuffer;
    FrameInfo frameInfo;
    uint32_t* frameBuffer = nullptr;
    {
        /* Snapshot the frame under the lock so the emulation thread can
         * keep producing frames while we encode. */
        auto lock = _frameLock.AcquireSafe();
        if(_bufferSize == 0 || !GetOutputBuffer()) {
            return;
        }
        frameBuffer = (uint32_t*)new uint8_t[_bufferSize];
        memcpy(frameBuffer, GetOutputBuffer(), _bufferSize);
        frameInfo = GetFrameInfo();
    }

    pngBuffer = frameBuffer;

    /* Rotate first (if configured); this may swap width/height. */
    uint32_t rotationAngle = EmulationSettings::GetScreenRotation();
    shared_ptr<RotateFilter> rotateFilter;
    if(rotationAngle > 0) {
        rotateFilter.reset(new RotateFilter(rotationAngle));
        pngBuffer = rotateFilter->ApplyFilter(pngBuffer, frameInfo.Width, frameInfo.Height);
        frameInfo = rotateFilter->GetFrameInfo(frameInfo);
    }

    /* Then apply the user-selected scaling filter, if any. */
    shared_ptr<ScaleFilter> scaleFilter = ScaleFilter::GetScaleFilter(filterType);
    if(scaleFilter) {
        pngBuffer = scaleFilter->ApplyFilter(pngBuffer, frameInfo.Width, frameInfo.Height);
        frameInfo = scaleFilter->GetFrameInfo(frameInfo);
    }

    /* Overlay the HUD onto the final image before encoding. */
    VideoHud hud;
    hud.DrawHud((uint8_t*)pngBuffer, frameInfo, EmulationSettings::GetOverscanDimensions());

    if(!filename.empty()) {
        PNGHelper::WritePNG(filename, pngBuffer, frameInfo.Width, frameInfo.Height);
    } else {
        PNGHelper::WritePNG(*stream, pngBuffer, frameInfo.Width, frameInfo.Height);
    }

    delete[] frameBuffer;
}
void LogManager::Log(const std::string& tag, const std::string& msg, const char* funcName, const char* fileName, unsigned int lineNum) { m_CritSection.Lock(); Tags::iterator findIt = m_Tags.find(tag); if (findIt != m_Tags.end()) { std::string buffer; GetOutputBuffer(buffer, tag, msg, funcName, fileName, lineNum); OutputBufferToLogs(buffer, findIt->second); } m_CritSection.Unlock(); }
int JackProxyDriver::Write() { int i; void *from, *to; size_t buflen = sizeof(jack_default_audio_sample_t) * fEngineControl->fBufferSize; for (i = 0; i < fPlaybackChannels; i++) { if (fUpstreamCapturePortConnected[i]) { to = jack_port_get_buffer(fUpstreamCapturePorts[i], fEngineControl->fBufferSize); from = GetOutputBuffer(i); memcpy(to, from, buflen); } } return 0; }
int JackLoopbackDriver::ProcessReadAsync() { int res = 0; // Loopback copy for (int i = 0; i < fCaptureChannels; i++) { memcpy(GetInputBuffer(i), GetOutputBuffer(i), sizeof(jack_default_audio_sample_t) * fEngineControl->fBufferSize); } // Resume connected clients in the graph if (ResumeRefNum() < 0) { jack_error("JackLoopbackDriver::ProcessReadAsync - ResumeRefNum error"); res = -1; } return res; }
/* Decode one video packet and, when a frame comes out, convert it to a
 * square RGBA texture and commit it to the output queue.
 * @param data  expected to be an FFMpegVideoPackage* — TODO confirm caller contract.
 * @return true when avcodec_decode_video2 succeeded (even if no frame
 *         was produced), false on decode or scaler-setup failure. */
bool FFMpegVideoDecoder::ProcessInput( const PlayerProcedureKey&, void* data )
{
    FFMpegVideoPackage* package = (FFMpegVideoPackage*)data;
    int got_frame = 0;

    bool ret = (avcodec_decode_video2( package->codecContext, m_pFrame, &got_frame, &package->packet ) >= 0);
    av_free_packet( &package->packet );

    if ( got_frame )
    {
        /* Target is a square texture sized from the frame dimensions. */
        uint32 size = GetTextureSize( m_pFrame->width, m_pFrame->height );
        m_pPlayerContext->mediaInfo.video.frameWidth = m_pFrame->width;
        m_pPlayerContext->mediaInfo.video.frameHeight = m_pFrame->height;

        /* Reuse (or lazily create) the sws context converting the decoded
         * pixel format to RGBA at texture size. */
        m_pImageConvertContext = sws_getCachedContext( m_pImageConvertContext,
            m_pFrame->width, m_pFrame->height, (AVPixelFormat)m_pFrame->format,
            size, size, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL );
        if ( NULL == m_pImageConvertContext )
        {
            av_frame_unref( m_pFrame );
            return false;
        }

        /* Texture header plus size*size RGBA pixels in one allocation. */
        RGBATexture* texture = (RGBATexture*)GetOutputBuffer( STRETCHED_RGBA_TEXTURE, sizeof(RGBATexture) + size * size * 4 );
        if ( NULL != texture )
        {
            AVPicture pict = { { 0 } };
            texture->pixelData = (uint8*)texture + sizeof(RGBATexture);
            texture->width = size;
            texture->pixelCount = size * size;

            /* Extrapolate a millisecond timestamp from the PTS delta,
             * clamped so it never goes negative. */
            int64_t pts = av_frame_get_best_effort_timestamp( m_pFrame );
            m_lastTimeStamp += (int64)((float32)(pts - m_lastPts) * (float32)m_pPlayerContext->mediaInfo.video.frameNum / (float32)m_pPlayerContext->mediaInfo.video.fraquency * 1000.f);
            m_lastPts = pts;
            if ( m_lastTimeStamp < 0 )
            {
                m_lastTimeStamp = 0;
            }
            texture->time = m_lastTimeStamp;

            /* Scale/convert directly into the texture's pixel storage. */
            pict.data[0] = (uint8_t*)texture->pixelData;
            pict.linesize[0] = texture->width * 4;
            sws_scale( m_pImageConvertContext, m_pFrame->data, m_pFrame->linesize, 0, m_pFrame->height, pict.data, pict.linesize );

            CommitOutputBuffer( STRETCHED_RGBA_TEXTURE );
        }
        av_frame_unref( m_pFrame );
    }
    return ret;
}
void LogManager::Error(const string& errorMessage, bool isFatal, const char* funcName, const char* sourceFile, int lineNum) { string tag = ((isFatal) ? ("FATAL") : ("ERROR")); // buffer for our final output string string buffer = GetOutputBuffer(tag, errorMessage, funcName, sourceFile, lineNum); OutputToDebugLogs(buffer); // Show the dialog box. int result = ::MessageBoxA(NULL, buffer.c_str(), tag.c_str(), MB_ABORTRETRYIGNORE|MB_ICONERROR|MB_DEFBUTTON3); // Act upon the choice. switch (result) { case IDIGNORE : return; case IDABORT : __debugbreak(); return; // Assembly language instruction to break into the debugger. case IDRETRY : return; default : return; } }
/* Call ProcesOutputSpec to expand parser variables then call the
 * interpreter passing the resulting string.
 * Arguments:
 *   ib  Input buffer (string) to be expanded and passed.
 *   e   Pointer to element under consideration.
 */
void
CallInterpreter(
    char        *ib,
    Element_t   *e
)
{
    int result;
    int recursive;

    /* save the value of this "e" to be used by Tcl_PrintLocation in
     * the case of a user error */
    tclE = e;

    /* if there's something in the output buffer, we're recursing,
     * just append, don't call the interpreter or clear the buffer */
    recursive = OutputBufferActive();

    ProcesOutputSpec(ib, e, 0, 1);

    if (!recursive) {
        result = Tcl_Eval(interpreter, GetOutputBuffer());
        ClearOutputBuffer();
        if (result != TCL_OK) {
            /* Tcl_Eval may modify its argument in place, so hand it a
             * writable copy of the constant error-reporting script. */
            static char errMessConst[] = "puts stderr $errorInfo";
            char errMessVar[sizeof(errMessConst)];

            fprintf(stderr, "Interpreter (internal to DtDocBook) error\n");
            strcpy(errMessVar, errMessConst);
            Tcl_Eval(interpreter, errMessVar);
            exit(1);
        }
    }
}
/* Queue (buffered) or immediately serialize (unbuffered) an outgoing packet.
 * @param bBuffered  true: enqueue for FlushSendQueue; false: serialize now.
 * @param bRelease   true: this call consumes the caller's reference.
 * Refcounting: buffered packets get an extra AddRef that FlushSendQueue
 * (or the queue-full drop below) releases. */
void CG2Node::SendPacket(G2Packet* pPacket, bool bBuffered, bool bRelease)
{
    m_nPacketsOut++;

    if( bBuffered )
    {
        pPacket->AddRef(); // FlushSendQueue will release
        if( m_lSendQueue.size() < 128 )
            m_lSendQueue.enqueue(pPacket);
        else
            pPacket->Release(); // queue full: drop packet, undoing the AddRef above
    }
    else
    {
        //m_lSendQueue.prepend(pPacket);
        pPacket->ToBuffer(GetOutputBuffer());
    }

    if( bRelease )
        pPacket->Release();

    /* Unbuffered sends force a full flush so the packet leaves promptly. */
    FlushSendQueue(!bBuffered);
}
LogManager::ErrorDialogResult LogManager::Error(const std::string& msg, bool isFatal, const char* funcName, const char* fileName, unsigned int lineNum) { std::string tag = ((isFatal) ? "FATAL" : "ERROR"); std::string buffer; GetOutputBuffer(buffer, tag, msg, funcName, fileName, lineNum); m_CritSection.Lock(); Tags::iterator findIt = m_Tags.find(tag); if (findIt != m_Tags.end()) OutputBufferToLogs(buffer, findIt->second); m_CritSection.Unlock(); int result; // show dialog box if (isFatal) result = MessageBox(nullptr, buffer.c_str(), tag.c_str(), MB_OK | MB_ICONERROR | MB_DEFBUTTON1); else result = MessageBox(nullptr, buffer.c_str(), tag.c_str(), MB_ABORTRETRYIGNORE | MB_ICONERROR | MB_DEFBUTTON3); switch (result) { case IDOK: exit(EXIT_FAILURE); case IDIGNORE: return LogManager::ERROR_DIALOG_IGNORE; case IDABORT: __debugbreak(); // Assembly instruction to open VS debugger. return LogManager::ERROR_DIALOG_ABORT; case IDRETRY: return LogManager::ERROR_DIALOG_RETRY; default: return LogManager::ERROR_DIALOG_RETRY; } }
/* One iteration of the filter's data pump: feed pending input, collect
 * pending output, run the codec once, and dispatch the result.
 * @return OMX_ErrorNone normally, OMX_ErrorNoMore when buffers are being
 *         returned or the last output was sent, or an error code. */
OMX_ERRORTYPE VideoFilter::ProcessDataBuffer()
{
    OMX_ERRORTYPE ret = OMX_ErrorNone;
    OMX_U32 flags=0;

    if(bInReturnBufferState == OMX_TRUE)
        return OMX_ErrorNoMore;

    ret = ProcessInputBuffer();
    if(ret == OMX_ErrorNotReady)
        return OMX_ErrorNone;
    if(ret != OMX_ErrorNone)
        return ret;

    ret = ProcessOutputBuffer();
    if(ret != OMX_ErrorNone)
        return ret;

    FilterBufRetCode DecRet = FILTER_OK;
    DecRet = FilterOneBuffer();

    /* Input was consumed by the codec: hand the header back upstream. */
    if(DecRet & FILTER_INPUT_CONSUMED) {
        DecRet = (FilterBufRetCode)(DecRet & ~FILTER_INPUT_CONSUMED);
        ReturnInputBuffer();
    }

    /* A frame finished decoding: record its boundaries with the
     * timestamp manager. */
    if(DecRet & FILTER_ONE_FRM_DECODED) {
        OMX_S32 nStuffSize;
        OMX_S32 nFrmSize;
        OMX_PTR pFrm;
        ret=GetDecBuffer(&pFrm, &nStuffSize, &nFrmSize);
        if(ret == OMX_ErrorNone) {
            LOG_DEBUG("%s: get one decoded frm: 0x%X(%d,%d) \n",__FUNCTION__,(int)pFrm,(int)nStuffSize,(int)nFrmSize);
            tsmSetFrmBoundary(hTsHandle, nStuffSize, nFrmSize, pFrm);
        }
        else {
            LOG_ERROR("%s: get decoded buffer failure !\n",__FUNCTION__);
        }
        DecRet = (FilterBufRetCode)(DecRet & ~FILTER_ONE_FRM_DECODED);
    }

    /* Translate the filter's frame-type flags into OMX buffer flags. */
    switch(DecRet & FILTER_FLAGS_MASK) {
        case FILTER_FLAG_CODEC_DATA:
            flags=OMX_BUFFERFLAG_CODECCONFIG;
            break;
        case FILTER_FLAG_NONKEY_FRAME:
            flags=OMX_BUFFERFLAG_ENDOFFRAME;
            break;
        case FILTER_FLAG_KEY_FRAME:
            flags=OMX_BUFFERFLAG_SYNCFRAME|OMX_BUFFERFLAG_ENDOFFRAME;
            break;
        default:
            flags=0;
            break;
    }
    DecRet = (FilterBufRetCode)(DecRet & ~FILTER_FLAGS_MASK);

    if(DecRet > 0) {
        LOG_DEBUG("DecRet: %d\n", DecRet);
    }

    /* Dispatch on the remaining (exclusive) return code. */
    switch(DecRet) {
        case FILTER_OK:
            break;
        case FILTER_NO_INPUT_BUFFER:
            /* Re-feed the pending header, or flag that we must wait for input. */
            if(pInBufferHdr != NULL)
                SetInputBuffer(pInBufferHdr->pBuffer + pInBufferHdr->nOffset, pInBufferHdr->nFilledLen, bLastInput);
            else
                bNeedInputBuffer = OMX_TRUE;
            break;
        case FILTER_NO_OUTPUT_BUFFER:
            bNeedOutBuffer = OMX_TRUE;
            break;
        case FILTER_DO_INIT:
            ret = InitFilter();
            if(ret == OMX_ErrorNone)
                bInit = OMX_TRUE;
            break;
        case FILTER_LAST_OUTPUT:
            HandleLastOutput(flags);
            ret = OMX_ErrorNoMore;
            break;
        case FILTER_HAS_OUTPUT:
        {
            /* Look up the header for the produced frame and send it out. */
            OMX_PTR pBuffer = NULL;
            OMX_S32 nOutSize=0;
            OMX_BUFFERHEADERTYPE *pBufferHdr = NULL;
            GetOutputBuffer(&pBuffer,&nOutSize);
            pBufferHdr = GetOutBufferHdrFromList(pBuffer);
            if(pBufferHdr != NULL) {
                pBufferHdr->nFlags = flags;
                pBufferHdr->nFilledLen = nOutSize;//pBufferHdr->nAllocLen;
                ReturnOutputBuffer(pBufferHdr,flags);
            }
            else {
                SetOutputBuffer(pBuffer); //still need to return it to vpu to avoid the frame is isolated in the pipeline
                LOG_ERROR("Can't find related bufferhdr with frame: %p\n", pBuffer);
            }
        }
            break;
        case FILTER_SKIP_OUTPUT:
            /* Frame dropped: consume its timestamp so the TS manager stays in sync. */
            tsmGetFrmTs(hTsHandle, NULL);
            break;
        case FILTER_ERROR:
            SendEvent(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
            ret=OMX_ErrorStreamCorrupt;
            break;
        default:
            break;
    }

    return ret;
}
// Format one log record and hand it to every debug log output.
void LogManager::Log(const string& tag, const string& message, const char* funcName, const char* sourceFile, int lineNum)
{
    const string output = GetOutputBuffer(tag, message, funcName, sourceFile, lineNum);
    OutputToDebugLogs(output);
}
/* One iteration of the filter's data pump (variant with external-read
 * input consumption and mosaic/invalid-frame suppression).
 * @return OMX_ErrorNone normally, OMX_ErrorNoMore when buffers are being
 *         returned or the last output was sent, or an error code. */
OMX_ERRORTYPE VideoFilter::ProcessDataBuffer()
{
    OMX_ERRORTYPE ret = OMX_ErrorNone;
    OMX_U32 flags=0;

    if(bInReturnBufferState == OMX_TRUE)
        return OMX_ErrorNoMore;

    ret = ProcessInputBuffer();
    if(ret == OMX_ErrorNotReady)
        return OMX_ErrorNone;
    if(ret != OMX_ErrorNone)
        return ret;

    ret = ProcessOutputBuffer();
    if(ret != OMX_ErrorNone)
        return ret;

    FilterBufRetCode DecRet = FILTER_OK;
    DecRet = FilterOneBuffer();

    /* Input fully consumed: return the header upstream immediately. */
    if(DecRet & FILTER_INPUT_CONSUMED) {
        DecRet = (FilterBufRetCode)(DecRet & ~FILTER_INPUT_CONSUMED);
        ReturnInputBuffer();
    }

    /* Codec keeps reading the input asynchronously: park the header in a
     * FIFO until the codec reports it is done with the data. */
    if(DecRet & FILTER_INPUT_CONSUMED_EXT_READ) {
        DecRet = (FilterBufRetCode)(DecRet & ~FILTER_INPUT_CONSUMED_EXT_READ);
        if(pInBufferHdr){
            InBufferHdrList.Add(pInBufferHdr);
            pInBufferHdr = NULL;
        }
        else{
            //for eos buffer with size=0, pInBufferHdr may be retured aleady before (in ProcessInputBuffer())
        }
    }

    /* Codec released a previously parked input: pop the oldest header
     * from the FIFO and send it back to the input port. */
    if(DecRet & FILTER_INPUT_CONSUMED_EXT_RETURN) {
        OMX_PTR ptr;
        DecRet = (FilterBufRetCode)(DecRet & ~FILTER_INPUT_CONSUMED_EXT_RETURN);
        GetReturnedInputDataPtr(&ptr);
        //since the list is FIFO, we needn't map ptr and pHdr
        if(InBufferHdrList.GetNodeCnt()>0){
            OMX_BUFFERHEADERTYPE* pHdr;
            pHdr=InBufferHdrList.GetNode(0);
            if(pHdr==NULL){
                LOG_ERROR("warning: get one null hdr from InBufferHdrList !\n");
            }
            if(pHdr->pBuffer!=ptr){
                LOG_ERROR("warning: the address doesn't match between ptr and pHdr->pBuffer !\n");
            }
            InBufferHdrList.Remove(pHdr);
            ports[IN_PORT]->SendBuffer(pHdr);
        }
        else{
            //this path is only for eos
            if((DecRet&FILTER_LAST_OUTPUT)==0){
                LOG_ERROR("warning: the numbers between insert and get doesn't matched !\n");
            }
        }
    }

    /* A frame finished decoding: record its boundaries with the
     * timestamp manager. */
    if(DecRet & FILTER_ONE_FRM_DECODED){
        OMX_S32 nStuffSize;
        OMX_S32 nFrmSize;
        OMX_PTR pFrm;
        ret=GetDecBuffer(&pFrm, &nStuffSize, &nFrmSize);
        if(ret == OMX_ErrorNone){
            LOG_DEBUG("%s: get one decoded frm: 0x%X(%d,%d) \n",__FUNCTION__,(int)pFrm,(int)nStuffSize,(int)nFrmSize);
            tsmSetFrmBoundary(hTsHandle, nStuffSize, nFrmSize, pFrm);
        }
        else{
            LOG_ERROR("%s: get decoded buffer failure !\n",__FUNCTION__);
        }
        DecRet = (FilterBufRetCode)(DecRet & ~FILTER_ONE_FRM_DECODED);
    }

    /* Translate the filter's frame-type flags into OMX buffer flags. */
    switch(DecRet & FILTER_FLAGS_MASK) {
        case FILTER_FLAG_CODEC_DATA:
            flags=OMX_BUFFERFLAG_CODECCONFIG;
            break;
        case FILTER_FLAG_NONKEY_FRAME:
            flags=OMX_BUFFERFLAG_ENDOFFRAME;
            break;
        case FILTER_FLAG_KEY_FRAME:
            flags=OMX_BUFFERFLAG_SYNCFRAME|OMX_BUFFERFLAG_ENDOFFRAME;
            break;
        default:
            flags=0;
            break;
    }
    DecRet = (FilterBufRetCode)(DecRet & ~FILTER_FLAGS_MASK);

    if(DecRet > 0) {
        LOG_DEBUG("DecRet: %d\n", DecRet);
    }

    /* Dispatch on the remaining (exclusive) return code. */
    switch(DecRet) {
        case FILTER_OK:
            break;
        case FILTER_NO_INPUT_BUFFER:
            /* Re-feed the pending header, or flag that we must wait for input. */
            if(pInBufferHdr != NULL)
                SetInputBuffer(pInBufferHdr->pBuffer + pInBufferHdr->nOffset, pInBufferHdr->nFilledLen, bLastInput);
            else
                bNeedInputBuffer = OMX_TRUE;
            break;
        case FILTER_NO_OUTPUT_BUFFER:
            bNeedOutBuffer = OMX_TRUE;
            break;
        case FILTER_DO_INIT:
            ret = InitFilter();
            if(ret == OMX_ErrorNone)
                bInit = OMX_TRUE;
            break;
        case FILTER_LAST_OUTPUT:
            HandleLastOutput(flags);
            ret = OMX_ErrorNoMore;
            break;
        case FILTER_HAS_OUTPUT:
        {
            OMX_PTR pBuffer = NULL;
            OMX_S32 nOutSize=0;
            OMX_BUFFERHEADERTYPE *pBufferHdr = NULL;
            GetOutputBuffer(&pBuffer,&nOutSize);
            /* Empty frames (up to MOSAIC_COUNT in a row) are recycled to the
             * decoder instead of being propagated, to suppress mosaic output. */
            if (nOutSize == 0) {
                nInvalidFrameCnt ++;
                if (nInvalidFrameCnt <= MOSAIC_COUNT) {
                    SetOutputBuffer(pBuffer); //still need to return it to vpu to avoid the frame is isolated in the pipeline
                    tsmGetFrmTs(hTsHandle, NULL);
                    break;
                }
            } else {
                nInvalidFrameCnt = 0;
            }
            pBufferHdr = GetOutBufferHdrFromList(pBuffer);
            if(pBufferHdr != NULL) {
                pBufferHdr->nFlags = flags;
                pBufferHdr->nFilledLen = nOutSize;//pBufferHdr->nAllocLen;
                ReturnOutputBuffer(pBufferHdr,flags);
            }
            else{
                SetOutputBuffer(pBuffer); //still need to return it to vpu to avoid the frame is isolated in the pipeline
                LOG_ERROR("Can't find related bufferhdr with frame: %p\n", pBuffer);
            }
        }
            break;
        case FILTER_SKIP_OUTPUT:
            /* Frame dropped: consume its timestamp so the TS manager stays in sync. */
            tsmGetFrmTs(hTsHandle, NULL);
            break;
        case FILTER_ERROR:
            SendEvent(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
            ret=OMX_ErrorStreamCorrupt;
            break;
        default:
            break;
    }

    return ret;
}