// Shuts the server down: stops background processing, notifies and drops
// every client connection, tears down the DDE client, and persists settings.
// Always returns true (close is never vetoed).
bool CNetDDESvrApp::OnClose()
{
    // Stop the background timer.
    StopTimer(m_nTimerID);

    // Close the listening socket.
    m_oSvrSocket.Close();

    // Notify and close all client connections...
    for (size_t i = 0; i < m_aoConnections.Size(); ++i)
    {
        try
        {
            CNetDDESvrSocket* pConnection = m_aoConnections[i];

            if (pConnection->IsOpen())
            {
                // Delete the conversation list.
                for (size_t j = 0; j < pConnection->m_aoNetConvs.Size(); ++j)
                    m_pDDEClient->DestroyConversation(pConnection->m_aoNetConvs[j]->m_pSvrConv);

                if (App.m_bTraceNetConns)
                    App.Trace(TXT("NETDDE_SERVER_DISCONNECT:"));

                // Send disconnect message.
                CNetDDEPacket oPacket(CNetDDEPacket::NETDDE_SERVER_DISCONNECT);

                pConnection->SendPacket(oPacket);

                // Update stats.
                ++m_nPktsSent;
            }
        }
        catch (Core::Exception& /*e*/)
        {
            // Best-effort shutdown: ignore failures and keep closing the rest.
        }
    }

    // Free all connection objects.
    m_aoConnections.DeleteAll();

    // Uninitialise the DDE client.
    m_pDDEClient->RemoveListener(this);
    m_pDDEClient.reset();

    // Empty the link cache.
    m_oLinkCache.Purge();

    // Terminate WinSock.
    CWinSock::Cleanup();

    // Save settings.
    SaveConfig();

    App.Trace(TXT("SERVER_STATUS: Server stopped"));

    return true;
}
// Routes a packet through the multiplexer: drops it when it does not come
// from the selected input stream or does not match the configured caps,
// otherwise re-stamps it with the configured output index and forwards it.
AkPacket MultiplexElement::iStream(const AkPacket &packet)
{
    // Reject packets arriving on a stream other than the selected input.
    bool wrongInput = this->m_inputIndex >= 0
                      && packet.index() != this->m_inputIndex;

    if (wrongInput)
        return AkPacket();

    // Reject packets whose caps are incompatible with the configured filter.
    bool capsMismatch = !this->m_caps.isEmpty()
                        && !packet.caps().isCompatible(this->m_caps);

    if (capsMismatch)
        return AkPacket();

    AkPacket forwarded(packet);

    if (this->m_outputIndex >= 0)
        forwarded.setIndex(this->m_outputIndex);

    akSend(forwarded)
}
// Closes a single client connection: best-effort disconnect notification,
// then socket close and the usual OnClosed() cleanup path.
void CNetDDESvrApp::CloseConnection(CNetDDESvrSocket* pConnection)
{
    // Tell the client we are going away; a socket failure here is ignored
    // because the connection is being torn down regardless.
    try
    {
        CNetDDEPacket oDisconnectPacket(CNetDDEPacket::NETDDE_SERVER_DISCONNECT);

        pConnection->SendPacket(oDisconnectPacket);

        // Update stats.
        ++App.m_nPktsSent;
    }
    catch (CSocketException& /*e*/)
    {
    }

    pConnection->Close();

    // Run the common close-handling / cleanup.
    OnClosed(pConnection, 0);
}
// Converts a video packet of any supported pixel format to BGRA, keeping
// the original width and height. Returns a null packet on failure.
AkPacket ConvertVideo::convert(const AkPacket &packet)
{
    AkVideoPacket videoPacket(packet);

    // Convert input format.
    // Map the caps' format name to the equivalent FFmpeg pixel format.
    QString format = AkVideoCaps::pixelFormatToString(videoPacket.caps().format());
    AVPixelFormat iFormat = av_get_pix_fmt(format.toStdString().c_str());

    // Initialize rescaling context.
    // Same input/output size, only the pixel format changes (to BGRA).
    // sws_getCachedContext() reuses m_scaleContext when parameters match.
    this->m_scaleContext = sws_getCachedContext(this->m_scaleContext,
                                                videoPacket.caps().width(),
                                                videoPacket.caps().height(),
                                                iFormat,
                                                videoPacket.caps().width(),
                                                videoPacket.caps().height(),
                                                AV_PIX_FMT_BGRA,
                                                SWS_FAST_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);

    if (!this->m_scaleContext)
        return AkPacket();

    // Create iPicture.
    // Wrap the input buffer in an AVFrame without copying the pixel data.
    AVFrame iFrame;
    memset(&iFrame, 0, sizeof(AVFrame));

    if (av_image_fill_arrays((uint8_t **) iFrame.data,
                             iFrame.linesize,
                             (const uint8_t *) videoPacket.buffer().constData(),
                             iFormat,
                             videoPacket.caps().width(),
                             videoPacket.caps().height(),
                             1) < 0)
        return AkPacket();

    // Create oPicture
    // Allocate the output BGRA buffer and wrap it in a second AVFrame.
    int frameSize = av_image_get_buffer_size(AV_PIX_FMT_BGRA,
                                             videoPacket.caps().width(),
                                             videoPacket.caps().height(),
                                             1);

    QByteArray oBuffer(frameSize, Qt::Uninitialized);
    AVFrame oFrame;
    memset(&oFrame, 0, sizeof(AVFrame));

    if (av_image_fill_arrays((uint8_t **) oFrame.data,
                             oFrame.linesize,
                             (const uint8_t *) oBuffer.constData(),
                             AV_PIX_FMT_BGRA,
                             videoPacket.caps().width(),
                             videoPacket.caps().height(),
                             1) < 0)
        return AkPacket();

    // Convert picture format
    sws_scale(this->m_scaleContext,
              iFrame.data,
              iFrame.linesize,
              0,
              videoPacket.caps().height(),
              oFrame.data,
              oFrame.linesize);

    // Create packet
    // Reuse the input packet's metadata, replacing only format and buffer.
    // NOTE(review): relies on caps()/buffer() returning assignable references.
    AkVideoPacket oPacket(packet);
    oPacket.caps().format() = AkVideoCaps::Format_bgra;
    oPacket.buffer() = oBuffer;

    return oPacket.toPacket();
}
void VCapsConvertElement::iStream(const QbPacket &packet) { if (!packet.caps().isValid() || packet.caps().mimeType() != "video/x-raw" || this->state() != ElementStatePlaying) return; if (packet.caps() == this->m_caps) { emit this->oStream(packet); return; } int iWidth = packet.caps().property("width").toInt(); int iHeight = packet.caps().property("height").toInt(); QString format = packet.caps().property("format").toString(); PixelFormat iFormat = av_get_pix_fmt(format.toStdString().c_str()); QList<QByteArray> props = this->m_caps.dynamicPropertyNames(); int oWidth = props.contains("width")? this->m_caps.property("width").toInt(): iWidth; int oHeight = props.contains("height")? this->m_caps.property("height").toInt(): iHeight; PixelFormat oFormat; if (props.contains("format")) { QString oFormatString = this->m_caps.property("format").toString(); oFormat = av_get_pix_fmt(oFormatString.toStdString().c_str()); } else oFormat = iFormat; SwsContext *scaleContext = sws_getCachedContext(NULL, iWidth, iHeight, iFormat, oWidth, oHeight, oFormat, SWS_FAST_BILINEAR, NULL, NULL, NULL); if (!scaleContext) return; int oBufferSize = avpicture_get_size(oFormat, oWidth, oHeight); QSharedPointer<uchar> oBuffer(new uchar[oBufferSize]); AVPicture iPicture; avpicture_fill(&iPicture, (uint8_t *) packet.buffer().data(), iFormat, iWidth, iHeight); AVPicture oPicture; avpicture_fill(&oPicture, (uint8_t *) oBuffer.data(), oFormat, oWidth, oHeight); sws_scale(scaleContext, (uint8_t **) iPicture.data, iPicture.linesize, 0, iHeight, oPicture.data, oPicture.linesize); sws_freeContext(scaleContext); QbPacket oPacket(packet.caps().update(this->m_caps), oBuffer, oBufferSize); oPacket.setPts(packet.pts()); oPacket.setDuration(packet.duration()); oPacket.setTimeBase(packet.timeBase()); oPacket.setIndex(packet.index()); emit this->oStream(oPacket); }
void ACapsConvertElement::iStream(const QbPacket &packet) { if (!packet.caps().isValid() || packet.caps().mimeType() != "audio/x-raw" || this->state() != ElementStatePlaying) return; // Input Format AVSampleFormat iSampleFormat = av_get_sample_fmt(packet.caps().property("format").toString().toStdString().c_str()); int iNChannels = packet.caps().property("channels").toInt(); int64_t iChannelLayout = av_get_channel_layout(packet.caps().property("layout").toString().toStdString().c_str()); int iNPlanes = av_sample_fmt_is_planar(iSampleFormat)? iNChannels: 1; int iSampleRate = packet.caps().property("rate").toInt(); int iNSamples = packet.caps().property("samples").toInt(); if (iNSamples < 1) iNSamples = 1024; bool sameMimeType = packet.caps().mimeType() == this->m_caps.mimeType(); // Output Format AVSampleFormat oSampleFormat = (sameMimeType && this->m_caps.dynamicPropertyNames().contains("format"))? av_get_sample_fmt(this->m_caps.property("format").toString().toStdString().c_str()): iSampleFormat; int oNChannels = (sameMimeType && this->m_caps.dynamicPropertyNames().contains("channels"))? this->m_caps.property("channels").toInt(): iNChannels; int64_t oChannelLayout = (sameMimeType && this->m_caps.dynamicPropertyNames().contains("layout"))? av_get_channel_layout(this->m_caps.property("layout").toString().toStdString().c_str()): iChannelLayout; int oSampleRate = (sameMimeType && this->m_caps.dynamicPropertyNames().contains("rate"))? 
this->m_caps.property("rate").toInt(): iSampleRate; QVector<uint8_t *> iData(iNPlanes); int iLineSize; if (av_samples_fill_arrays(&iData.data()[0], &iLineSize, (const uint8_t *) packet.buffer().data(), iNChannels, iNSamples, iSampleFormat, 1) < 0) return; QbCaps caps1(packet.caps()); QbCaps caps2(this->m_curInputCaps); caps1.setProperty("samples", QVariant()); caps2.setProperty("samples", QVariant()); if (caps1 != caps2) { // create resampler context this->m_resampleContext = SwrContextPtr(swr_alloc(), this->deleteSwrContext); if (!this->m_resampleContext) return; // set options av_opt_set_int(this->m_resampleContext.data(), "in_channel_layout", iChannelLayout, 0); av_opt_set_int(this->m_resampleContext.data(), "in_sample_rate", iSampleRate, 0); av_opt_set_sample_fmt(this->m_resampleContext.data(), "in_sample_fmt", iSampleFormat, 0); av_opt_set_int(this->m_resampleContext.data(), "out_channel_layout", oChannelLayout, 0); av_opt_set_int(this->m_resampleContext.data(), "out_sample_rate", oSampleRate, 0); av_opt_set_sample_fmt(this->m_resampleContext.data(), "out_sample_fmt", oSampleFormat, 0); // initialize the resampling context if (swr_init(this->m_resampleContext.data()) < 0) return; this->m_curInputCaps = packet.caps(); } // compute destination number of samples int oNSamples = av_rescale_rnd(swr_get_delay(this->m_resampleContext.data(), iSampleRate) + iNSamples, oSampleRate, iSampleRate, AV_ROUND_UP); // buffer is going to be directly written to a rawaudio file, no alignment int oNPlanes = av_sample_fmt_is_planar(oSampleFormat)? 
oNChannels: 1; QVector<uint8_t *> oData(oNPlanes); int oLineSize; int oBufferSize = av_samples_get_buffer_size(&oLineSize, oNChannels, oNSamples, oSampleFormat, 1); QSharedPointer<uchar> oBuffer(new uchar[oBufferSize]); if (!oBuffer) return; if (av_samples_fill_arrays(&oData.data()[0], &oLineSize, (const uint8_t *) oBuffer.data(), oNChannels, oNSamples, oSampleFormat, 1) < 0) return; // convert to destination format if (swr_convert(this->m_resampleContext.data(), oData.data(), oNSamples, (const uint8_t **) iData.data(), iNSamples) < 0) return; const char *format = av_get_sample_fmt_name(oSampleFormat); char layout[256]; av_get_channel_layout_string(layout, sizeof(layout), oNChannels, oChannelLayout); QString caps = QString("audio/x-raw," "format=%1," "channels=%2," "rate=%3," "layout=%4," "samples=%5").arg(format) .arg(oNChannels) .arg(oSampleRate) .arg(layout) .arg(oNSamples); QbPacket oPacket(caps, oBuffer, oBufferSize); oPacket.setPts(packet.pts()); oPacket.setDuration(packet.duration()); oPacket.setTimeBase(packet.timeBase()); oPacket.setIndex(packet.index()); emit this->oStream(oPacket); }
// DDE advise callback: a linked item's value changed on the DDE server.
// Caches the new value and broadcasts a DDE_ADVISE packet to every open
// client connection that references the link.
void CNetDDESvrApp::OnAdvise(CDDELink* pLink, const CDDEData* pData)
{
    ASSERT(pData != NULL);

    // Ignore Advise, if during an Advise Start.
    if (pLink == NULL)
        return;

    CDDEConv* pConv = pLink->Conversation();
    CBuffer oData = pData->GetBuffer();

    // Find the links' value in the cache.
    CLinkValue* pValue = m_oLinkCache.Find(pConv, pLink);

    // Discard duplicate updates.
    if ((App.m_bDiscardDups) && (pValue != NULL) && (pValue->m_oLastValue == oData))
    {
        if (App.m_bTraceUpdates)
            App.Trace(TXT("DDE_ADVISE: %s %s (ignored)"), pConv->Service(), pLink->Item());

        return;
    }

    // Create a cache entry, if a new link.
    if (pValue == NULL)
        pValue = m_oLinkCache.Create(pConv, pLink);

    ASSERT(pValue != NULL);

    // Update links' cached value.
    pValue->m_oLastValue = oData;
    pValue->m_tLastUpdate = CDateTime::Current();

    // Create advise packet.
    // Payload: conversation handle, item, clipboard format, data, flag.
    HCONV hConv = pConv->Handle();

    CBuffer oBuffer;
    CMemStream oStream(oBuffer);

    oStream.Create();

    oStream.Write(&hConv, sizeof(hConv));
    oStream << pLink->Item();
    oStream << (uint32) pLink->Format();
    oStream << oData;
    oStream << true;

    oStream.Close();

    CNetDDEPacket oPacket(CNetDDEPacket::DDE_ADVISE, oBuffer);

    // Notify all NetDDEClients...
    for (size_t i = 0; i < m_aoConnections.Size(); ++i)
    {
        CNetDDESvrSocket* pConnection = m_aoConnections[i];

        // Ignore, if connection severed.
        if (!pConnection->IsOpen())
            continue;

        // Connection references link?
        if (pConnection->IsLinkUsed(pLink))
        {
            try
            {
                if (App.m_bTraceUpdates)
                {
                    // Render the value as text where possible, otherwise
                    // just show the clipboard format's name.
                    uint nFormat = pLink->Format();

                    CString strData;

                    if (nFormat == CF_TEXT)
                        strData = oData.ToString(ANSI_TEXT);
                    else if (nFormat == CF_UNICODETEXT)
                        strData = oData.ToString(UNICODE_TEXT);
                    else
                        strData = CClipboard::FormatName(nFormat);

                    App.Trace(TXT("DDE_ADVISE: %s %s [%s]"), pConv->Service(), pLink->Item(), strData);
                }

                // Send advise message.
                pConnection->SendPacket(oPacket);

                // Update stats.
                ++m_nPktsSent;
            }
            catch (CSocketException& e)
            {
                // Keep serving the other connections on a socket failure.
                App.Trace(TXT("SOCKET_ERROR: %s"), e.twhat());
            }
        }
    }
}
// DDE disconnect callback: the server-side conversation has terminated.
// Notifies each client that used the conversation (once per client),
// removes the dead conversation entries, purges the link cache and
// releases every outstanding reference on the conversation.
void CNetDDESvrApp::OnDisconnect(CDDECltConv* pConv)
{
    // Build the DDE_DISCONNECT packet; payload is just the conv handle.
    HCONV hConv = pConv->Handle();

    CBuffer oBuffer;
    CMemStream oStream(oBuffer);

    oStream.Create();

    oStream.Write(&hConv, sizeof(hConv));

    oStream.Close();

    CNetDDEPacket oPacket(CNetDDEPacket::DDE_DISCONNECT, oBuffer);

    // For all NetDDEClients...
    for (size_t i = 0; i < m_aoConnections.Size(); ++i)
    {
        CNetDDESvrSocket* pConnection = m_aoConnections[i];

        bool bNotifyConn = true;

        // Clean-up all client conversations...
        // (Iterate backwards because entries are deleted in-place.)
        for (int j = static_cast<int>(pConnection->m_aoNetConvs.Size())-1; j >= 0; --j)
        {
            CNetDDEConv* pNetConv = pConnection->m_aoNetConvs[j];

            if (pNetConv->m_pSvrConv == pConv)
            {
                try
                {
                    // Only send once per client.
                    if (bNotifyConn)
                    {
                        bNotifyConn = false;

                        if (App.m_bTraceConvs)
                            App.Trace(TXT("DDE_DISCONNECT: %s, %s"), pConv->Service(), pConv->Topic());

                        // Send disconnect message.
                        pConnection->SendPacket(oPacket);

                        // Update stats.
                        ++m_nPktsSent;
                    }
                }
                catch (CSocketException& e)
                {
                    // Keep cleaning up even if the notification fails.
                    App.Trace(TXT("SOCKET_ERROR: %s"), e.twhat());
                }

                pConnection->m_aoNetConvs.Delete(j);
            }
        }
    }

    // Purge link cache.
    m_oLinkCache.Purge(pConv);

    uint nRefCount = pConv->RefCount();

    // Free conversation.
    // (Destroy once per outstanding reference so the count reaches zero.)
    while (nRefCount--)
        m_pDDEClient->DestroyConversation(pConv);
}
void CNetDDESvrApp::OnDDEStartAdvise(CNetDDESvrSocket& oConnection, CNetDDEPacket& oReqPacket) { ASSERT(oReqPacket.DataType() == CNetDDEPacket::DDE_START_ADVISE); bool bResult = false; CDDECltConv* pConv = NULL; CDDELink* pLink = NULL; HCONV hConv; uint32 nConvID; CString strItem; uint32 nFormat; bool bAsync; bool bReqVal; // Decode message. CMemStream oStream(oReqPacket.Buffer()); oStream.Open(); oStream.Seek(sizeof(CNetDDEPacket::Header)); oStream.Read(&hConv, sizeof(hConv)); oStream >> nConvID; oStream >> strItem; oStream >> nFormat; oStream >> bAsync; oStream >> bReqVal; oStream.Close(); if (App.m_bTraceAdvises) App.Trace(TXT("DDE_START_ADVISE: %s %s %s"), strItem, CClipboard::FormatName(nFormat), (bAsync) ? TXT("[ASYNC]") : TXT("")); try { // Locate the conversation. pConv = m_pDDEClient->FindConversation(hConv); if (pConv != NULL) { CNetDDEConv* pNetConv = oConnection.FindNetConv(pConv, nConvID); ASSERT(pNetConv != NULL); // Call DDE to create the link. pLink = pConv->CreateLink(strItem, nFormat); // Attach to the connection. pNetConv->m_aoLinks.Add(pLink); bResult = true; } } catch (CDDEException& e) { App.Trace(TXT("DDE_ERROR: %s"), e.twhat()); } // Sync advise start? if (!bAsync) { // Create response message. CBuffer oRspBuffer; CMemStream oRspStream(oRspBuffer); oRspStream.Create(); oRspStream << bResult; oRspStream.Close(); // Send response message. CNetDDEPacket oRspPacket(CNetDDEPacket::DDE_START_ADVISE, oReqPacket.PacketID(), oRspBuffer); oConnection.SendPacket(oRspPacket); // Update stats. ++m_nPktsSent; } // Failed async advise start? if ((bAsync) && (!bResult)) { // Create response message. CBuffer oRspBuffer; CMemStream oRspStream(oRspBuffer); oRspStream.Create(); oRspStream.Write(&hConv, sizeof(hConv)); oRspStream << strItem; oRspStream << nFormat; oRspStream << true; oRspStream.Close(); // Send response message. CNetDDEPacket oRspPacket(CNetDDEPacket::DDE_START_ADVISE_FAILED, oRspBuffer); oConnection.SendPacket(oRspPacket); // Update stats. 
++m_nPktsSent; } CLinkValue* pLinkValue = NULL; // Link established AND 1st link AND need to request value? if ( (bResult) && (pLink->RefCount() == 1) && (bReqVal) ) { try { // Request links current value. CDDEData oData = pConv->Request(strItem, nFormat); // Find the links' value cache. if ((pLinkValue = m_oLinkCache.Find(pConv, pLink)) == NULL) pLinkValue = m_oLinkCache.Create(pConv, pLink); ASSERT(pLinkValue != NULL); // Update links' value cache. pLinkValue->m_oLastValue = oData.GetBuffer();; pLinkValue->m_tLastUpdate = CDateTime::Current(); } catch (CDDEException& e) { App.Trace(TXT("DDE_ERROR: %s"), e.twhat()); } } // Link established AND not 1st real link? else if ( (bResult) && (pLink->RefCount() > 1) ) { // Find last advise value. pLinkValue = m_oLinkCache.Find(pConv, pLink); } // Send initial advise? if (pLinkValue != NULL) { CBuffer oBuffer; CMemStream oStream(oBuffer); oStream.Create(); oStream.Write(&hConv, sizeof(hConv)); oStream << strItem; oStream << nFormat; oStream << pLinkValue->m_oLastValue; oStream << true; oStream.Close(); CNetDDEPacket oPacket(CNetDDEPacket::DDE_ADVISE, oBuffer); // Send links' last advise data. oConnection.SendPacket(oPacket); // Update stats. ++m_nPktsSent; if (App.m_bTraceUpdates) { CString strData; if (nFormat == CF_TEXT) strData = pLinkValue->m_oLastValue.ToString(ANSI_TEXT); else if (nFormat == CF_UNICODETEXT) strData = pLinkValue->m_oLastValue.ToString(UNICODE_TEXT); else strData = CClipboard::FormatName(nFormat); App.Trace(TXT("DDE_ADVISE: %s %u"), strItem, nFormat); } } }
// Decodes one compressed video packet and returns the decoded frame as a
// list of QbPackets. Returns an empty list when the stream is invalid, the
// decoder fails, or the packet did not complete a frame.
QList<QbPacket> VideoStream::readPackets(AVPacket *packet)
{
    QList<QbPacket> packets;

    if (!this->isValid())
        return packets;

    AVFrame iFrame;
    avcodec_get_frame_defaults(&iFrame);

    // FIX: gotFrame must be initialised; on a decode error
    // avcodec_decode_video2() may leave it untouched, and the original
    // code read it uninitialised. Also check the decoder's return code.
    int gotFrame = 0;

    if (avcodec_decode_video2(this->codecContext(),
                              &iFrame,
                              &gotFrame,
                              packet) < 0)
        return packets;

    if (!gotFrame)
        return packets;

    int frameSize = avpicture_get_size(this->codecContext()->pix_fmt,
                                       this->codecContext()->width,
                                       this->codecContext()->height);

    // FIX: avpicture_get_size() returns a negative error code on failure;
    // passing that to new[] would be undefined behaviour.
    if (frameSize < 1)
        return packets;

    // FIX: the buffer is allocated with new[], so it must be released with
    // delete[]; QSharedPointer's default deleter calls plain delete (UB).
    QSharedPointer<uchar> oBuffer(new uchar[frameSize],
                                  [] (uchar *data) { delete [] data; });

    // NOTE(review): 'sync' is a function-local static, so it is shared by
    // every VideoStream instance; it probably belongs in per-object state.
    static bool sync;

    if (this->m_fst)
    {
        // A zero best-effort timestamp on the first frame marks the stream
        // as synchronised; anything else marks it unsynchronised.
        sync = av_frame_get_best_effort_timestamp(&iFrame)? false: true;
        this->m_pts = 0;
        this->m_duration = this->fps().invert().value() * this->timeBase().invert().value();
        this->m_fst = false;
    }
    else
        this->m_pts += this->m_duration;

    // Copy the decoded picture into the contiguous output buffer.
    avpicture_layout((AVPicture *) &iFrame,
                     this->codecContext()->pix_fmt,
                     this->codecContext()->width,
                     this->codecContext()->height,
                     (uint8_t *) oBuffer.data(),
                     frameSize);

    QbCaps caps = this->caps();
    caps.setProperty("sync", sync);

    QbPacket oPacket(caps, oBuffer, frameSize);

    oPacket.setPts(this->m_pts);
    oPacket.setDuration(this->m_duration);
    oPacket.setTimeBase(this->timeBase());
    oPacket.setIndex(this->index());

    packets << oPacket;

    return packets;
}