// This method takes an attributed string and outputs a GlyphLayout data
    // structure that contains the glyph number and location of each individual glyph
    void getGlyphLayout (const AttributedString& text, GlyphLayout& glyphLayout)
    {
        // For now we are creating the DirectWrite factory, system font collection,
        // D2D factory and GDI-compatible render target every time we lay out text.
        // This is inefficient, and we may be loading and unloading libraries on each layout.
        // These four objects should be created once at application startup and destroyed
        // when the application exits. I'm not sure where the best place to do that is, so
        // for now I will just use the inefficient method. (A sketch of one caching
        // approach follows this example.)

        IDWriteFactory* dwFactory = nullptr;
        HRESULT hr = DWriteCreateFactory (DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory),
            reinterpret_cast<IUnknown**>(&dwFactory));

        IDWriteFontCollection* dwFontCollection = nullptr;
        hr = dwFactory->GetSystemFontCollection (&dwFontCollection);

        // To add color to text, we need to create a D2D render target.
        // Since we are not actually rendering to a D2D context, we create a temporary GDI-compatible render target.
        ID2D1Factory *d2dFactory = nullptr;
        hr = D2D1CreateFactory (D2D1_FACTORY_TYPE_SINGLE_THREADED, &d2dFactory);
        D2D1_RENDER_TARGET_PROPERTIES d2dRTProp = D2D1::RenderTargetProperties(
            D2D1_RENDER_TARGET_TYPE_SOFTWARE,
            D2D1::PixelFormat(
            DXGI_FORMAT_B8G8R8A8_UNORM,
            D2D1_ALPHA_MODE_IGNORE),
            0,
            0,
            D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE,
            D2D1_FEATURE_LEVEL_DEFAULT
            );
        ID2D1DCRenderTarget* d2dDCRT = nullptr;
        hr = d2dFactory->CreateDCRenderTarget (&d2dRTProp, &d2dDCRT);

        // Initially we set the paragraph up with a default font and then apply the attributed string ranges later
        Font defaultFont;
        const float defaultFontHeightToEmSizeFactor = getFontHeightToEmSizeFactor (defaultFont, *dwFontCollection);
        // We should probably detect the locale instead of hard-coding it to en-us
        String localeName("en-us");

        // We multiply the font height by the size factor so that the text is laid out at the correct size
        IDWriteTextFormat* dwTextFormat = nullptr;
        hr = dwFactory->CreateTextFormat (
            defaultFont.getTypefaceName().toWideCharPointer(),
            dwFontCollection,
            DWRITE_FONT_WEIGHT_REGULAR,
            DWRITE_FONT_STYLE_NORMAL,
            DWRITE_FONT_STRETCH_NORMAL,
            defaultFont.getHeight() * defaultFontHeightToEmSizeFactor,
            localeName.toWideCharPointer(),
            &dwTextFormat
            );

        // Paragraph Attributes
        // Set Paragraph Alignment
        if (text.getTextAlignment() == AttributedString::left)
            dwTextFormat->SetTextAlignment (DWRITE_TEXT_ALIGNMENT_LEADING);
        if (text.getTextAlignment() == AttributedString::right)
            dwTextFormat->SetTextAlignment (DWRITE_TEXT_ALIGNMENT_TRAILING);
        if (text.getTextAlignment() == AttributedString::center)
            dwTextFormat->SetTextAlignment (DWRITE_TEXT_ALIGNMENT_CENTER);
        // DirectWrite cannot justify text, default to left alignment
        if (text.getTextAlignment() == AttributedString::justified)
            dwTextFormat->SetTextAlignment (DWRITE_TEXT_ALIGNMENT_LEADING);
        // Set Word Wrap
        if (text.getWordWrap() == AttributedString::none)
            dwTextFormat->SetWordWrapping (DWRITE_WORD_WRAPPING_NO_WRAP);
        if (text.getWordWrap() == AttributedString::byWord)
            dwTextFormat->SetWordWrapping (DWRITE_WORD_WRAPPING_WRAP);
        // DirectWrite does not support wrapping by character, default to wrapping by word
        if (text.getWordWrap() == AttributedString::byChar)
            dwTextFormat->SetWordWrapping (DWRITE_WORD_WRAPPING_WRAP);
        // DirectWrite does not automatically set reading direction
        // This must be set correctly and manually when using RTL Scripts (Hebrew, Arabic)
        if (text.getReadingDirection() == AttributedString::rightToLeft)
            dwTextFormat->SetReadingDirection (DWRITE_READING_DIRECTION_RIGHT_TO_LEFT);

        IDWriteTextLayout* dwTextLayout = nullptr;
        hr = dwFactory->CreateTextLayout (
            text.getText().toWideCharPointer(),
            text.getText().length(),
            dwTextFormat,
            glyphLayout.getWidth(),
            glyphLayout.getHeight(),
            &dwTextLayout
            );

        // Character Attributes
        int numCharacterAttributes = text.getCharAttributesSize();
        for (int i = 0; i < numCharacterAttributes; ++i)
        {
            Attr* attr = text.getCharAttribute (i);
            // Character Range Error Checking
            if (attr->range.getStart() > text.getText().length()) continue;
            if (attr->range.getEnd() > text.getText().length()) attr->range.setEnd (text.getText().length());
            if (attr->attribute == Attr::font)
            {
                AttrFont* attrFont = static_cast<AttrFont*>(attr);
                DWRITE_TEXT_RANGE dwRange;
                dwRange.startPosition = attrFont->range.getStart();
                dwRange.length = attrFont->range.getLength();
                dwTextLayout->SetFontFamilyName (attrFont->font.getTypefaceName().toWideCharPointer(), dwRange);
                // We multiply the font height by the size factor so that the text is laid out at the correct size
                const float fontHeightToEmSizeFactor = getFontHeightToEmSizeFactor (attrFont->font, *dwFontCollection);
                dwTextLayout->SetFontSize (attrFont->font.getHeight() * fontHeightToEmSizeFactor, dwRange);
            }
            if (attr->attribute == Attr::foregroundColour)
            {
                AttrColour* attrColour = static_cast<AttrColour*>(attr);
                DWRITE_TEXT_RANGE dwRange;
                dwRange.startPosition = attrColour->range.getStart();
                dwRange.length = attrColour->range.getLength();
                ID2D1SolidColorBrush* d2dBrush = nullptr;
                d2dDCRT->CreateSolidColorBrush (D2D1::ColorF (D2D1::ColorF(attrColour->colour.getFloatRed(),
                    attrColour->colour.getFloatGreen(), attrColour->colour.getFloatBlue(),
                    attrColour->colour.getFloatAlpha())), &d2dBrush);
                // We need to call SetDrawingEffect with a legitimate brush to get DirectWrite to break text based on colours
                dwTextLayout->SetDrawingEffect (d2dBrush, dwRange);
                safeRelease (&d2dBrush);
            }
        }

        UINT32 actualLineCount = 0;
        hr = dwTextLayout->GetLineMetrics (nullptr, 0, &actualLineCount);
        // Preallocate GlyphLayout Line Array
        glyphLayout.setNumLines (actualLineCount);
        HeapBlock <DWRITE_LINE_METRICS> dwLineMetrics (actualLineCount);
        hr = dwTextLayout->GetLineMetrics (dwLineMetrics, actualLineCount, &actualLineCount);
        int location = 0;
        // Create GlyphLine structures for each line in the layout
        for (UINT32 i = 0; i < actualLineCount; ++i)
        {
            // Get string range
            Range<int> lineStringRange (location, location + (int) dwLineMetrics[i].length);
            location += (int) dwLineMetrics[i].length; // advance so the next line's range starts where this one ends
            GlyphLine* glyphLine = new GlyphLine();
            glyphLine->setStringRange (lineStringRange);
            glyphLayout.addGlyphLine (glyphLine);
        }

        // To copy glyph data from DirectWrite into our own data structures we must create our
        // own CustomTextRenderer. Instead of passing the draw method an actual graphics context,
        // we pass it the GlyphLayout object that needs to be filled with glyphs.
        CustomDirectWriteTextRenderer* textRenderer = new CustomDirectWriteTextRenderer();
        hr = dwTextLayout->Draw (
            &glyphLayout,
            textRenderer,
            glyphLayout.getX(),
            glyphLayout.getY()
            );

        safeRelease (&textRenderer);
        safeRelease (&dwTextLayout);
        safeRelease (&dwTextFormat);
        safeRelease (&d2dDCRT);
        safeRelease (&d2dFactory);
        safeRelease (&dwFontCollection);
        safeRelease (&dwFactory);
    }
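The comment at the top of this example notes that the DirectWrite factory, system font collection, D2D factory and GDI-compatible render target should be created once rather than on every layout call. Below is a minimal sketch of one way to cache them; it simply reuses the creation calls from the example above, and the DirectWriteContext / getDirectWriteContext names are hypothetical, not part of the original code, and error handling is omitted.

// Hypothetical sketch: cache the four expensive objects for the lifetime of the
// application instead of recreating them in every getGlyphLayout() call.
struct DirectWriteContext
{
    IDWriteFactory* dwFactory = nullptr;
    IDWriteFontCollection* dwFontCollection = nullptr;
    ID2D1Factory* d2dFactory = nullptr;
    ID2D1DCRenderTarget* d2dDCRT = nullptr;

    DirectWriteContext()
    {
        DWriteCreateFactory (DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory),
                             reinterpret_cast<IUnknown**>(&dwFactory));
        if (dwFactory != nullptr)
            dwFactory->GetSystemFontCollection (&dwFontCollection);

        D2D1CreateFactory (D2D1_FACTORY_TYPE_SINGLE_THREADED, &d2dFactory);
        if (d2dFactory != nullptr)
        {
            const D2D1_RENDER_TARGET_PROPERTIES props = D2D1::RenderTargetProperties (
                D2D1_RENDER_TARGET_TYPE_SOFTWARE,
                D2D1::PixelFormat (DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE),
                0, 0,
                D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE,
                D2D1_FEATURE_LEVEL_DEFAULT);
            d2dFactory->CreateDCRenderTarget (&props, &d2dDCRT);
        }
    }

    ~DirectWriteContext()
    {
        safeRelease (&d2dDCRT);
        safeRelease (&d2dFactory);
        safeRelease (&dwFontCollection);
        safeRelease (&dwFactory);
    }
};

// Constructed on first use and released when the process shuts down.
static DirectWriteContext& getDirectWriteContext()
{
    static DirectWriteContext context;
    return context;
}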
Example #2
void SceneMgr::close()
{
#if defined(RELEASEDEBUG) || defined(_DEBUG)
	m_mapTileInfo.clear();
#endif
	m_nMapVersion = ORIGIN_MAP_VERSION;

	SceneManager * pRealSceneMgr = static_cast<SceneManager * >( m_pSceneManager);
	SceneEntityList::iterator it   =  m_EntityList.begin();
	SceneEntityList::iterator end  =  m_EntityList.end();
	for(;it != end;)
	{
		bool ret = pRealSceneMgr->IsReferencedBySceneManager( (*it) );
		if( ret ) 
		{
			++it;
			continue;
		}

		if (m_pSceneManager->getRunType() == RUN_TYPE_GAME)
		{
			EntityView *pEntity = (*it);
			pEntity->removeFlag(flagFade);
			// Objects on the map (mainly magicview) were not released correctly when switching maps; add the flagReleaseImmediate flag here so they are released immediately and the memory leak is avoided [5/5/2011 zgz]
			pEntity->addFlag(flagReleaseImmediate);
			pEntity->update(0,0,0);
			safeRelease((*it));
		}		
		it = m_EntityList.erase(it);
	}
	safeDeleteArray(m_pMapTable);
	safeDeleteArray(m_pMultiValueTable);
	if (m_pTiles)
	{
		safeDeleteArray(m_pTiles);
		m_nMatrixWidth = 0; // the tile matrix is gone, so reset its width
	}
	m_EntityList.clear();
	
	m_LayerListCache.close();
}
Example #3
void TCPServer::stop()
{
	if(_isStarted) {

		for(int i = 0; i < _threads; ++i) {
			PostQueuedCompletionStatus(getIOCP(), 0, (ULONG_PTR)0, nullptr);
		}

		WaitForMultipleObjects(_threads, _pThreads, true, INFINITE);

		for(int i = 0; i < _threads; ++i){
			safeRelease(_pThreads[i]);
		}

		safeDelete(_pThreads);
	}

	deInitIOCP();
	destroyListenerSocket();
	clearClients();

	_isStarted = false;
}
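The examples above call safeRelease(), safeDelete() and safeDeleteArray() without ever showing their definitions. They are conventionally tiny helpers that release or delete and then null the pointer; the sketch below is an assumption about their shape, not the actual code of any of these projects (note that some examples pass the pointer itself and others pass its address, hence the two safeRelease overloads). Individual projects overload or adapt these further, for example for thread handles or engine objects with their own release semantics.

// Hypothetical sketch of the helpers assumed throughout these examples.
template <typename T>
void safeRelease (T*& ptr)          // used as safeRelease(ptr) on ref-counted objects
{
    if (ptr != nullptr)
    {
        ptr->Release();             // COM interfaces and other objects exposing Release()
        ptr = nullptr;
    }
}

template <typename T>
void safeRelease (T** ppPtr)        // used as safeRelease(&ptr), e.g. on COM interfaces
{
    if (ppPtr != nullptr && *ppPtr != nullptr)
    {
        (*ppPtr)->Release();
        *ppPtr = nullptr;
    }
}

template <typename T>
void safeDelete (T*& ptr)           // delete a single heap object and null the pointer
{
    delete ptr;                     // deleting a null pointer is a no-op
    ptr = nullptr;
}

template <typename T>
void safeDeleteArray (T*& ptr)      // delete an array allocated with new[]
{
    delete[] ptr;
    ptr = nullptr;
}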
Example #4
	bool RenderTargetD3D9Texture::switchDisplayMode(RenderEngineCreationParameters* param)
	{
		if( 0 == param )
			return false;

		// fetch the creation parameters
		int width = param->w;
		int height = param->h;
		bool alpha = (param->colorDepth > 24);
		FilterOptions minFO = m_pTexture->getMinFilter();
		FilterOptions magFO = m_pTexture->getMagFilter();
		FilterOptions mipFO = m_pTexture->getMipFilter();
		TextureAddressingMode s = m_pTexture->getAddressS();
		TextureAddressingMode t = m_pTexture->getAddressT();

		// release the existing resources
		safeRelease(m_pTexture);
		COM_SAFE_RELEASE(m_pDepthStencilBuffer);

		// recreate the render target
		return create(width, height, alpha, minFO, magFO, mipFO, s,t);

	}
Example #5
void KLController::open()
{
    if(isOpened() && checkThread && checkThread->isRunning()) {
        return;
    }
    checkThread->stop();
    checkThread->wait();
    safeRelease(sensor);

    HRESULT hr = GetDefaultKinectSensor(&sensor);
    if(SUCCEEDED(hr)) {
        hr = sensor->Open();
    } else {
        emit _hrError(hr);
    }
    if(SUCCEEDED(hr)) {
        hr = sensor->get_CoordinateMapper(&coordMapper);
    } else {
        emit _hrError(hr);
    }
    if(SUCCEEDED(hr)) {
        checkThread->setSensor(sensor);
        checkThread->start();
        qDebug()<<"[KLController] Opened";
        emit _open(true);
        if(isAvailable()) {
            qDebug()<<"[KLController] Connected";
            emit _available(true);
        } else {
            qDebug()<<"[KLController] Disconnected";
            emit _available(false);
        }
    } else {
        emit _hrError(hr);
    }
}
Example #6
/** Cobbled together from:
 http://msdn.microsoft.com/en-us/library/dd757929(v=vs.85).aspx
 and http://msdn.microsoft.com/en-us/library/dd317928(VS.85).aspx
 -- Albert
 If anything in here fails, just bail. I'm not going to decode HRESULTS.
 -- Bill
 */
bool SoundSourceMediaFoundation::configureAudioStream(const AudioSourceConfig& audioSrcCfg) {
    HRESULT hr;

    // deselect all streams, we only want the first
    hr = m_pSourceReader->SetStreamSelection(
            MF_SOURCE_READER_ALL_STREAMS, false);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to deselect all streams";
        return false;
    }

    hr = m_pSourceReader->SetStreamSelection(
            kStreamIndex, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to select first audio stream";
        return false;
    }

    IMFMediaType* pAudioType = nullptr;

    hr = m_pSourceReader->GetCurrentMediaType(
            kStreamIndex, &pAudioType);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get current media type from stream";
        return false;
    }

    //------ Get bitrate from the file, before we change it to get uncompressed audio
    UINT32 avgBytesPerSecond;

    hr = pAudioType->GetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &avgBytesPerSecond);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "error getting MF_MT_AUDIO_AVG_BYTES_PER_SECOND";
        return false;
    }

    setBitrate( (avgBytesPerSecond * 8) / 1000);
    //------

    hr = pAudioType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set major type to audio";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set subtype format to float";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set all samples independent";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set fixed size samples";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetUINT32(
            MF_MT_AUDIO_BITS_PER_SAMPLE, kBitsPerSample);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set bits per sample:"
                << kBitsPerSample;
        safeRelease(&pAudioType);
        return false;
    }

    const UINT sampleSize = kLeftoverSize * kBytesPerSample;
    hr = pAudioType->SetUINT32(
            MF_MT_SAMPLE_SIZE, sampleSize);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set sample size:"
                << sampleSize;
        safeRelease(&pAudioType);
        return false;
    }

    UINT32 numChannels;
    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_NUM_CHANNELS, &numChannels);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get actual number of channels";
        safeRelease(&pAudioType);
        return false;
    } else {
        qDebug() << "Number of channels in input stream" << numChannels;
    }
    if (audioSrcCfg.hasValidChannelCount()) {
        numChannels = audioSrcCfg.getChannelCount();
        hr = pAudioType->SetUINT32(
                MF_MT_AUDIO_NUM_CHANNELS, numChannels);
        if (FAILED(hr)) {
            qWarning() << kLogPreamble << hr
                    << "failed to set number of channels:"
                    << numChannels;
            safeRelease(&pAudioType);
            return false;
        }
        qDebug() << "Requested number of channels" << numChannels;
    }

    UINT32 samplesPerSecond;
    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_SAMPLES_PER_SECOND, &samplesPerSecond);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get samples per second";
        safeRelease(&pAudioType);
        return false;
    } else {
        qDebug() << "Samples per second in input stream" << samplesPerSecond;
    }
    if (audioSrcCfg.hasValidSamplingRate()) {
        samplesPerSecond = audioSrcCfg.getSamplingRate();
        hr = pAudioType->SetUINT32(
                MF_MT_AUDIO_SAMPLES_PER_SECOND, samplesPerSecond);
        if (FAILED(hr)) {
            qWarning() << kLogPreamble << hr
                    << "failed to set samples per second:"
                    << samplesPerSecond;
            safeRelease(&pAudioType);
            return false;
        }
        qDebug() << "Requested samples per second" << samplesPerSecond;
    }

    // Set this type on the source reader. The source reader will
    // load the necessary decoder.
    hr = m_pSourceReader->SetCurrentMediaType(
            kStreamIndex, nullptr, pAudioType);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set media type";
        safeRelease(&pAudioType);
        return false;
    }

    // Finally release the reference before reusing the pointer
    safeRelease(&pAudioType);

    // Get the resulting output format.
    hr = m_pSourceReader->GetCurrentMediaType(
            kStreamIndex, &pAudioType);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to retrieve completed media type";
        return false;
    }

    // Ensure the stream is selected.
    hr = m_pSourceReader->SetStreamSelection(
            kStreamIndex, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to select first audio stream (again)";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_NUM_CHANNELS, &numChannels);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get actual number of channels";
        safeRelease(&pAudioType);
        return false;
    }
    setChannelCount(numChannels);

    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_SAMPLES_PER_SECOND, &samplesPerSecond);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get the actual sample rate";
        safeRelease(&pAudioType);
        return false;
    }
    setSamplingRate(samplesPerSecond);

    UINT32 leftoverBufferSizeInBytes = 0;
    hr = pAudioType->GetUINT32(MF_MT_SAMPLE_SIZE, &leftoverBufferSizeInBytes);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get sample buffer size (in bytes)";
        safeRelease(&pAudioType);
        return false;
    }
    DEBUG_ASSERT((leftoverBufferSizeInBytes % kBytesPerSample) == 0);
    m_sampleBuffer.resetCapacity(leftoverBufferSizeInBytes / kBytesPerSample);
    DEBUG_ASSERT(m_sampleBuffer.getCapacity() > 0);
    qDebug() << kLogPreamble
            << "Sample buffer capacity"
            << m_sampleBuffer.getCapacity();

            
    // Finally release the reference
    safeRelease(&pAudioType);

    return true;
}
Example #7
//=============================================================================
// destructor
//=============================================================================
TextDX::~TextDX()
{
    safeRelease(dxFont);
}
Example #8
SINT SoundSourceMediaFoundation::seekSampleFrame(
        SINT frameIndex) {
    DEBUG_ASSERT(isValidFrameIndex(m_currentFrameIndex));

    if (frameIndex >= getMaxFrameIndex()) {
        // EOF
        m_currentFrameIndex = getMaxFrameIndex();
        return m_currentFrameIndex;
    }

    if (frameIndex > m_currentFrameIndex) {
        // seeking forward
        SINT skipFramesCount = frameIndex - m_currentFrameIndex;
        // When to prefer skipping over seeking:
        // 1) The sample buffer would be discarded before seeking anyway and
        //    skipping those already decoded samples effectively costs nothing
        // 2) After seeking we need to decode at least kNumberOfPrefetchFrames
        //    before reaching the actual target position -> Only seek if we
        //    need to decode more than  2 * kNumberOfPrefetchFrames frames
        //    while skipping
        SINT skipFramesCountMax =
                samples2frames(m_sampleBuffer.getSize()) +
                2 * kNumberOfPrefetchFrames;
        if (skipFramesCount <= skipFramesCountMax) {
            skipSampleFrames(skipFramesCount);
        }
    }
    if (frameIndex == m_currentFrameIndex) {
        return m_currentFrameIndex;
    }

    // Discard decoded samples
    m_sampleBuffer.reset();

    // Invalidate current position (end of stream)
    m_currentFrameIndex = getMaxFrameIndex();

    if (m_pSourceReader == nullptr) {
        // reader is dead
        return m_currentFrameIndex;
    }

    // Jump to a position before the actual seeking position.
    // Prefetching a certain number of frames is necessary for
    // sample accurate decoding. The decoder needs to decode
    // some frames in advance to produce the same result at
    // each position in the stream.
    SINT seekIndex = std::max(SINT(frameIndex - kNumberOfPrefetchFrames), AudioSource::getMinFrameIndex());

    LONGLONG seekPos = m_streamUnitConverter.fromFrameIndex(seekIndex);
    DEBUG_ASSERT(seekPos >= 0);
    PROPVARIANT prop;
    HRESULT hrInitPropVariantFromInt64 =
            InitPropVariantFromInt64(seekPos, &prop);
    DEBUG_ASSERT(SUCCEEDED(hrInitPropVariantFromInt64)); // never fails
    HRESULT hrSetCurrentPosition =
            m_pSourceReader->SetCurrentPosition(GUID_NULL, prop);
    PropVariantClear(&prop);
    if (SUCCEEDED(hrSetCurrentPosition)) {
        // NOTE(uklotzde): After SetCurrentPosition() the actual position
        // of the stream is unknown until reading the next samples from
        // the reader. Please note that the first sample decoded after
        // SetCurrentPosition() may start BEFORE the actual target position.
        // See also: https://msdn.microsoft.com/en-us/library/windows/desktop/dd374668(v=vs.85).aspx
        //   "The SetCurrentPosition method does not guarantee exact seeking." ...
        //   "After seeking, the application should call IMFSourceReader::ReadSample
        //    and advance to the desired position.
        SINT skipFramesCount = frameIndex - seekIndex;
        if (skipFramesCount > 0) {
            // We need to fetch at least 1 sample from the reader to obtain the
            // current position!
            skipSampleFrames(skipFramesCount);
            // Now m_currentFrameIndex reflects the actual position of the reader
            if (m_currentFrameIndex < frameIndex) {
                // Skip more samples if frameIndex has not yet been reached
                skipSampleFrames(frameIndex - m_currentFrameIndex);
            }
            if (m_currentFrameIndex != frameIndex) {
                qWarning() << kLogPreamble
                        << "Seek to frame"
                        << frameIndex
                        << "failed";
                // Jump to end of stream (= invalidate current position)
                m_currentFrameIndex = getMaxFrameIndex();
            }
        } else {
            // We are at the beginning of the stream and don't need
            // to skip any frames. Calling IMFSourceReader::ReadSample
            // is not necessary in this special case.
            DEBUG_ASSERT(frameIndex == AudioSource::getMinFrameIndex());
            m_currentFrameIndex = frameIndex;
        }
    } else {
        qWarning() << kLogPreamble
                << "IMFSourceReader::SetCurrentPosition() failed"
                << hrSetCurrentPosition;
        safeRelease(&m_pSourceReader); // kill the reader
    }

    return m_currentFrameIndex;
}
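To make the skip-versus-seek threshold described in the comment inside seekSampleFrame() concrete, here is a small worked example with made-up numbers (the real value of kNumberOfPrefetchFrames belongs to the original source and is not shown in this listing):

// Worked example with hypothetical numbers for a stereo (2-channel) stream:
//   m_sampleBuffer currently holds 8192 decoded samples
//       samples2frames(8192) = 8192 / 2 = 4096 frames already buffered
//   assume kNumberOfPrefetchFrames = 2048
//       skipFramesCountMax = 4096 + 2 * 2048 = 8192 frames
// A forward seek of up to 8192 frames is served by skipSampleFrames(), i.e. by
// decoding and discarding, because a real SetCurrentPosition() seek would throw
// away the 4096 buffered frames and still have to decode roughly 2048 prefetch
// frames before reaching the target; only longer jumps justify an actual seek.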
Example #9
SINT SoundSourceMediaFoundation::readSampleFrames(
        SINT numberOfFrames, CSAMPLE* sampleBuffer) {

    SINT numberOfFramesRemaining = numberOfFrames;
    CSAMPLE* pSampleBuffer = sampleBuffer;

    while (numberOfFramesRemaining > 0) {
        SampleBuffer::ReadableChunk readableChunk(
                m_sampleBuffer.readFromHead(
                        frames2samples(numberOfFramesRemaining)));
        DEBUG_ASSERT(readableChunk.size()
                <= frames2samples(numberOfFramesRemaining));
        if (readableChunk.size() > 0) {
            DEBUG_ASSERT(m_currentFrameIndex < getMaxFrameIndex());
            if (sampleBuffer != nullptr) {
                SampleUtil::copy(
                        pSampleBuffer,
                        readableChunk.data(),
                        readableChunk.size());
                pSampleBuffer += readableChunk.size();
            }
            m_currentFrameIndex += samples2frames(readableChunk.size());
            numberOfFramesRemaining -= samples2frames(readableChunk.size());
        }
        if (numberOfFramesRemaining == 0) {
            break; // finished reading
        }

        // No more decoded sample frames available
        DEBUG_ASSERT(m_sampleBuffer.isEmpty());

        if (m_pSourceReader == nullptr) {
            break; // abort if reader is dead
        }

        DWORD dwFlags = 0;
        LONGLONG streamPos = 0;
        IMFSample* pSample = nullptr;
        HRESULT hrReadSample =
                m_pSourceReader->ReadSample(
                        kStreamIndex, // [in]  DWORD dwStreamIndex,
                        0,            // [in]  DWORD dwControlFlags,
                        nullptr,      // [out] DWORD *pdwActualStreamIndex,
                        &dwFlags,     // [out] DWORD *pdwStreamFlags,
                        &streamPos,   // [out] LONGLONG *pllTimestamp,
                        &pSample);    // [out] IMFSample **ppSample
        if (FAILED(hrReadSample)) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample() failed"
                    << hrReadSample
                    << "-> abort decoding";
            DEBUG_ASSERT(pSample == nullptr);
            break; // abort
        }
        if (dwFlags & MF_SOURCE_READERF_ERROR) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample()"
                    << "detected stream errors"
                    << "(MF_SOURCE_READERF_ERROR)"
                    << "-> abort and stop decoding";
            DEBUG_ASSERT(pSample == nullptr);
            safeRelease(&m_pSourceReader); // kill the reader
            break; // abort
        } else if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
            DEBUG_ASSERT(pSample == nullptr);
            break; // finished reading
        } else if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample()"
                    << "detected that the media type has changed"
                    << "(MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)"
                    << "-> abort decoding";
            DEBUG_ASSERT(pSample == nullptr);
            break; // abort
        }
        DEBUG_ASSERT(pSample != nullptr);
        SINT readerFrameIndex = m_streamUnitConverter.toFrameIndex(streamPos);
        DEBUG_ASSERT(
                (m_currentFrameIndex == getMaxFrameIndex()) || // unknown position after seeking
                (m_currentFrameIndex == readerFrameIndex));
        m_currentFrameIndex = readerFrameIndex;

        DWORD dwSampleBufferCount = 0;
        HRESULT hrGetBufferCount =
                pSample->GetBufferCount(&dwSampleBufferCount);
        if (FAILED(hrGetBufferCount)) {
            qWarning() << kLogPreamble
                    << "IMFSample::GetBufferCount() failed"
                    << hrGetBufferCount
                    << "-> abort decoding";
            safeRelease(&pSample);
            break; // abort
        }

        DWORD dwSampleTotalLengthInBytes = 0;
        HRESULT hrGetTotalLength = pSample->GetTotalLength(&dwSampleTotalLengthInBytes);
        if (FAILED(hrGetTotalLength)) {
            qWarning() << kLogPreamble
                    << "IMFSample::GetTotalLength() failed"
                    << hrGetTotalLength
                    << "-> abort decoding";
            safeRelease(&pSample);
            break; // abort
        }
        // Enlarge temporary buffer (if necessary)
        DEBUG_ASSERT((dwSampleTotalLengthInBytes % kBytesPerSample) == 0);
        SINT numberOfSamplesToBuffer =
            dwSampleTotalLengthInBytes / kBytesPerSample;
        SINT sampleBufferCapacity = m_sampleBuffer.getCapacity();
        DEBUG_ASSERT(sampleBufferCapacity > 0);
        while (sampleBufferCapacity < numberOfSamplesToBuffer) {
            sampleBufferCapacity *= 2;
        }
        if (m_sampleBuffer.getCapacity() < sampleBufferCapacity) {
            qDebug() << kLogPreamble
                    << "Enlarging sample buffer capacity"
                    << m_sampleBuffer.getCapacity()
                    << "->"
                    << sampleBufferCapacity;
            m_sampleBuffer.resetCapacity(sampleBufferCapacity);
        }

        DWORD dwSampleBufferIndex = 0;
        while (dwSampleBufferIndex < dwSampleBufferCount) {
            IMFMediaBuffer* pMediaBuffer = nullptr;
            HRESULT hrGetBufferByIndex = pSample->GetBufferByIndex(dwSampleBufferIndex, &pMediaBuffer);
            if (FAILED(hrGetBufferByIndex)) {
                qWarning() << kLogPreamble
                        << "IMFSample::GetBufferByIndex() failed"
                        << hrGetBufferByIndex
                        << "-> abort decoding";
                DEBUG_ASSERT(pMediaBuffer == nullptr);
                break; // prematurely exit buffer loop
            }

            CSAMPLE* pLockedSampleBuffer = nullptr;
            DWORD lockedSampleBufferLengthInBytes = 0;
            HRESULT hrLock = pMediaBuffer->Lock(
                    reinterpret_cast<quint8**>(&pLockedSampleBuffer),
                    nullptr,
                    &lockedSampleBufferLengthInBytes);
            if (FAILED(hrLock)) {
                qWarning() << kLogPreamble
                        << "IMFMediaBuffer::Lock() failed"
                        << hrLock
                        << "-> abort decoding";
                safeRelease(&pMediaBuffer);
                break; // prematurely exit buffer loop
            }

            DEBUG_ASSERT((lockedSampleBufferLengthInBytes % sizeof(pLockedSampleBuffer[0])) == 0);
            SINT lockedSampleBufferCount =
                    lockedSampleBufferLengthInBytes / sizeof(pLockedSampleBuffer[0]);
            SINT copySamplesCount = std::min(
                    frames2samples(numberOfFramesRemaining),
                    lockedSampleBufferCount);
            if (copySamplesCount > 0) {
                // Copy samples directly into output buffer if possible
                if (pSampleBuffer != nullptr) {
                    SampleUtil::copy(
                            pSampleBuffer,
                            pLockedSampleBuffer,
                            copySamplesCount);
                    pSampleBuffer += copySamplesCount;
                }
                pLockedSampleBuffer += copySamplesCount;
                lockedSampleBufferCount -= copySamplesCount;
                m_currentFrameIndex += samples2frames(copySamplesCount);
                numberOfFramesRemaining -= samples2frames(copySamplesCount);
            }
            // Buffer the remaining samples
            SampleBuffer::WritableChunk writableChunk(
                    m_sampleBuffer.writeToTail(lockedSampleBufferCount));
            // The required capacity has been calculated in advance (see above)
            DEBUG_ASSERT(writableChunk.size() == lockedSampleBufferCount);
            SampleUtil::copy(
                    writableChunk.data(),
                    pLockedSampleBuffer,
                    writableChunk.size());
            HRESULT hrUnlock = pMediaBuffer->Unlock();
            VERIFY_OR_DEBUG_ASSERT(SUCCEEDED(hrUnlock)) {
                qWarning() << kLogPreamble
                        << "IMFMediaBuffer::Unlock() failed"
                        << hrUnlock;
                // ignore and continue
            }
            safeRelease(&pMediaBuffer);
            ++dwSampleBufferIndex;
        }
        safeRelease(&pSample);
        if (dwSampleBufferIndex < dwSampleBufferCount) {
            // Failed to read data from all buffers -> kill the reader
            qWarning() << kLogPreamble
                    << "Failed to read all buffered samples"
                    << "-> abort and stop decoding";
            safeRelease(&m_pSourceReader);
            break; // abort
        }
    }

    return numberOfFrames - numberOfFramesRemaining;
}
Example #10
void ChangePartManager::setWeaponPos(WeaponPosition pos)
{
	SPersonMainPartChangeContext &  context = m_CurrentPart[EEntityPart_Weapon];
	IEntityClient* pEntityClient = gGlobalClient->getEntityClient();
	if (NULL == pEntityClient)
	{
		return;
	}
	ISchemeCenter* pSchemeCenter = pEntityClient->GetSchemeCenter();
	if (!pSchemeCenter)
	{
		return;
	}
	m_weaponBindPos = pos;
	if( !context.perform) // no weapon is equipped
	{
		if( m_weaponBindPos == WeaponPos_Back)
		{
			// special handling for scroll-type weapons
			if( context.weaponsubclass == EQUIPMENT_SUBCLASS_SORCERER_WEAPON_1_CLASS)
			{
				for( uint i=0; i<2;++i)
				{
					if( context.bindPoint[i] == EBindPoint_LeftHand)
						context.bindPoint[i] = EBindPoint_RightBack ;
					else if( context.bindPoint[i] == EBindPoint_RightHand)
						context.bindPoint[i] = EBindPoint_LeftBack;
				}
			}
			else
			{
				for( uint i=0; i<2;++i)
				{
					if( context.bindPoint[i] == EBindPoint_LeftHand)
						context.bindPoint[i] = EBindPoint_LeftBack;
					else if( context.bindPoint[i] == EBindPoint_RightHand)
						context.bindPoint[i] = EBindPoint_RightBack;
				}
			}

		}
		else
		{
			// special handling for scroll-type weapons
			if( context.weaponsubclass == EQUIPMENT_SUBCLASS_SORCERER_WEAPON_1_CLASS)
			{
				for( uint i =0; i<2; ++i)
				{
					if( context.bindPoint[i] == EBindPoint_LeftBack)
						context.bindPoint[i] = EBindPoint_RightHand ;
					else if( context.bindPoint[i] == EBindPoint_RightBack)
						context.bindPoint[i] = EBindPoint_LeftHand;
				}
			}
			else
			{
				for( uint i =0; i<2; ++i)
				{
					if( context.bindPoint[i] == EBindPoint_LeftBack)
						context.bindPoint[i] = EBindPoint_LeftHand;
					else if( context.bindPoint[i] == EBindPoint_RightBack)
						context.bindPoint[i] = EBindPoint_RightHand;
				}
			}		
		}
		return;
	}
	else
	{
		ModelNode* pNode[2];
		memset( &pNode[0], 0, sizeof(pNode));
		if(m_pModelNode) // remove the currently attached weapon nodes
		{		
			for( uint i=0; i<2;++i)
			{
				const char* pchar = pSchemeCenter->getBindPoint(context.bindPoint[i]);
				pNode[i] = m_pModelNode->getFirstChildNodeByBone(pchar);
				if( pNode[i])
				{
					m_pModelNode->removeChild(pNode[i]);
					pNode[i]->setPosition(xs::Vector3::ZERO);
					pNode[i]->setOrientation(xs::Quaternion::IDENTITY);
				}
			}
		}

		// bind to the new attachment points
		if( m_weaponBindPos == WeaponPos_Back)
		{
			for( uint i=0; i<2;++i)
			{
				if( context.bindPoint[i] == EBindPoint_LeftHand)
					context.bindPoint[i] = EBindPoint_RightBack;
				else if( context.bindPoint[i] == EBindPoint_RightHand)
					context.bindPoint[i] = EBindPoint_LeftBack;
			}

			////special handling for scroll-type weapons
			//if( context.weaponsubclass == EQUIPMENT_SUBCLASS_SORCERER_WEAPON_1_CLASS)
			//{
			//	for( uint i=0; i<2;++i)
			//	{
			//		if( context.bindPoint[i] == EBindPoint_LeftHand)
			//			context.bindPoint[i] = EBindPoint_RightBack ;
			//		else if( context.bindPoint[i] == EBindPoint_RightHand)
			//			context.bindPoint[i] = EBindPoint_LeftBack;
			//	}
			//}
			//else
			//{
			//	for( uint i=0; i<2;++i)
			//	{
			//		if( context.bindPoint[i] == EBindPoint_LeftHand)
			//			context.bindPoint[i] = EBindPoint_LeftBack;
			//		else if( context.bindPoint[i] == EBindPoint_RightHand)
			//			context.bindPoint[i] = EBindPoint_RightBack;
			//	}
			//}		
		}
		else
		{
			for( uint i =0; i<2; ++i)
			{
				if( context.bindPoint[i] == EBindPoint_LeftBack)
					context.bindPoint[i] = EBindPoint_RightHand;
				else if( context.bindPoint[i] == EBindPoint_RightBack)
					context.bindPoint[i] = EBindPoint_LeftHand;
			}
			//special handling for scroll-type weapons
			//if( context.weaponsubclass == EQUIPMENT_SUBCLASS_SORCERER_WEAPON_1_CLASS)
			//{
			//	for( uint i =0; i<2; ++i)
			//	{
			//		if( context.bindPoint[i] == EBindPoint_LeftBack)
			//			context.bindPoint[i] = EBindPoint_RightHand;
			//		else if( context.bindPoint[i] == EBindPoint_RightBack)
			//			context.bindPoint[i] =  EBindPoint_LeftHand;
			//	}
			//}
			//else
			//{
			//	for( uint i =0; i<2; ++i)
			//	{
			//		if( context.bindPoint[i] == EBindPoint_LeftBack)
			//			context.bindPoint[i] = EBindPoint_LeftHand;
			//		else if( context.bindPoint[i] == EBindPoint_RightBack)
			//			context.bindPoint[i] = EBindPoint_RightHand;
			//	}
			//}	
		}

		if(m_pModelNode)
		{
			for(uint i=0; i<2;++i)
			{
				if( !pNode[i]) continue;
				bool ret = m_pModelNode->addChild(pNode[i], gGlobalClient->getEntityClient()->GetSchemeCenter()->getBindPoint(context.bindPoint[i]));
				if(!ret)
				{
					safeRelease(pNode[i]);
				}
				else
				{
					if( m_weaponBindPos == WeaponPos_Back)
					{
						// set the weapon's orientation
						if(context.weaponsubclass == EQUIPMENT_SUBCLASS_WARRIOR_WEAPON_2_CLASS)
						{
							xs::Quaternion rotx;
							rotx.FromAngleAxis(180.0f, Vector3(1.0f, 0.0f, 0.0f));
							xs::Quaternion roty;
							roty.FromAngleAxis(180.0f, Vector3(0.0f,1.0f,0.0f));
							xs::Quaternion rot = roty * rotx;
							pNode[i]->setOrientation(rot);
							xs::Vector3 origin = xs::Vector3::ZERO;
							origin.y += pNode[i]->getModelInstance()->getBoundingSphere().getCenter().y;
							pNode[i]->setPosition(origin);
						}
						else if (context.weaponsubclass == EQUIPMENT_SUBCLASS_ARCHER_WEAPON_1_CLASS)
						{
							// Adjust the archer weapon's orientation: the bow opening now faces the Z axis, whereas the old default faced the X axis
							xs::Quaternion roty;
							roty.FromAngleAxis(90.0f, Vector3(0.0f,1.0f,0.0f));
							pNode[i]->setOrientation(roty);

							xs::Vector3 origin = xs::Vector3::ZERO;
							origin.y -= pNode[i]->getModelInstance()->getBoundingSphere().getCenter().y;
							pNode[i]->setPosition(origin);
						}
						else
						{
							xs::Vector3 origin = xs::Vector3::ZERO;
							origin.y -= pNode[i]->getModelInstance()->getBoundingSphere().getCenter().y;
							pNode[i]->setPosition(origin);
						}
						
					}
					else
					{
						// Adjust the archer weapon's orientation: the bow opening now faces the Z axis, whereas the old default faced the X axis
						if (context.weaponsubclass == EQUIPMENT_SUBCLASS_ARCHER_WEAPON_1_CLASS)
						{						
							xs::Quaternion roty;
							roty.FromAngleAxis(90.0f, Vector3(0.0f,1.0f,0.0f));
							pNode[i]->setOrientation(roty);
						}
						else
						{
							pNode[i]->setOrientation(xs::Quaternion::IDENTITY);
						}
						pNode[i]->setPosition(xs::Vector3::ZERO);
					}
				}
			}	
		}
	}
}
Example #11
	bool Font::create(FontManager* pFontMgr, 
		IRenderSystem* pRenderSystem,
		const std::string & name, 
		const std::string & szFontPath,
		uint size,
		FontType fontType)
	{
		if( 0 == pFontMgr)
			return false;

		if(0 == pRenderSystem)
			return false;

		if( name.empty() || szFontPath.empty())
			return false;


		// initialize members
		m_pRenderSystem = pRenderSystem;
		m_ui32Size = size;
		m_type = fontType;
		m_name = name;
		m_strFontName = szFontPath;
		m_pFontManager = pFontMgr;

		// create the font
		xs::autostring wszFontPath(szFontPath.c_str());
		switch(fontType)
		{
		case FontType_GDI:
			{
				/*
				GDITextureFont* pGDIFont = new GDITextureFont();
				if( !pGDIFont->create(pRenderSystem, wszFontPath.c_wstr(), size) )
				{
					safeRelease(pGDIFont);
					return false;
				}
				else
				{
					m_pFTFont = pGDIFont;
				}
				*/
				return false;
			}
			break;
		case FontType_PIX:
			{
				PixTextureFont* pPixFont = new PixTextureFont();
				if( !pPixFont->create(pRenderSystem, wszFontPath.c_wstr(), size) )
				{
					safeRelease(pPixFont);
					return false;
				}
				else
				{
					m_pFTFont = pPixFont;
				}
			}
			break;
		default:
			return false;
		}
		m_pFontManager->registerFont(this);
		
		return true;
	}
Example #12
void KLController::run()
{
    isStop = false;
    while(!isStop) {

        if(!isAvailable()) {
            break; // break rather than continue, otherwise the loop would keep using CPU
        }

        HRESULT hr;
        if(sourceMarker == SOURCE_TYPE::S_MULTI) {
            continue;
        }

        /* color source */
        if(sourceMarker & SOURCE_TYPE::S_COLOR) {
            if(!colorReader) {
                hr = sensor->get_ColorFrameSource(&colorSource);
                if(SUCCEEDED(hr)) {
                    safeRelease(colorReader);
                    hr = colorSource->OpenReader(&colorReader);
                } else {
                    emit _hrError(hr);
                    colorReader = NULL;
                }
                if(SUCCEEDED(hr)) {
                    safeRelease(colorDesc);
                    hr = colorSource->CreateFrameDescription(ColorImageFormat::ColorImageFormat_Rgba, &colorDesc);
                } else {
                    emit _hrError(hr);
                    colorReader = NULL;
                }
                if(SUCCEEDED(hr)) {
                    //
                    colorDesc->get_BytesPerPixel(&colorBytesPerPixel);
                    colorDesc->get_Height(&colorHeight);
                    colorDesc->get_Width(&colorWidth);
                    colorBuffer = new QVector<BYTE>(colorHeight * colorWidth * colorBytesPerPixel);

                    emit _readerInfo(true, SOURCE_TYPE::S_COLOR);
                } else {
                    emit _hrError(hr);
                    colorReader = NULL;
                }
            }
            if(colorReader) {
                KLComPtr<IColorFrame> colorFrame;
                hr = colorReader->AcquireLatestFrame(&colorFrame);
                if(SUCCEEDED(hr)) {
                    colorFrame->CopyConvertedFrameDataToArray(colorBuffer->size(), (colorBuffer->data()), ColorImageFormat::ColorImageFormat_Rgba);
                    emit _data(colorBuffer, RESOURCE_TYPE::R_COLOR);
                } else {
                    emit _hrError(hr);
                }
            }
        } else if(colorReader) {
            safeRelease(colorSource);
            safeRelease(colorReader);
            safeRelease(colorDesc);
            qDebug()<<"dfd";
            emit _readerInfo(false, SOURCE_TYPE::S_COLOR);
        }

        /* body source */
        if(sourceMarker & SOURCE_TYPE::S_BODY) {
            if(!bodyReader) {
                KLComPtr<IBodyFrameSource> bodySource;
                hr = sensor->get_BodyFrameSource(&bodySource);
                if(SUCCEEDED(hr)) {
                    safeRelease(bodyReader);
                    hr = bodySource->OpenReader(&bodyReader);
                } else {
                    emit _hrError(hr);
                    safeRelease(bodyReader);
                }

                if(SUCCEEDED(hr)) {
                    emit _readerInfo(true, SOURCE_TYPE::S_BODY);
                } else {
                    emit _hrError(hr);
                    safeRelease(bodyReader);
                }
            }
            if(bodyReader) {
                KLComPtr<IBodyFrame> bodyFrame;
                hr = bodyReader->AcquireLatestFrame(&bodyFrame);
                if(SUCCEEDED(hr)) {
                    hr = bodyFrame->GetAndRefreshBodyData(_countof(bodies), bodies);
                } else {
                    emit _hrError(hr);
                }
            }
        } else if(bodyReader) {
            safeRelease(bodyReader);
            for(int i = 0; i < BODY_COUNT; i++) {
                safeRelease(bodies[i]);
            }
            emit _readerInfo(false, SOURCE_TYPE::S_BODY);
        }

        /* face source */
        if(sourceMarker & SOURCE_TYPE::S_FACE_HD) {
            for(int i = 0; i < BODY_COUNT; i++) {
                faceData[i]->reset();
            }
            if(!faceHDReaders[0]) {
                bool hasError = false;
                for(int i = 0; i < BODY_COUNT; i++) {
                    hr = CreateHighDefinitionFaceFrameSource(sensor, &faceHDSources[i]);
                    if(SUCCEEDED(hr)) {
                        safeRelease(faceHDReaders[i]);
                        hr = faceHDSources[i]->OpenReader(&faceHDReaders[i]);
                    } else {
                        hasError = true;
                        break;
                    }
                    if(SUCCEEDED(hr)) {
                        continue;
                        //emit _readerInfo(true, SOURCE_TYPE::S_FACE_HD);
                    } else {
                        hasError = true;
                        break;
                    }
                }
                if(hasError) {
                    for(int i = 0; i < BODY_COUNT; i++) {
                        safeRelease(faceHDReaders[i]);
                        safeRelease(faceHDSources[i]);
                    }
                    emit _hrError(hr);
                } else {
                    emit _readerInfo(true, SOURCE_TYPE::S_FACE_HD);
                }
            }

            if(faceHDReaders[0]) {

                bool hasValidFaceTrack = false;

                for(int i = 0; i < BODY_COUNT; i++) {
                    BOOLEAN isFaceTracked = false;

                    //faceData[i]->frameHD = NULL;
                    hr = faceHDReaders[i]->AcquireLatestFrame(&faceHDFrames[i]);

                    if(SUCCEEDED(hr)) {
                        //qDebug()<<"huuray!!";
                        hr = faceHDFrames[i]->get_IsTrackingIdValid(&isFaceTracked);
                    }
                    if(SUCCEEDED(hr)) {
                        faceData[i]->frameHD = faceHDFrames[i];
                        faceData[i]->sourceHD = faceHDSources[i];
                        faceData[i]->readerHD = faceHDReaders[i];
                        faceData[i]->index = i;
                    }

                    if(!isFaceTracked) {
                        safeRelease(faceHDFrames[i]);
                        if(bodyReader) {
                            if(bodies[i] != NULL) {
                                BOOLEAN isBodyTracked = false;
                                hr = bodies[i]->get_IsTracked(&isBodyTracked);
                                if(SUCCEEDED(hr)) {
                                    if(isBodyTracked) {
                                        UINT64 bodyID;
                                        hr = bodies[i]->get_TrackingId(&bodyID);
                                        if(SUCCEEDED(hr)) {
                                            faceHDSources[i]->put_TrackingId(bodyID);
                                            faceData[i]->trackID = bodyID;
                                        }
                                    }
                                }
                            }
                        } else {
                            // open body source
                            sourceMarker |= SOURCE_TYPE::S_BODY;
                        }
                    } else {
                        hasValidFaceTrack = true;
                        faceData[i]->isValid = true;
                    }
                }

                if(hasValidFaceTrack) {
                    //qDebug()<<"p1";
                    emit _data(faceData, RESOURCE_TYPE::R_FACE_HD);
                }
            }
        } else if(faceHDReaders[0]) {
            for(int i = 0; i < BODY_COUNT; i++) {
                safeRelease(faceHDReaders[i]);
                safeRelease(faceHDFrames[i]);
                safeRelease(faceHDSources[i]);
                faceData[i]->reset();
            }
            emit _readerInfo(false, SOURCE_TYPE::S_FACE_HD);
        }

        if(sourceMarker == SOURCE_TYPE::S_NONE) {
            break;
        }

        usleep(floor((1.0 / fps) * 1000 * 1000));

        emit _pulse();
    }
}
Example #13
bool LightShader::initializeShader(ID3D11Device * device, HWND hwnd, const WCHAR * vsFilename, const WCHAR * psFilename) {

	ID3D10Blob* errorBlob = nullptr;
	ID3D10Blob* vertexShaderBlob = nullptr;
	ID3D10Blob* pixelShaderBlob = nullptr;


	UINT flags = D3DCOMPILE_ENABLE_STRICTNESS;
#if _DEBUG
	flags |= D3DCOMPILE_DEBUG;
#endif

	// Compile the vertex shader code.
	if (FAILED(D3DCompileFromFile(vsFilename, NULL, D3D_COMPILE_STANDARD_FILE_INCLUDE,
		"DiffuseLightVertexShader", Globals::VERTEX_SHADER_VERSION,
		flags, 0, &vertexShaderBlob, &errorBlob))) {
		if (errorBlob) {
			outputShaderErrorMessage(errorBlob, hwnd, vsFilename);
		} else {
			MessageBox(hwnd, vsFilename, L"Missing Vertex Shader File", MB_OK);
		}

		return false;
	}

	if (FAILED(D3DCompileFromFile(psFilename, NULL, D3D_COMPILE_STANDARD_FILE_INCLUDE,
		"DiffuseLightPixelShader", Globals::PIXEL_SHADER_VERSION,
		flags, 0, &pixelShaderBlob, &errorBlob))) {
		if (errorBlob) {
			outputShaderErrorMessage(errorBlob, hwnd, psFilename);
		} else {
			MessageBox(hwnd, psFilename, L"Missing Pixel Shader File", MB_OK);
		}

		return false;
	}

	// Create the vertex shader from the buffer.
	if (FAILED(device->CreateVertexShader(vertexShaderBlob->GetBufferPointer(),
		vertexShaderBlob->GetBufferSize(), NULL, &vertexShader))) {
		MessageBox(NULL, L"Error creating Vertex Shader", L"ERROR", MB_OK);
		return false;
	}

	// Create the pixel shader from the buffer.
	if (FAILED(device->CreatePixelShader(pixelShaderBlob->GetBufferPointer(),
		pixelShaderBlob->GetBufferSize(), NULL, &pixelShader))) {
		MessageBox(NULL, L"Error creating Pixel Shader", L"ERROR", MB_OK);
		return false;
	}


	if (FAILED(initInputLayout(device, vertexShaderBlob))) {
		MessageBox(NULL, L"Error creating Input Layout Buffer", L"ERROR", MB_OK);
		return false;
	}

	safeRelease(vertexShaderBlob);
	safeRelease(pixelShaderBlob);


	if (FAILED(initMatrixBuffer(device))) {
		MessageBox(NULL, L"Error creating Constant (Matrix) Buffer", L"ERROR", MB_OK);
		return false;
	}

	if (FAILED(initSamplerState(device))) {
		MessageBox(NULL, L"Error creating Sampler Shader", L"ERROR", MB_OK);
		return false;
	}

	if (FAILED(initLightBuffer(device))) {
		return false;
	}

	return true;
}
Example #14
/** Cobbled together from:
    http://msdn.microsoft.com/en-us/library/dd757929(v=vs.85).aspx
    and http://msdn.microsoft.com/en-us/library/dd317928(VS.85).aspx
    -- Albert
    If anything in here fails, just bail. I'm not going to decode HRESULTS.
    -- Bill
    */
bool SoundSourceMediaFoundation::configureAudioStream()
{
    HRESULT hr(S_OK);

    // deselect all streams, we only want the first
    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_ALL_STREAMS, false);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to deselect all streams";
        return false;
    }

    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to select first audio stream";
        return false;
    }

    hr = MFCreateMediaType(&m_pAudioType);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to create media type";
        return false;
    }

    hr = m_pAudioType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set major type";
        return false;
    }

    hr = m_pAudioType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set subtype";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set samples independent";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set fixed size samples";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_SAMPLE_SIZE, kLeftoverSize);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set sample size";
        return false;
    }

    // MSDN for this attribute says that if bps is 8, samples are unsigned.
    // Otherwise, they're signed (so they're signed for us as 16 bps). Why they
    // chose to hide this rather useful tidbit here is beyond me -bkgood
    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, kBitsPerSample);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set bits per sample";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT,
        kNumChannels * (kBitsPerSample / 8));
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set block alignment";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, kNumChannels);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set number of channels";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, kSampleRate);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set sample rate";
        return false;
    }

    // Set this type on the source reader. The source reader will
    // load the necessary decoder.
    hr = m_pReader->SetCurrentMediaType(
        MF_SOURCE_READER_FIRST_AUDIO_STREAM,
        NULL, m_pAudioType);

    // the reader has the media type now, free our reference so we can use our
    // pointer for other purposes. Do this before checking for failure so we
    // don't dangle.
    safeRelease(&m_pAudioType);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set media type";
        return false;
    }

    // Get the complete uncompressed format.
    hr = m_pReader->GetCurrentMediaType(
        MF_SOURCE_READER_FIRST_AUDIO_STREAM,
        &m_pAudioType);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to retrieve completed media type";
        return false;
    }

    // Ensure the stream is selected.
    hr = m_pReader->SetStreamSelection(
        MF_SOURCE_READER_FIRST_AUDIO_STREAM,
        true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to select first audio stream (again)";
        return false;
    }

    // this may not be safe on all platforms as m_leftoverBufferSize is a
    // size_t and this function is writing a uint32. However, on 32-bit
    // Windows 7, size_t is defined as uint which is 32-bits, so we're safe
    // for all supported platforms -bkgood
    UINT32 leftoverBufferSize = 0;
    hr = m_pAudioType->GetUINT32(MF_MT_SAMPLE_SIZE, &leftoverBufferSize);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to get buffer size";
        return false;
    }
    m_leftoverBufferSize = static_cast<size_t>(leftoverBufferSize);
    m_leftoverBufferSize /= 2; // convert size in bytes to size in int16s
    m_leftoverBuffer = new qint16[m_leftoverBufferSize];

    return true;
}
Example #15
/** Cobbled together from:
 http://msdn.microsoft.com/en-us/library/dd757929(v=vs.85).aspx
 and http://msdn.microsoft.com/en-us/library/dd317928(VS.85).aspx
 -- Albert
 If anything in here fails, just bail. I'm not going to decode HRESULTS.
 -- Bill
 */
bool SoundSourceMediaFoundation::configureAudioStream(const Mixxx::AudioSourceConfig& audioSrcCfg) {
    HRESULT hr(S_OK);

    // deselect all streams, we only want the first
    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_ALL_STREAMS, false);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to deselect all streams";
        return false;
    }

    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
            true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to select first audio stream";
        return false;
    }

    hr = MFCreateMediaType(&m_pAudioType);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to create media type";
        return false;
    }

    hr = m_pAudioType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set major type";
        return false;
    }

    hr = m_pAudioType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set subtype";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set samples independent";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set fixed size samples";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_SAMPLE_SIZE, kLeftoverSize);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set sample size";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, kSampleRate);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set sample rate";
        return false;
    }

    // "Number of bits per audio sample in an audio media type."
    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, kBitsPerSample);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set bits per sample";
        return false;
    }

    if (isValidChannelCount(audioSrcCfg.channelCountHint)) {
        hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, audioSrcCfg.channelCountHint);
        if (FAILED(hr)) {
            qWarning() << "SSMF: failed to set number of channels";
            return false;
        }
        setChannelCount(audioSrcCfg.channelCountHint);
    } else {
        UINT32 numChannels = 0;
        hr = m_pAudioType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &numChannels);
        if (FAILED(hr) || (0 >= numChannels)) {
            qWarning() << "SSMF: failed to get number of channels";
            return false;
        }
        setChannelCount(numChannels);
    }

    // "...the block alignment is equal to the number of audio channels
    // multiplied by the number of bytes per audio sample."
    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT,
            frames2samples(sizeof(m_leftoverBuffer[0])));
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set block alignment";
        return false;
    }

    // Set this type on the source reader. The source reader will
    // load the necessary decoder.
    hr = m_pReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
            NULL, m_pAudioType);

    // the reader has the media type now, free our reference so we can use our
    // pointer for other purposes. Do this before checking for failure so we
    // don't dangle.
    safeRelease(&m_pAudioType);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to set media type";
        return false;
    }

    // Get the complete uncompressed format.
    hr = m_pReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
            &m_pAudioType);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to retrieve completed media type";
        return false;
    }

    // Ensure the stream is selected.
    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
            true);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to select first audio stream (again)";
        return false;
    }

    UINT32 leftoverBufferSize = 0;
    hr = m_pAudioType->GetUINT32(MF_MT_SAMPLE_SIZE, &leftoverBufferSize);
    if (FAILED(hr)) {
        qWarning() << "SSMF: failed to get buffer size";
        return false;
    }
    m_leftoverBufferSize = leftoverBufferSize;
    m_leftoverBufferSize /= sizeof(CSAMPLE); // convert size in bytes to sizeof(CSAMPLE)
    m_leftoverBuffer = new CSAMPLE[m_leftoverBufferSize];

    return true;
}
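The Mixxx snippets above and below call frames2samples() and samples2frames() without showing them. A minimal sketch, assuming these helpers do nothing more than convert between frame counts and interleaved sample counts via the channel count (in the real code they are presumably member functions that read the stored channel count rather than taking it as a parameter):

// Hypothetical free-function form of the frame/sample conversion helpers.
inline SINT frames2samples(SINT frames, SINT channelCount) {
    return frames * channelCount;   // one frame holds one sample per channel
}
inline SINT samples2frames(SINT samples, SINT channelCount) {
    return samples / channelCount;  // inverse of the above
}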
Example #16
Gem::~Gem()
{
    safeRelease(root);
}
int AudioDecoderMediaFoundation::read(int size, const SAMPLE *destination)
{
    assert(size < sizeof(m_destBufferShort));
    if (sDebug) { std::cout << "read() " << size << std::endl; }
    // TODO: Change this up if we want to support just short samples again -- Albert
    SHORT_SAMPLE *destBuffer = m_destBufferShort;
    size_t framesRequested(size / m_iChannels);
    size_t framesNeeded(framesRequested);

    // first, copy frames from leftover buffer IF the leftover buffer is at
    // the correct frame
    if (m_leftoverBufferLength > 0 && m_leftoverBufferPosition == m_nextFrame) {
        copyFrames(destBuffer, &framesNeeded, m_leftoverBuffer,
            m_leftoverBufferLength);
        if (m_leftoverBufferLength > 0) {
            if (framesNeeded != 0) {
                std::cerr << __FILE__ << __LINE__
                           << "WARNING: Expected frames needed to be 0. Abandoning this file.";
                m_dead = true;
            }
            m_leftoverBufferPosition += framesRequested;
        }
    } else {
        // leftoverBuffer already empty or in the wrong position, clear it
        m_leftoverBufferLength = 0;
    }

    while (!m_dead && framesNeeded > 0) {
        HRESULT hr(S_OK);
        DWORD dwFlags(0);
        __int64 timestamp(0);
        IMFSample *pSample(NULL);
        bool error(false); // set to true to break after releasing

        hr = m_pReader->ReadSample(
            MF_SOURCE_READER_FIRST_AUDIO_STREAM, // [in] DWORD dwStreamIndex,
            0,                                   // [in] DWORD dwControlFlags,
            NULL,                                // [out] DWORD *pdwActualStreamIndex,
            &dwFlags,                            // [out] DWORD *pdwStreamFlags,
            &timestamp,                          // [out] LONGLONG *pllTimestamp,
            &pSample);                           // [out] IMFSample **ppSample
        if (FAILED(hr)) {
            if (sDebug) { std::cout << "ReadSample failed." << std::endl; }
            break;
        }

        if (sDebug) {
            std::cout << "ReadSample timestamp: " << timestamp
                      << " frame: " << frameFromMF(timestamp)
                      << " dwflags: " << dwFlags
                      << std::endl;
        }

        if (dwFlags & MF_SOURCE_READERF_ERROR) {
            // our source reader is now dead, according to the docs
            std::cerr << "SSMF: ReadSample set ERROR, SourceReader is now dead";
            m_dead = true;
            break;
        } else if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
            std::cout << "SSMF: End of input file." << std::endl;
            break;
        } else if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
            std::cerr << "SSMF: Type change";
            break;
        } else if (pSample == NULL) {
            // generally this will happen when dwFlags contains ENDOFSTREAM,
            // so it'll be caught before now -bkgood
            std::cerr << "SSMF: No sample";
            continue;
        } // we now own a ref to the instance at pSample

        IMFMediaBuffer *pMBuffer(NULL);
        // I know this does at least a memcopy and maybe a malloc, if we have
        // xrun issues with this we might want to look into using
        // IMFSample::GetBufferByIndex (although MS doesn't recommend this)
        if (FAILED(hr = pSample->ConvertToContiguousBuffer(&pMBuffer))) {
            error = true;
            goto releaseSample;
        }
        short *buffer(NULL);
        size_t bufferLength(0);
        // Lock() reports the buffer length in bytes as a DWORD.
        DWORD bufferLengthInBytes(0);
        hr = pMBuffer->Lock(reinterpret_cast<unsigned __int8**>(&buffer), NULL,
            &bufferLengthInBytes);
        if (FAILED(hr)) {
            error = true;
            goto releaseMBuffer;
        }
        bufferLength = bufferLengthInBytes / (m_iBitsPerSample / 8 * m_iChannels); // now in frames

        if (m_seeking) {
            __int64 bufferPosition(frameFromMF(timestamp));
            if (sDebug) {
                std::cout << "While seeking to " << m_nextFrame
                          << " WMF put us at " << bufferPosition
                          << std::endl;
            }
            if (m_nextFrame < bufferPosition) {
                // Uh oh. We are farther forward than our seek target. Emit
                // silence? We can't seek backwards here.
                SHORT_SAMPLE* pBufferCurpos = destBuffer +
                        (size - framesNeeded * m_iChannels);
                __int64 offshootFrames = bufferPosition - m_nextFrame;

                // If we can correct this immediately, write zeros and adjust
                // m_nextFrame to pretend it never happened.

                if (offshootFrames <= framesNeeded) {
                    std::cerr << __FILE__ << __LINE__
                               << "Working around inaccurate seeking. Writing silence for"
                               << offshootFrames << "frames";
                    // Set offshootFrames * m_iChannels samples to zero.
                    memset(pBufferCurpos, 0,
                           sizeof(*pBufferCurpos) * offshootFrames *
                           m_iChannels);
                    // Now m_nextFrame == bufferPosition
                    m_nextFrame += offshootFrames;
                    framesNeeded -= offshootFrames;
                } else {
                    // It's more complicated. The buffer we have just decoded is
                    // more than framesNeeded frames away from us. It's too hard
                    // for us to handle this correctly currently, so let's just
                    // try to get on with our lives.
                    m_seeking = false;
                    m_nextFrame = bufferPosition;
                    std::cerr << __FILE__ << __LINE__
                               << "Seek offshoot is too drastic. Cutting losses and pretending the current decoded audio buffer is the right seek point.";
                }
            }

            if (m_nextFrame >= bufferPosition &&
                m_nextFrame < bufferPosition + bufferLength) {
                // m_nextFrame is in this buffer.
                buffer += (m_nextFrame - bufferPosition) * m_iChannels;
                bufferLength -= m_nextFrame - bufferPosition;
                m_seeking = false;
            } else {
                // we need to keep going forward
                goto releaseRawBuffer;
            }
        }

        // If the bufferLength is larger than the leftover buffer, re-allocate
        // it with 2x the space.
        if (bufferLength * m_iChannels > m_leftoverBufferSize) {
            int newSize = m_leftoverBufferSize;

            while (newSize < bufferLength * m_iChannels) {
                newSize *= 2;
            }
            SHORT_SAMPLE* newBuffer = new SHORT_SAMPLE[newSize];
            memcpy(newBuffer, m_leftoverBuffer,
                   sizeof(m_leftoverBuffer[0]) * m_leftoverBufferSize);
            delete [] m_leftoverBuffer;
            m_leftoverBuffer = newBuffer;
            m_leftoverBufferSize = newSize;
        }
        copyFrames(destBuffer + (size - framesNeeded * m_iChannels),
            &framesNeeded, buffer, bufferLength);

releaseRawBuffer:
        hr = pMBuffer->Unlock();
        // I'm ignoring this, MSDN for IMFMediaBuffer::Unlock stipulates
        // nothing about the state of the instance if this fails so might as
        // well just let it be released.
        //if (FAILED(hr)) break;
releaseMBuffer:
        safeRelease(&pMBuffer);
releaseSample:
        safeRelease(&pSample);
        if (error) break;
    }

    m_nextFrame += framesRequested - framesNeeded;
    if (m_leftoverBufferLength > 0) {
        if (framesNeeded != 0) {
            std::cerr << __FILE__ << __LINE__
                      << "WARNING: Expected frames needed to be 0. Abandoning this file." << std::endl;
            m_dead = true;
        }
        m_leftoverBufferPosition = m_nextFrame;
    }
    long samples_read = size - framesNeeded * m_iChannels;
    m_iCurrentPosition += samples_read;
    if (sDebug) { std::cout << "read() " << size << " returning " << samples_read << std::endl; }
	
    const int sampleMax = 1 << (m_iBitsPerSample - 1);
    // Convert the decoded integer samples to float in [-1.0, 1.0).
    if (m_iChannels == 2)
    {
        SAMPLE *destBufferFloat(const_cast<SAMPLE*>(destination));
        for (long i = 0; i < samples_read; i++)
        {
            destBufferFloat[i] = destBuffer[i] / (float)sampleMax;
        }
    }
    else // assuming mono; note that the samples are converted 1:1 here,
         // they are not actually duplicated into stereo frames
    {
        SAMPLE *destBufferFloat(const_cast<SAMPLE*>(destination));
        for (long i = 0; i < samples_read; i++)
        {
            destBufferFloat[i] = destBuffer[i] / (float)sampleMax;
        }
    }
    return samples_read;
}
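copyFrames() is another helper that is not shown in these snippets. Judging from the call sites, it copies at most *framesNeeded frames from the source buffer into the destination and decrements *framesNeeded; the real member function also has to move any surplus decoded frames into m_leftoverBuffer and update m_leftoverBufferLength, which this simplified, hypothetical sketch leaves out:

// Simplified sketch only: surplus frames are NOT stashed into the leftover
// buffer here, unlike the real helper. Uses memcpy from <cstring>.
static void copyFramesSketch(SHORT_SAMPLE* dest, size_t* framesNeeded,
                             const SHORT_SAMPLE* src, size_t srcFrames,
                             int channels) {
    const size_t framesToCopy = (srcFrames < *framesNeeded) ? srcFrames : *framesNeeded;
    memcpy(dest, src, framesToCopy * channels * sizeof(SHORT_SAMPLE));
    *framesNeeded -= framesToCopy;
}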
Example #18
void TCPServer::deInitIOCP()
{
	safeRelease(_hIOCP);
}
Example #19
/**
*@{ ******************** CResUpdater ********************
*/
CResUpdater::~CResUpdater()
{
    safeRelease(m_pResCrypto);
    Util::DestroyFileHead(m_pOldFileHead);
    DestroyPatchFileHead(m_pPatchFileHead);
}
Example #20
	void ShaderProgramManagerD3D9::release()
	{
		safeRelease(m_pLowLevelShaderMgr);
		safeRelease(m_pHighLevelShaderMgr);
		delete this;
	}
Example #21
void LightShader::release() {
	safeRelease(lightBuffer);
}
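Nearly every example on this page calls a safeRelease() helper without defining it. The Media Foundation snippets pass the address of the pointer (safeRelease(&pSample)), which matches the usual COM idiom; the game-engine snippets pass the pointer directly, so they presumably use a different overload or a resource-specific variant. A minimal sketch of the COM-style version, as an assumption rather than the definitive implementation used by any of these projects:

// Releases a COM interface (if non-null) and nulls the caller's pointer so it
// cannot dangle or be released twice.
template <class T>
void safeRelease(T** ppT) {
    if (*ppT) {
        (*ppT)->Release();
        *ppT = NULL;
    }
}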
Example #22
SINT SoundSourceMediaFoundation::readSampleFrames(
        SINT numberOfFrames, CSAMPLE* sampleBuffer) {
    if (sDebug) {
        qDebug() << "read()" << numberOfFrames;
    }
    SINT framesNeeded(numberOfFrames);

    // first, copy frames from leftover buffer IF the leftover buffer is at
    // the correct frame
    if (m_leftoverBufferLength > 0 && m_leftoverBufferPosition == m_nextFrame) {
        copyFrames(sampleBuffer, &framesNeeded, m_leftoverBuffer,
                m_leftoverBufferLength);
        if (m_leftoverBufferLength > 0) {
            if (framesNeeded != 0) {
                qWarning() << __FILE__ << __LINE__
                        << "WARNING: Expected frames needed to be 0. Abandoning this file.";
                m_dead = true;
            }
            m_leftoverBufferPosition += numberOfFrames;
        }
    } else {
        // leftoverBuffer already empty or in the wrong position, clear it
        m_leftoverBufferLength = 0;
    }

    while (!m_dead && framesNeeded > 0) {
        HRESULT hr(S_OK);
        DWORD dwFlags(0);
        qint64 timestamp(0);
        IMFSample *pSample(nullptr);
        bool error(false); // set to true to break after releasing

        hr = m_pReader->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM, // [in] DWORD dwStreamIndex,
                0,                                 // [in] DWORD dwControlFlags,
                nullptr,                      // [out] DWORD *pdwActualStreamIndex,
                &dwFlags,                        // [out] DWORD *pdwStreamFlags,
                &timestamp,                     // [out] LONGLONG *pllTimestamp,
                &pSample);                         // [out] IMFSample **ppSample
        if (FAILED(hr)) {
            qWarning() << "ReadSample failed!";
            break; // abort
        }

        if (sDebug) {
            qDebug() << "ReadSample timestamp:" << timestamp << "frame:"
                    << frameFromMF(timestamp, getSamplingRate()) << "dwflags:" << dwFlags;
        }

        if (dwFlags & MF_SOURCE_READERF_ERROR) {
            // our source reader is now dead, according to the docs
            qWarning()
                    << "SSMF: ReadSample set ERROR, SourceReader is now dead";
            m_dead = true;
            break;
        } else if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
            qDebug() << "SSMF: End of input file.";
            break;
        } else if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
            qWarning() << "SSMF: Type change";
            break;
        } else if (pSample == nullptr) {
            // generally this will happen when dwFlags contains ENDOFSTREAM,
            // so it'll be caught before now -bkgood
            qWarning() << "SSMF: No sample";
            continue;
        } // we now own a ref to the instance at pSample

        IMFMediaBuffer *pMBuffer(nullptr);
        // I know this does at least a memcopy and maybe a malloc, if we have
        // xrun issues with this we might want to look into using
        // IMFSample::GetBufferByIndex (although MS doesn't recommend this)
        if (FAILED(hr = pSample->ConvertToContiguousBuffer(&pMBuffer))) {
            error = true;
            goto releaseSample;
        }
        CSAMPLE *buffer(nullptr);
        DWORD bufferLengthInBytes(0);
        hr = pMBuffer->Lock(reinterpret_cast<quint8**>(&buffer), nullptr, &bufferLengthInBytes);
        if (FAILED(hr)) {
            error = true;
            goto releaseMBuffer;
        }
        SINT bufferLength = samples2frames(bufferLengthInBytes / sizeof(buffer[0]));

        if (m_seeking) {
            qint64 bufferPosition(frameFromMF(timestamp, getSamplingRate()));
            if (sDebug) {
                qDebug() << "While seeking to " << m_nextFrame
                        << "WMF put us at" << bufferPosition;

            }
            if (m_nextFrame < bufferPosition) {
                // Uh oh. We are farther forward than our seek target. Emit
                // silence? We can't seek backwards here.
                CSAMPLE* pBufferCurpos = sampleBuffer
                        + frames2samples(numberOfFrames - framesNeeded);
                qint64 offshootFrames = bufferPosition - m_nextFrame;

                // If we can correct this immediately, write zeros and adjust
                // m_nextFrame to pretend it never happened.

                if (offshootFrames <= framesNeeded) {
                    qWarning() << __FILE__ << __LINE__
                            << "Working around inaccurate seeking. Writing silence for"
                            << offshootFrames << "frames";
                    // Set offshootFrames samples to zero.
                    memset(pBufferCurpos, 0,
                            sizeof(*pBufferCurpos) * frames2samples(offshootFrames));
                    // Now m_nextFrame == bufferPosition
                    m_nextFrame += offshootFrames;
                    framesNeeded -= offshootFrames;
                } else {
                    // It's more complicated. The buffer we have just decoded is
                    // more than framesNeeded frames away from us. It's too hard
                    // for us to handle this correctly currently, so let's just
                    // try to get on with our lives.
                    m_seeking = false;
                    m_nextFrame = bufferPosition;
                    qWarning() << __FILE__ << __LINE__
                            << "Seek offshoot is too drastic. Cutting losses and pretending the current decoded audio buffer is the right seek point.";
                }
            }

            if (m_nextFrame >= bufferPosition
                    && m_nextFrame < bufferPosition + bufferLength) {
                // m_nextFrame is in this buffer.
                buffer += frames2samples(m_nextFrame - bufferPosition);
                bufferLength -= m_nextFrame - bufferPosition;
                m_seeking = false;
            } else {
                // we need to keep going forward
                goto releaseRawBuffer;
            }
        }

        // If the bufferLength is larger than the leftover buffer, re-allocate
        // it with 2x the space.
        if (frames2samples(bufferLength) > m_leftoverBufferSize) {
            SINT newSize = m_leftoverBufferSize;

            while (newSize < frames2samples(bufferLength)) {
                newSize *= 2;
            }
            CSAMPLE* newBuffer = new CSAMPLE[newSize];
            memcpy(newBuffer, m_leftoverBuffer,
                    sizeof(m_leftoverBuffer[0]) * m_leftoverBufferSize);
            delete[] m_leftoverBuffer;
            m_leftoverBuffer = newBuffer;
            m_leftoverBufferSize = newSize;
        }
        copyFrames(
                sampleBuffer + frames2samples(numberOfFrames - framesNeeded),
                &framesNeeded,
                buffer, bufferLength);

releaseRawBuffer:
        hr = pMBuffer->Unlock();
        // I'm ignoring this, MSDN for IMFMediaBuffer::Unlock stipulates
        // nothing about the state of the instance if this fails so might as
        // well just let it be released.
        //if (FAILED(hr)) break;
releaseMBuffer:
        safeRelease(&pMBuffer);
releaseSample:
        safeRelease(&pSample);
        if (error)
            break;
    }

    SINT framesRead = numberOfFrames - framesNeeded;
    m_iCurrentPosition += framesRead;
    m_nextFrame += framesRead;
    if (m_leftoverBufferLength > 0) {
        if (framesNeeded != 0) {
            qWarning() << __FILE__ << __LINE__
                    << "WARNING: Expected frames needed to be 0. Abandoning this file.";
            m_dead = true;
        }
        m_leftoverBufferPosition = m_nextFrame;
    }
    if (sDebug) {
        qDebug() << "read()" << numberOfFrames << "returning" << framesRead;
    }
    return framesRead;
}
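frameFromMF() converts a Media Foundation timestamp into a frame index. MF timestamps are expressed in 100-nanosecond units (10,000,000 per second), so a plausible sketch of the two-argument form used above, ignoring rounding details (the one-argument form in the older AudioDecoderMediaFoundation presumably reads the sample rate from a member instead), is:

// frame index = timestamp [100 ns units] * sampleRate / 10^7
inline qint64 frameFromMF(qint64 mfTimestamp, qint64 sampleRate) {
    return mfTimestamp * sampleRate / 10000000LL;
}
// Inverse conversion, e.g. for building a seek position in MF time units.
inline qint64 mfFromFrame(qint64 frame, qint64 sampleRate) {
    return frame * 10000000LL / sampleRate;
}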
Example #23
void ChangePartManager::changeWeapon(SPersonMainPartChangeContext & context)
{
	SPersonMainPartChangeContext & curContext = m_CurrentPart[EEntityPart_Weapon];

	if( !m_bCanRequestRes)// cannot request resources anymore
	{
		curContext = context;
		return;
	}

	if(0 == m_pModelNode )// the currently displayed node has not been loaded yet, just store the data
	{
		curContext = context;
		return;
	}

	if(!gGlobalClient->getEntityClient())
		return;

	// first unload the old models
	for( uint i = 0; i < EBindResNum_MainPart; ++i)
	{
		const char * pBP = gGlobalClient->getEntityClient()->GetSchemeCenter()->getBindPoint(curContext.bindPoint[i] );
		ModelNode * pNode = m_pModelNode->getFirstChildNodeByBone(pBP);
		if( pNode ) m_pModelNode->destroyChild(pNode);	
	}

	// save the equipment-change context
	curContext = context;

	// load or unload the equipment
	if( curContext.perform)// putting the equipment on
	{
		ModelNode * pNodeArr[EBindResNum_MainPart];
		memset( pNodeArr,0, sizeof(pNodeArr) );
		for( uint i =0; i < EBindResNum_MainPart; ++i)
		{
			ConfigCreature* pCreatureConfig = ConfigCreatures::Instance()->getCreature(curContext.resId[i]);
			if( pCreatureConfig)// first try to create from the creature config
			{
				pNodeArr[i] = ModelNodeCreater::create(pCreatureConfig);
				if( pNodeArr[i])
					pNodeArr[i]->setScale(xs::Vector3::UNIT_SCALE);
			}
			else// if there is none, create from the resource config
			{
				const std::string & filename = ConfigCreatureRes::Instance()->getResFromId(curContext.resId[i]);
				pNodeArr[i] = ModelNodeCreater::create(filename.c_str() );	
			}		
		}
		for( uint i=0; i<EBindResNum_MainPart; ++i)
		{
			if( pNodeArr[i] == 0 ) continue;
			const char * pBP = gGlobalClient->getEntityClient()->GetSchemeCenter()->getBindPoint(curContext.bindPoint[i]);
			bool ret = m_pModelNode->addChild(pNodeArr[i], pBP);
			if( !ret ) 
			{
				safeRelease(pNodeArr[i]);
			}
			else
			{
				// adjust the weapon position
				setWeaponPos(m_weaponBindPos);

				// adjust the weapon scale
				pNodeArr[i]->setScale(m_fWeaponScale, m_fWeaponScale, m_fWeaponScale);

				// adjust the weapon color
				pNodeArr[i]->setGlobalDiffuse(m_vWeaponColor);

			}
		}

		// show the ribbon system
		showRibbonSystem(m_bShowRibbonSystem);
	}
	else
	{
		// already unloaded
	}

	return;
}
//=============================================================================
// Prepare the vertex buffers for drawing dialog background and buttons
//=============================================================================
void MessageDialog::prepareVerts()
{
    safeRelease(dialogVerts);
    safeRelease(borderVerts);
    safeRelease(buttonVerts);
    safeRelease(button2Verts);

    // border top left
    vtx[0].x = x;
    vtx[0].y = y;
    vtx[0].z = 0.0f;
    vtx[0].rhw = 1.0f;
    vtx[0].color = borderColor;

    // border top right
    vtx[1].x = x + width;
    vtx[1].y = y;
    vtx[1].z = 0.0f;
    vtx[1].rhw = 1.0f;
    vtx[1].color = borderColor;

    // border bottom right
    vtx[2].x = x + width;
    vtx[2].y = y + height;
    vtx[2].z = 0.0f;
    vtx[2].rhw = 1.0f;
    vtx[2].color = borderColor;

    // border bottom left
    vtx[3].x = x;
    vtx[3].y = y + height;
    vtx[3].z = 0.0f;
    vtx[3].rhw = 1.0f;
    vtx[3].color = borderColor;

    graphics->createVertexBuffer(vtx, sizeof vtx, borderVerts);

    // background top left
    vtx[0].x = x + messageDialogNS::BORDER;
    vtx[0].y = y + messageDialogNS::BORDER;
    vtx[0].z = 0.0f;
    vtx[0].rhw = 1.0f;
    vtx[0].color = backColor;

    // background top right
    vtx[1].x = x + width - messageDialogNS::BORDER;
    vtx[1].y = y + messageDialogNS::BORDER;
    vtx[1].z = 0.0f;
    vtx[1].rhw = 1.0f;
    vtx[1].color = backColor;

    // background bottom right
    vtx[2].x = x + width - messageDialogNS::BORDER;
    vtx[2].y = y + height - messageDialogNS::BORDER;
    vtx[2].z = 0.0f;
    vtx[2].rhw = 1.0f;
    vtx[2].color = backColor;

    // background bottom left
    vtx[3].x = x + messageDialogNS::BORDER;
    vtx[3].y = y + height - messageDialogNS::BORDER;
    vtx[3].z = 0.0f;
    vtx[3].rhw = 1.0f;
    vtx[3].color = backColor;

    graphics->createVertexBuffer(vtx, sizeof vtx, dialogVerts);

    // button top left
    vtx[0].x = x + width/2.0f - messageDialogNS::BUTTON_WIDTH/2.0f;
    vtx[0].y = y + height - messageDialogNS::BORDER - messageDialogNS::MARGIN - messageDialogNS::BUTTON_HEIGHT;
    vtx[0].z = 0.0f;
    vtx[0].rhw = 1.0f;
    vtx[0].color = buttonColor;

    // button top right
    vtx[1].x = x + width/2.0f + messageDialogNS::BUTTON_WIDTH/2.0f;
    vtx[1].y = vtx[0].y;
    vtx[1].z = 0.0f;
    vtx[1].rhw = 1.0f;
    vtx[1].color = buttonColor;

    // button bottom right
    vtx[2].x =  vtx[1].x;
    vtx[2].y = vtx[0].y + messageDialogNS::BUTTON_HEIGHT;
    vtx[2].z = 0.0f;
    vtx[2].rhw = 1.0f;
    vtx[2].color = buttonColor;

    // button bottom left
    vtx[3].x = vtx[0].x;
    vtx[3].y = vtx[2].y;
    vtx[3].z = 0.0f;
    vtx[3].rhw = 1.0f;
    vtx[3].color = buttonColor;

    graphics->createVertexBuffer(vtx, sizeof vtx, buttonVerts);

    // set buttonRect
    buttonRect.left   = (long)vtx[0].x;
    buttonRect.right  = (long)vtx[1].x;
    buttonRect.top    = (long)vtx[0].y;
    buttonRect.bottom = (long)vtx[2].y;

    // button2 top left
    vtx[0].x = x + width - messageDialogNS::BUTTON_WIDTH*1.2f;
    vtx[0].y = y + height - messageDialogNS::BORDER - messageDialogNS::MARGIN - messageDialogNS::BUTTON_HEIGHT;
    vtx[0].z = 0.0f;
    vtx[0].rhw = 1.0f;
    vtx[0].color = buttonColor;
    // button2 top right
    vtx[1].x = vtx[0].x + messageDialogNS::BUTTON_WIDTH;
    vtx[1].y = vtx[0].y;
    vtx[1].z = 0.0f;
    vtx[1].rhw = 1.0f;
    vtx[1].color = buttonColor;
    // button2 bottom right
    vtx[2].x =  vtx[1].x;
    vtx[2].y = vtx[0].y + messageDialogNS::BUTTON_HEIGHT;
    vtx[2].z = 0.0f;
    vtx[2].rhw = 1.0f;
    vtx[2].color = buttonColor;
    // button2 bottom left
    vtx[3].x = vtx[0].x;
    vtx[3].y = vtx[2].y;
    vtx[3].z = 0.0f;
    vtx[3].rhw = 1.0f;
    vtx[3].color = buttonColor;
    graphics->createVertexBuffer(vtx, sizeof vtx, button2Verts);

    // set button2Rect
    button2Rect.left   = (long)vtx[0].x;
    button2Rect.right  = (long)vtx[1].x;
    button2Rect.top    = (long)vtx[0].y;
    button2Rect.bottom = (long)vtx[2].y;
}
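prepareVerts() fills a vtx[] array whose element type is not shown. Every vertex sets x/y/z/rhw plus a color and rhw is fixed at 1.0f, so these are pre-transformed (screen-space) Direct3D 9 vertices. A minimal sketch of the usual layout; the struct name and FVF constant are assumptions, the real engine may define its own:

// Pre-transformed, colored vertex for the dialog/border/button quads above.
struct VertexC {
    float x, y, z;   // screen-space position
    float rhw;       // reciprocal homogeneous w; 1.0f skips the transform stage
    DWORD color;     // ARGB diffuse color
};
// Matching flexible vertex format flags for IDirect3DDevice9 draw calls.
const DWORD D3DFVF_VERTEX = D3DFVF_XYZRHW | D3DFVF_DIFFUSE;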
Example #25
// Release method
void Graphics::releaseAll()
{
    safeRelease(device3d);
    safeRelease(direct3d);
}
/** Cobbled together from:
    http://msdn.microsoft.com/en-us/library/dd757929(v=vs.85).aspx
    and http://msdn.microsoft.com/en-us/library/dd317928(VS.85).aspx
    -- Albert
    If anything in here fails, just bail. I'm not going to decode HRESULTS.
    -- Bill
    */
bool AudioDecoderMediaFoundation::configureAudioStream()
{
    HRESULT hr(S_OK);

    // deselect all streams, we only want the first
    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_ALL_STREAMS, false);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to deselect all streams";
        return false;
    }

    hr = m_pReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to select first audio stream";
        return false;
    }

    // Debugging: let's get some info.
    // Get the complete uncompressed format.
    //hr = m_pReader->GetCurrentMediaType(
    //    MF_SOURCE_READER_FIRST_AUDIO_STREAM,
    //    &m_pAudioType);
    hr = m_pReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
            0, // Index of the media type to retrieve... (what does that even mean?)
            &m_pAudioType);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to retrieve completed media type";
        return false;
    }
    UINT32 allSamplesIndependent = 0;
    UINT32 fixedSizeSamples = 0;
    UINT32 sampleSize = 0;
    UINT32 bitsPerSample = 0;
    UINT32 blockAlignment = 0;
    UINT32 numChannels = 0;
    UINT32 samplesPerSecond = 0;
    hr = m_pAudioType->GetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, &allSamplesIndependent);
    hr = m_pAudioType->GetUINT32(MF_MT_FIXED_SIZE_SAMPLES, &fixedSizeSamples);
    hr = m_pAudioType->GetUINT32(MF_MT_SAMPLE_SIZE, &sampleSize);
    hr = m_pAudioType->GetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, &bitsPerSample);
    hr = m_pAudioType->GetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, &blockAlignment);
    hr = m_pAudioType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &numChannels);
    hr = m_pAudioType->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &samplesPerSecond);

    std::cout << "allSamplesIndependent: " << allSamplesIndependent << std::endl;
    std::cout << "fixedSizeSamples: " << fixedSizeSamples << std::endl;
    std::cout << "sampleSize: " << sampleSize << std::endl;
    std::cout << "bitsPerSample: " << bitsPerSample << std::endl;
    std::cout << "blockAlignment: " << blockAlignment << std::endl;
    std::cout << "numChannels: " << numChannels << std::endl;
    std::cout << "samplesPerSecond: " << samplesPerSecond << std::endl;

    m_iChannels = numChannels;
    m_iSampleRate = samplesPerSecond;
    m_iBitsPerSample = bitsPerSample;
    // For compressed files, the bits per sample is undefined, so by convention
    // we're going to get 16-bit integers out.
    if (m_iBitsPerSample == 0)
    {
        m_iBitsPerSample = kBitsPerSample;
    }

    // We're done inspecting the native type; release it before reusing the
    // pointer for the media type we request from the reader.
    safeRelease(&m_pAudioType);
    hr = MFCreateMediaType(&m_pAudioType);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to create media type";
        return false;
    }

    hr = m_pAudioType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set major type";
        return false;
    }

    hr = m_pAudioType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set subtype";
        return false;
    }
/*
    hr = m_pAudioType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, true);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set samples independent";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, true);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set fixed size samples";
        return false;
    }

    hr = m_pAudioType->SetUINT32(MF_MT_SAMPLE_SIZE, kLeftoverSize);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set sample size";
        return false;
    }

    // MSDN for this attribute says that if bps is 8, samples are unsigned.
    // Otherwise, they're signed (so they're signed for us as 16 bps). Why they
    // chose to hide this rather useful tidbit here is beyond me -bkgood
    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, kBitsPerSample);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set bits per sample";
        return false;
    }


    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT,
        numChannels * (kBitsPerSample / 8));
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set block alignment";
        return false;
    }
	*/

	/*
	//MediaFoundation will not convert between mono and stereo without a transform!
    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, kNumChannels);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set number of channels";
        return false;
    }

	
	//MediaFoundation will not do samplerate conversion without a transform in the pipeline.
    hr = m_pAudioType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, kSampleRate);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set sample rate";
        return false;
    }
	*/

    // Set this type on the source reader. The source reader will
    // load the necessary decoder.
    hr = m_pReader->SetCurrentMediaType(
        MF_SOURCE_READER_FIRST_AUDIO_STREAM,
        NULL, m_pAudioType);

    // the reader has the media type now, free our reference so we can use our
    // pointer for other purposes. Do this before checking for failure so we
    // don't dangle.
    safeRelease(&m_pAudioType);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to set media type";
        return false;
    }

    // Get the complete uncompressed format.
    hr = m_pReader->GetCurrentMediaType(
        MF_SOURCE_READER_FIRST_AUDIO_STREAM,
        &m_pAudioType);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to retrieve completed media type";
        return false;
    }

    // Ensure the stream is selected.
    hr = m_pReader->SetStreamSelection(
        MF_SOURCE_READER_FIRST_AUDIO_STREAM,
        true);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to select first audio stream (again)";
        return false;
    }

    // this may not be safe on all platforms as m_leftoverBufferSize is a
    // size_t and this function is writing a uint32. However, on 32-bit
    // Windows 7, size_t is defined as uint which is 32-bits, so we're safe
    // for all supported platforms -bkgood
    UINT32 leftoverBufferSize = 0;
    hr = m_pAudioType->GetUINT32(MF_MT_SAMPLE_SIZE, &leftoverBufferSize);
    if (FAILED(hr)) {
        std::cerr << "SSMF: failed to get buffer size";
        leftoverBufferSize = 32;
        // return false;
    }
    m_leftoverBufferSize = static_cast<size_t>(leftoverBufferSize);
    m_leftoverBufferSize /= 2; // convert size in bytes to size in int16s
    m_leftoverBuffer = new short[m_leftoverBufferSize];

    return true;
}
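Tying the two AudioDecoderMediaFoundation pieces together: configureAudioStream() asks the source reader for signed 16-bit PCM (kBitsPerSample is presumably 16), and read() then normalizes the decoded shorts with sampleMax = 1 << (bitsPerSample - 1). A small worked sketch of that normalization, with hypothetically named constants:

// For 16-bit signed PCM: sampleMax = 1 << 15 = 32768, so shorts in
// [-32768, 32767] map to floats in [-1.0, 1.0).
const int kAssumedBitsPerSample = 16;
const int kSampleMax = 1 << (kAssumedBitsPerSample - 1);
inline float toFloatSample(short s) {
    return s / static_cast<float>(kSampleMax);
}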