TRef<IObject> Read(IBinaryReaderSite* psite, ObjectStack& stack)
    {
        TRef<FrameDataListValue> plistValue = new FrameDataListValue();

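        // Binary layout: a DWORD record count, then for each record a name
        // string followed by three packed Vector structures
        // (position, forward, up).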
        DWORD count = psite->GetDWORD();

        for (DWORD index = 0; index < count; index++) {
            ZString str = psite->GetString();

            Vector* pvecPosition;
            psite->GetStructure(pvecPosition);
            Vector* pvecForward;
            psite->GetStructure(pvecForward);
            Vector* pvecUp;
            psite->GetStructure(pvecUp);

            plistValue->GetList().PushEnd(
                FrameData(
                    str,
                    *pvecPosition,
                    *pvecForward,
                    *pvecUp
                )
            );
        }

        return plistValue;
    }
    TRef<IObject> Apply(ObjectStack& stack)
    {
        TRef<FrameDataListValue> plistValue = new FrameDataListValue();

        TRef<IObjectList> plist;
        CastTo(plist, (IObject*)stack.Pop());

        while (plist->GetCurrent()) {
            IObjectPair* ppair;
            CastTo(ppair, plist->GetCurrent());

            ZString strName     = GetString(ppair->GetNth(0));
            Vector  vecPosition = GetVector(ppair->GetNth(1));
            Vector  vecForward  = GetVector(ppair->GetNth(2));
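            // The final element of the nested pair chain is fetched with
            // GetLastNth rather than GetNth.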
            Vector  vecUp       = GetVector(ppair->GetLastNth(3));

            plistValue->GetList().PushEnd(
                FrameData(
                    strName,
                    vecPosition,
                    vecForward,
                    vecUp
                )
            );

            plist->GetNext();
        }

        return plistValue;
    }
FrameData StringVoice::NextFrame()
{
    double frame = 0.0;

    // Karplus-Strong update: average the current sample with its successor
    // (wrapping at the cycle end so the last sample does not read past the
    // cycle) and scale by the feedback factor.
    buffer[this->currentFrame] =
        ((buffer[this->currentFrame]
          + buffer[(this->currentFrame + 1) % this->cycleSize]) / 2.0)
        * this->feedbackValue;

    frame = buffer[this->currentFrame] * this->gain * this->envelope.NextFrame();

    this->currentFrame++;
    if (this->currentFrame >= this->cycleSize) {
        this->currentFrame -= this->cycleSize;
    }
    else if (this->needsReset && !this->envelope.IsActive()) {
        this->Reset();
    }

    return FrameData(
        frame,
        frame
    );
}
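
For context, the averaging-and-feedback update above is the core of a Karplus-Strong string model. Below is a minimal, self-contained sketch of the same idea (the names and the std::vector delay line are illustrative, not from the original):

#include <vector>
#include <cstddef>

// One pass over the delay line: average each sample with its successor
// (wrapping at the cycle end), scale by the feedback factor, and write the
// result back so it recirculates on the next pass.
std::vector<double> RenderOneCycle(std::vector<double>& delayLine, double feedback)
{
    const std::size_t n = delayLine.size();
    std::vector<double> out(n);
    for (std::size_t i = 0; i < n; ++i) {
        const std::size_t next = (i + 1) % n;
        delayLine[i] = 0.5 * (delayLine[i] + delayLine[next]) * feedback;
        out[i] = delayLine[i];
    }
    return out;
}
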
FrameData PolyBLEPOscillator::NextFrame()
{
    double value = 0.0;
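    // Normalized phase in [0, 1), consumed by the polyBLEP correction below.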
    double t = phase / TWO_PI;
    
    if (this->oscType == kOscillatorTypeSine) {
        value = this->NaiveWaveformForOscType(kOscillatorTypeSine);
    }
    else if (this->oscType == kOscillatorTypeSaw) {
        value = this->NaiveWaveformForOscType(kOscillatorTypeSaw);
        value -= this->PolyBlep(t);
    }
    
    this->phase += this->increment;
    if (this->phase >= TWO_PI) {
        this->phase -= TWO_PI;
    }
    
    return FrameData(value, value);
}
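
The saw branch above subtracts a polyBLEP residual around the phase discontinuity to suppress aliasing. The example's PolyBlep member is not shown; here is a minimal sketch of the standard two-segment residual it presumably computes, where dt is the per-sample phase increment normalized to [0, 1), i.e. increment / TWO_PI:

double PolyBlepResidual(double t, double dt)
{
    if (t < dt) {
        // Just after the discontinuity: 2t - t^2 - 1.
        t /= dt;
        return t + t - t * t - 1.0;
    }
    else if (t > 1.0 - dt) {
        // Just before the discontinuity: t^2 + 2t + 1.
        t = (t - 1.0) / dt;
        return t * t + t + t + 1.0;
    }
    return 0.0; // Far from either edge: no correction needed.
}
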
    TRef<Geo> Execute(const Matrix& mat, GroupGeo* pgroup)
    {
        ZString strName = pgroup->GetName();

        if (!strName.IsEmpty()) {
            if (strName.Find("frm-") == 0 && !pgroup->AnyChildGroups()) {
                Vector vecPosition = mat.Transform(Vector(0, 0, 0));
                Vector vecForward  = mat.TransformDirection(Vector(0, 0, -1));
                Vector vecUp       = mat.TransformDirection(Vector(0, 1,  0));

                strName = strName.RightOf(4); // strip the "frm-" prefix

                if (strName.Find("SS") != -1) {
                    //
                    // a strobe light
                    //

                    ValueList* plist = pgroup->GetList();

                    if (plist->GetCount() == 1) {
                        MaterialGeo* pmatGeo;
                        CastTo(pmatGeo, plist->GetFirst());
                        Material* pmaterial = pmatGeo->GetMaterial();

                        AddLight(strName, pmaterial->GetDiffuse(), vecPosition);
                    } else {
                        AddLight(strName, Color(1, 1, 1), vecPosition);
                    }

                    return Geo::GetEmpty();
                } else if (
                    strName.Find("thrust") != -1
                    || strName.Find("smoke") != -1
                    || strName.Find("rocket") != -1
                ) {
                    //
                    // this is an engine
                    //

                    m_pframes->GetList().PushFront(
                        FrameData(strName, vecPosition, vecForward, vecUp)
                    );

                    return Geo::GetEmpty();
                } else if (
                    (strName.Find("weapon") != -1)
                    || (strName.Find("wepatt") != -1)
                    || (strName.Find("wepemt") != -1)
                    || (strName.Find("wepmnt") != -1)
                    || (strName.Find("trail")  != -1)
                ) {
                    //
                    // This is an attachment point
                    //

                    m_pframes->GetList().PushFront(
                        FrameData(strName, vecPosition, vecForward, vecUp)
                    );
                    return Geo::GetEmpty();
                } else if (
                    (strName.Find("garage") != -1)
                ) {
                    //
                    // This is a garage; record the frame but leave the group
                    // in the graph (note: no empty Geo is returned here)
                    //

                    m_pframes->GetList().PushFront(
                        FrameData(strName, vecPosition, vecForward, vecUp)
                    );
                }
            }
        }

        return NULL;
    }
Example #6
HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
    IDeckLinkVideoFrame*				rightEyeFrame = NULL;
    IDeckLinkVideoFrame3DExtensions*	threeDExtensions = NULL;
    void*								frameBytes;
    void*								audioFrameBytes;

    // Handle Video Frame
    if (videoFrame)
    {
        // If 3D mode is enabled we retrieve the 3D extensions interface, which
        // gives us access to the right eye frame by calling GetFrameForRightEye().
        if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
             (threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
        {
            rightEyeFrame = NULL;
        }

        if (threeDExtensions)
            threeDExtensions->Release();

        if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
        {
            printf("Frame received (#%lu) - No input signal detected\n", g_frameCount);
        }
        else
        {
            const char *timecodeString = NULL;
            if (g_config.m_timecodeFormat != 0)
            {
                IDeckLinkTimecode *timecode;
                if (videoFrame->GetTimecode(g_config.m_timecodeFormat, &timecode) == S_OK)
                {
                    timecode->GetString(&timecodeString);
                }
            }

            int64_t timestampNow = bot_timestamp_now();

            if (g_config.m_lcmChannelName)
            {
                IDeckLinkMutableVideoFrame* outputFrame = NULL;
                if (g_deckLinkOutput->CreateVideoFrame(videoFrame->GetWidth(), videoFrame->GetHeight(), videoFrame->GetWidth()*4, bmdFormat8BitBGRA, bmdFrameFlagDefault, &outputFrame) == S_OK)
                {
                    // Only queue the frame if the pixel-format conversion succeeded.
                    HRESULT convertResult = g_conversionInst->ConvertFrame(videoFrame, outputFrame);
                    if (convertResult == S_OK)
                        frameConsumer.Queue.enqueue(FrameData(outputFrame, timestampNow));
                }
            }

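            // Report the measured capture rate roughly once per second.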
            static int64_t baseTime = timestampNow;
            static uint64_t frameCount = g_frameCount;
            double elapsedTime = (timestampNow - baseTime) * 1e-6;
            if (elapsedTime > 1.0)
            {
                printf("capturing at %.2f fps.\n", (g_frameCount - frameCount)/elapsedTime);
                baseTime = timestampNow;
                frameCount = g_frameCount;
            }

            if (timecodeString)
                free((void*)timecodeString);

            if (g_videoOutputFile != -1)
            {
                videoFrame->GetBytes(&frameBytes);
                write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());

                if (rightEyeFrame)
                {
                    rightEyeFrame->GetBytes(&frameBytes);
                    write(g_videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
                }
            }
        }

        if (rightEyeFrame)
            rightEyeFrame->Release();

        g_frameCount++;
    }

    // Handle Audio Frame
    if (audioFrame)
    {
        if (g_audioOutputFile != -1)
        {
            audioFrame->GetBytes(&audioFrameBytes);
            write(g_audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_config.m_audioChannels * (g_config.m_audioSampleDepth / 8));
        }
    }

    if (g_config.m_maxFrames > 0 && videoFrame && g_frameCount >= g_config.m_maxFrames)
    {
        g_do_exit = true;
        pthread_cond_signal(&g_sleepCond);
    }

    return S_OK;
}
Example #7
const FrameData SequenceData::operator[](unsigned int frame) const {
    if (frame >= numFrames) {
        return FrameData(numChannels, invalidData);
    }
    return FrameData(numChannels, &data[frame * numChannels]);
}
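
The indexing above implies frame-major storage: channel c of frame f lives at data[frame * numChannels + c], and out-of-range frames return a FrameData wrapping invalidData rather than faulting. A hypothetical standalone analogue of the same layout (SimpleSequence is illustrative, not part of the original API):

#include <vector>

struct SimpleSequence {
    unsigned int numFrames = 0;
    unsigned int numChannels = 0;
    std::vector<float> data; // numFrames * numChannels values, frame-major

    // Pointer to the first channel of a frame, or nullptr when the frame is
    // out of range (the original returns FrameData over invalidData instead).
    const float* Frame(unsigned int frame) const {
        return frame < numFrames ? &data[frame * numChannels] : nullptr;
    }
};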