Example #1
void OscilloscopeEngine::processOscilloscopeData(SamplesList leftChannelData, SamplesList rightChannelData)
{
    if (!m_isRunning) {
        return;
    }
//    qDebug() << "Got" << samples.length() << "samples";
    if (m_capturedChannels == CHANNEL_NONE) {
        return;
    }
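    // "buffer" will point at the persistent accumulation buffer of the channel used for
    // trigger search / plotting; "samples" at the matching chunk of fresh data
    // (only set for single-channel capture).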
    SamplesList *buffer = nullptr;
    SamplesList *samples = nullptr;
    // I.   Select primary channel
    if (m_capturedChannels != CHANNEL_BOTH) {
        // For single channel capture, process only one buffer.
        if (m_capturedChannels == CHANNEL_LEFT) {
            buffer = &m_samplesInputBufferLeft;
            samples = &leftChannelData;
        } else if (m_capturedChannels == CHANNEL_RIGHT) {
            buffer = &m_samplesInputBufferRight;
            samples = &rightChannelData;
        }
    } else {
        // When both channels are captured, only one channel drives Normal or Single triggering
        if (m_triggerChannel == CHANNEL_LEFT) {
            buffer = &m_samplesInputBufferLeft;
        } else if (m_triggerChannel == CHANNEL_RIGHT) {
            buffer = &m_samplesInputBufferRight;
        } else if (m_triggerMode != TRIG_AUTO) {
            qWarning() << "For non-auto trigger mode an one channel have to be choosen for triggering";
            Q_ASSERT(false);
            return;
        }
    }
    // II.  For both-channel capture, check the buffers' state - samples must be captured simultaneously
    if (m_capturedChannels == CHANNEL_BOTH) {
        // A size mismatch is not treated as a fatal bug - just resync by clearing both buffers
        if (m_samplesInputBufferLeft.size() != m_samplesInputBufferRight.size()) {
            qWarning() << "Audio input buffers are not synced. Reset them."
                       << "Left:" << m_samplesInputBufferLeft.size()
                       << "Right:" << m_samplesInputBufferRight.size();
            m_samplesInputBufferLeft.clear();
            m_samplesInputBufferRight.clear();
//            Q_ASSERT(false);
        }
        if (leftChannelData.size() != rightChannelData.size()) {
            qWarning() << "Fresh audio samples are not synced. Reset them."
                       << "Left:" << leftChannelData.size()
                       << "Right:" << rightChannelData.size();
            leftChannelData.clear();
            rightChannelData.clear();
//            Q_ASSERT(false);
        }
    }
    // III. Trigger mode: Automatic - display data immediately once there is enough data in the buffer
    if (m_triggerMode == TRIG_AUTO) {
        if (m_capturedChannels != CHANNEL_BOTH) {
            buffer->append(*samples);
            if (buffer->size() < m_frameLength) {
                return;
            }
        } else {
            m_samplesInputBufferLeft.append(leftChannelData);
            m_samplesInputBufferRight.append(rightChannelData);
            if (m_samplesInputBufferLeft.size() < m_frameLength) {
                return;
            }
        }
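        // Enough samples accumulated - plot one full frame from the start of the buffer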
        displayOscilloscopeChannelData(0, m_frameLength, m_frameLength);
        return;
    }
    // IV.  Trigger mode: Single - once data is acquired, the graph has to hold it
    if ((m_triggerMode == TRIG_SINGLE) && m_dataForSingleCaptureAcqured) {
        return;
    }
    // V.   Trigger mode: Normal or Single (when trigger hasn't fired yet)
    if ((m_triggerMode == TRIG_NORMAL) || (m_triggerMode == TRIG_SINGLE)) {
        // Fill buffers with samples
        if (m_capturedChannels != CHANNEL_BOTH) {
            buffer->append(*samples);
            if (buffer->size() < m_frameLength * 2) {
                return;
            }
        } else {
            m_samplesInputBufferLeft.append(leftChannelData);
            m_samplesInputBufferRight.append(rightChannelData);
            if (m_samplesInputBufferLeft.size() < m_frameLength * 2) {
                return;
            }
        }
        // Find the moments where the signal crosses the trigger level with the required slope.
        // Start searching half a frame length in, because that half has already been plotted
        QList<int> eventsOffsets;
        for (int offset = m_frameLength / 2 - 1;  offset < buffer->size(); ++offset) {
            qreal cur = buffer->at(offset);
            qreal prev = buffer->at(offset - 1);
            if (m_triggerSlope == TRIG_RISING) {
                if ((prev <= m_triggerLevel) && (cur >= m_triggerLevel)) {
                    eventsOffsets.append(offset);
                }
            } else {
                if ((prev >= m_triggerLevel) && (cur <= m_triggerLevel)) {
                    eventsOffsets.append(offset);
                }
            }
        }
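        // Prefer the first crossing that sits at least half a frame into the buffer, so a full
        // frame centred on it can be shown; if every crossing lies in the already-plotted half,
        // fall back to the last one.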
        if (eventsOffsets.size() > 0) {
            // Take the first event that falls in the next frame
            for (int i = 0; i < eventsOffsets.size(); ++i) {
                int offset = eventsOffsets[i];
                if (offset >= m_frameLength * 3 / 2) {
                    // there is not enough data to display a full frame
                    eventsOffsets.clear();
                    break;
                }
                if (offset >= m_frameLength / 2) {
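                    // This crossing lies in the new half and a full frame centred on it
                    // fits in the buffer, so display it and stop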
                    displayOscilloscopeChannelData(offset - m_frameLength / 2, m_frameLength, m_frameLength / 2);
                    m_dataForSingleCaptureAcqured = true;
                    return;
                }
            }
            // otherwise select the last event
            if (eventsOffsets.size() > 0) {
                int offset = eventsOffsets.last();
                displayOscilloscopeChannelData(offset - m_frameLength / 2, m_frameLength, m_frameLength / 2);
                m_dataForSingleCaptureAcqured = true;
                return;
            }
        }
        // Buffer has overflowed - drop the oldest frame worth of samples and keep waiting for a trigger
        if (buffer->size() > m_frameLength * 2) {
            if (m_capturedChannels != CHANNEL_BOTH) {
                *buffer = buffer->mid(m_frameLength);
            } else {
                m_samplesInputBufferLeft = m_samplesInputBufferLeft.mid(m_frameLength);
                m_samplesInputBufferRight = m_samplesInputBufferRight.mid(m_frameLength);
            }
        }
    }
}