/**
 * Request the "readahead" frames starting at the given frame so they are
 * decoded ahead of playback.
 * @param frame The current playhead frame (origin 0 at the start of the stream).
 * @param backward True when playing backward, so the readahead extends to the past.
 */
void PhVideoPool::requestFrames(PhFrame frame, bool backward)
{
	PHDBG(24) << frame;

	if(_frameLength == 0) {
		PHDBG(24) << "not ready";
		return;
	}

	// clip to stream boundaries
	if(frame < 0)
		frame = 0;
	// The last valid frame index is _frameLength - 1 (the original clamped to
	// _frameLength, which is one past the end of the stream).
	if (frame >= _frameLength)
		frame = _frameLength - 1;

	// we make sure we have requested "readahead_count" frames
	for (int i = 0; i < _settings->videoReadhead(); i++) {
		int factor = i;
		if (backward) {
			factor *= -1; // walk backward from the playhead
		}
		PhFrame requestedFrame = frame + factor;
		if (!isFrameRequested(requestedFrame)) {
			requestFrame(requestedFrame);
		}
	}

	// Recycle the buffers that are no longer close to the playhead.
	cleanup(frame);
}
/**
 * Open a MIDI input port matching the given name, or create a virtual
 * port when no existing input port carries that name.
 * @param inputPortName The name of the port to open.
 * @return True upon success, false otherwise.
 */
bool PhMidiInput::open(QString inputPortName)
{
	PHDEBUG << inputPortName;
	close();
	try {
		_midiIn = new RtMidiIn();

		// Look for an existing input port whose (converted) name matches.
		int foundIndex = -1;
		const unsigned int portCount = _midiIn->getPortCount();
		for(unsigned int port = 0; port < portCount; port++) {
			QString candidate = convertName(_midiIn->getPortName(port));
			PHDBG(22) << "-" << candidate;
			if(candidate == inputPortName) {
				foundIndex = port;
				break;
			}
		}

		PHDBG(22) << "Opening" << inputPortName;
		if(foundIndex < 0)
			_midiIn->openVirtualPort(inputPortName.toStdString());
		else
			_midiIn->openPort(foundIndex);

		// Also receive sysex, timing and active sensing messages.
		_midiIn->ignoreTypes( false, false, false );
		_midiIn->setCallback(&PhMidiInput::callback, this);
		_midiIn->setErrorCallback(&PhMidiInput::errorCallback, this);
		return true;
	}
	catch(RtMidiError &error) {
		PHERR << "Midi error:" << QString::fromStdString(error.getMessage());
		close();
		return false;
	}
}
/**
 * Render the view and compare it pixel by pixel with an expected image file.
 * The difference is the sum of the squared RGB channel deltas over all pixels.
 * When it exceeds the threshold, the rendered image is saved next to the
 * expected one as "<basename>.result.bmp" for inspection.
 * @param imageFile Path of the expected image.
 * @param threshold Difference above which the result image is saved.
 * @param width Comparison width in pixels (0 means the view width * pixel ratio).
 * @param height Comparison height in pixels (0 means the view height * pixel ratio).
 * @return The total difference, saturated to INT_MAX on bad file, bad size or overflow.
 */
int PhGraphicView::compare(QString imageFile, int threshold, int width, int height)
{
	int ratio = this->windowHandle()->devicePixelRatio();
	if(width == 0)
		width = this->width() * ratio;
	if(height == 0)
		height = this->height() * ratio;

	// Accumulate in 64 bits: summing per-pixel squared differences into an
	// int overflows for large images (signed overflow is undefined
	// behaviour — the original relied on observing a negative value).
	qint64 totalDiff = 0;
	QImage result = this->renderPixmap(width, height).toImage();
	QImage expected(imageFile);
	if((expected.width() == 0) || (expected.height() == 0)) {
		PHDBG(9) << QString("Bad expected file: %1").arg(imageFile);
		totalDiff = std::numeric_limits<int>::max();
	}
	else if(expected.size() != QSize(width, height)) {
		PHDBG(9) << QString("Bad size for %1: %2x%3 / %4x%5")
		    .arg(imageFile)
		    .arg(expected.width())
		    .arg(expected.height())
		    .arg(width)
		    .arg(height);
		totalDiff = std::numeric_limits<int>::max();
	}
	else {
		for(int i = 0; i < width; i++) {
			for(int j = 0; j < height; j++) {
				QRgb a = result.pixel(i, j);
				QRgb b = expected.pixel(i, j);
				// Plain integer squares instead of qPow (which computes a
				// general power through double for no benefit here).
				int dr = qRed(a) - qRed(b);
				int dg = qGreen(a) - qGreen(b);
				int db = qBlue(a) - qBlue(b);
				totalDiff += qint64(dr) * dr + qint64(dg) * dg + qint64(db) * db;
			}
		}
		// Saturate so the int return value keeps its "huge difference" meaning.
		if(totalDiff > std::numeric_limits<int>::max())
			totalDiff = std::numeric_limits<int>::max();
	}

	if(totalDiff > threshold) {
		QFileInfo info(imageFile);
		QString resultFile = info.completeBaseName() + ".result.bmp";
		PHDBG(9) << "saving to " << resultFile;
		result.save(resultFile);
	}
	return int(totalDiff);
}
void Generator::generateData(const QAudioFormat &format, qint64 durationUs, int sampleRate) { const int channelBytes = format.sampleSize() / 8; qint64 length = (format.sampleRate() * format.channelCount() * (format.sampleSize() / 8)) * durationUs / 1000000; m_buffer.resize(length); unsigned char *ptr = reinterpret_cast<unsigned char *>(m_buffer.data()); int sampleIndex = 0; PHDBG() << "Type :" << format.sampleType() << " Size : " << format.sampleSize() << " Channel Count : " << format.channelCount(); while (length) { const qreal x = qSin(2 * M_PI * sampleRate * qreal(sampleIndex % format.sampleRate()) / format.sampleRate()); for (int i = 0; i < format.channelCount(); ++i) { qint16 value = static_cast<qint16>(x * 32767); qToLittleEndian<qint16>(value, ptr); ptr += channelBytes; length -= channelBytes; } ++sampleIndex; } }
/**
 * Withdraw a pending frame request from the decoding queue.
 * Emits frameCancelled only when the buffer was actually queued.
 * @param frame The buffer whose decoding is no longer wanted.
 */
void PhVideoDecoder::cancelFrameRequest(PhVideoBuffer *frame)
{
	int removedCount = _requestedFrames.removeAll(frame);
	PHDBG(24) << frame->requestFrame() << " " << removedCount;
	if (removedCount > 0) {
		emit frameCancelled(frame);
	}
}
/**
 * Refresh the reader timecode display and the writer/reader delay label.
 * @param time The new reader time.
 */
void MidiToolWindow::onReaderTimeChanged(PhTime time)
{
	PhTimeCodeType tcType = _mtcReader.timeCodeType();
	QString timeCodeString = PhTimeCode::stringFromTime(time, tcType);
	PHDBG(2) << PhTimeCode::getAverageFps(tcType) << "/" << timeCodeString;
	ui->readerTimeCodeLabel->setText(timeCodeString);
	// NOTE(review): dividing the PhTime difference by 24 presumably converts
	// PhTime ticks to milliseconds — confirm against PhTime's unit definition.
	int delay = (_mtcWriter.clock()->time() - _mtcReader.clock()->time()) / 24;
	ui->delayLabel->setText(QString("%0 ms").arg(delay));
}
/**
 * Close the MIDI input port (if open) and release the RtMidi handle.
 * Safe to call when no input is allocated.
 */
void PhMidiInput::close()
{
	if(!_midiIn)
		return;

	PHDBG(22);
	if(_midiIn->isPortOpen())
		_midiIn->closePort();
	delete _midiIn;
	_midiIn = NULL;
}
/**
 * Ask the decoder for one frame, reusing a recycled buffer when available.
 * @param frame The requested frame (origin 0 at the start of the file, not timeIn).
 */
void PhVideoPool::requestFrame(PhFrame frame)
{
	PhVideoBuffer *buffer = NULL;
	if (_recycledPool.empty()) {
		PHDBG(24) << "creating a new buffer";
		buffer = new PhVideoBuffer();
	}
	else {
		buffer = _recycledPool.takeFirst();
	}
	buffer->setFrame(0);
	buffer->setRequestFrame(frame);

	PHDBG(24) << frame;

	// ask the frame to the decoder.
	// Notice that the time origin for the decoder is 0 at the start of the file, it's not timeIn.
	emit decodeFrame(buffer);
	_requestedPool.append(buffer);
}
/**
 * Queue a frame request and, when this is the outermost call, drain the
 * queue while pumping the event loop so that cancellations and further
 * requests posted in the meantime are taken into account.
 * @param buffer The buffer to decode into.
 */
void PhVideoDecoder::requestFrame(PhVideoBuffer *buffer)
{
	// An empty queue means we are the outermost (non-reentrant) call,
	// and therefore responsible for draining the queue below.
	const bool topLevel = _requestedFrames.empty();
	PHDBG(24) << buffer->requestFrame() << " " << topLevel;

	_requestedFrames.append(buffer);

	if (!topLevel)
		return;

	while (!_requestedFrames.empty()) {
		QCoreApplication::processEvents();
		decodeFrame();
	}
}
int PhLtcWriter::processAudio(const void *, void *outputBuffer, unsigned long) { unsigned int hhmmssff[4]; PhTimeCode::ComputeHhMmSsFfFromTime(hhmmssff, _clock.time(), _tcType); _st.hours = hhmmssff[0]; _st.mins = hhmmssff[1]; _st.secs = hhmmssff[2]; _st.frame = hhmmssff[3]; ltc_encoder_set_timecode(_encoder, &_st); PHDBG(21) << _st.hours << _st.mins << _st.secs << _st.frame; int len; ltcsnd_sample_t *buf; ltc_encoder_encode_frame(_encoder); buf = ltc_encoder_get_bufptr(_encoder, &len, 1); memcpy(outputBuffer, buf, len); _clock.elapse(PhTimeCode::timePerFrame(_tcType) / 4); return len; }
/**
 * List the names of every audio device that has at least one output channel.
 * @return The list of output device names (empty on PortAudio error).
 */
QList<QString> PhAudioOutput::outputList()
{
	QList<QString> names;

	int deviceCount = Pa_GetDeviceCount();
	if(deviceCount <= 0) {
		PHDBG(0) << "ERROR: Pa_CountDevices returned " << deviceCount;
	}
	else {
		for(int deviceIndex = 0; deviceIndex < deviceCount; deviceIndex++) {
			const PaDeviceInfo *deviceInfo = Pa_GetDeviceInfo( deviceIndex );
			// Keep only devices able to play audio.
			if(deviceInfo->maxOutputChannels > 0) {
				//PHDEBUG << deviceInfo->name;
				names.append(QString::fromLatin1(deviceInfo->name));
			}
		}
	}

	foreach(QString name, names) {
		PHDEBUG << name;
	}

	return names;
}
void PhGraphicView::onRefresh() { if(this->refreshRate() > _maxRefreshRate) _maxRefreshRate = this->refreshRate(); addInfo(QString("refresh: %1x%2, %3 / %4") .arg(this->width()) .arg(this->height()) .arg(_maxRefreshRate) .arg(this->refreshRate())); addInfo(QString("Update : %1 %2").arg(_maxUpdateDuration).arg(_lastUpdateDuration)); addInfo(QString("drop: %1 %2").arg(_dropDetected).arg(_dropTimer.elapsed() / 1000)); QTime t; t.start(); updateGL(); _lastUpdateDuration = t.elapsed(); if(_lastUpdateDuration > _maxUpdateDuration) _maxUpdateDuration = _lastUpdateDuration; if(_lastUpdateDuration > static_cast<int>(1500.0 / _screenFrequency)) { _dropTimer.restart(); _dropDetected++; PHDBG(8) << "Drop detected:" << _dropDetected; } }
/**
 * Handle a raw incoming MIDI message.
 *
 * Recognized messages (per the branches below):
 * - SysEx (status 0xf0) with universal real-time manufacturer id 0x7F:
 *   full timecode (type 0x01) and MIDI machine control (type 0x06:
 *   stop 0x01, play 0x02, goto 0x44).
 * - Quarter-frame MTC (status 0xf1): each message carries one nibble of
 *   the running timecode kept in _hh/_mm/_ss/_ff.
 * Anything else is only logged.
 * @param message The raw MIDI bytes, status byte first.
 */
void PhMidiInput::onMessage(std::vector<unsigned char> *message)
{
	if ( message->size() > 0 ) {
		// Hex dump of the whole message for the logs.
		QString messageStr = "";
		foreach(unsigned char data, *message)
			messageStr += QString::number(data, 16) + " ";
		PHDBG(21) << messageStr;

		unsigned char status = message->at(0);
		switch (status) {
		// A SysEx message
		case 0xf0:
			if(message->size() < 4)
				PHDEBUG << "Bad SysEx message size:" << message->size() << "/" << messageStr;
			else {
				unsigned char manufactorId = message->at(1);
#warning /// @todo Handle midi channel
				// unsigned char channel = message->at(2);
				unsigned char type = message->at(3);
				// 0x7F is the universal real-time manufacturer id.
				if(manufactorId == 0x7F) {
					switch (type) {
					// Timecode message type
					case 0x01:
						if(message->size() != 10)
							PHDEBUG << "Bad TC message size:" << message->size();
						else switch(message->at(4)) {
						case 0x01:
							// Full timecode: the rate lives in the top 3 bits
							// of the hour byte, the hour in the low 5 bits.
							_mtcType = computeTimeCodeType(message->at(5) >> 5);
							_hh = message->at(5) & 0x1F;
							_mm = message->at(6);
							_ss = message->at(7);
							_ff = message->at(8);
							if(message->at(9) != 0xF7)
								PHDEBUG << "End of SysEx expected:" << QString::number(0xF7);
							PHDEBUG << "Full TC:" << _hh << _mm << _ss << _ff;
							onTimeCode(_hh, _mm, _ss, _ff, _mtcType);
							break;
						default:
							PHDEBUG << "Unknown TC type:" << message->at(4) << "/" << messageStr;
							break;
						}
						break;
					// Midi machine control message type
					case 0x06:
						switch(message->at(4)) {
						case 0x01:
							PHDEBUG << "MMC Stop" << messageStr;
							onStop();
							break;
						case 0x02:
							PHDEBUG << "MMC Play" << messageStr;
							onPlay();
							break;
						case 0x44:
							// MMC goto (locate): the timecode starts at byte 7.
							_mtcType = computeTimeCodeType(message->at(7) >> 5);
							_hh = message->at(7) & 0x1F;
							_mm = message->at(8);
							_ss = message->at(9);
							// It seems that the some information is sent to the frame byte too (not timecode type)...
							_ff = message->at(10) & 0x1F;
							PHDEBUG << "Go To" << _hh << _mm << _ss << _ff;
							onTimeCode(_hh, _mm, _ss, _ff, _mtcType);
							break;
						default:
							PHDEBUG << "Unknown MMC message:" << messageStr;
							break;
						}
						break;
					default:
						PHDEBUG << "Unknown SysEx type:" << QString::number(type, 16) << "/" << messageStr;
						break;
					}
				}
				// else
				//	PHDEBUG << "Not a MMC message:" << messageStr;
			}
			break;
		// A quarter frame midi timecode message
		case 0xf1:
			if(message->size() != 2)
				PHDEBUG << "Bad QF MTC message size:" << message->size() << "/" << messageStr;
			else {
				unsigned char data = message->at(1);
				// The high nibble selects which piece of the timecode this
				// quarter-frame carries; the low nibble is the payload.
				switch (data >> 4) {
				case 0:
					_ff = (_ff & 0xf0) | (data & 0x0f);
					// onTimeCode(_hh, _mm, _ss, _ff, _tcType);
					break;
				case 1:
					_ff = (_ff & 0x0f) | ((data & 0x0f) << 4);
					break;
				case 2:
					_ss = (_ss & 0xf0) | (data & 0x0f);
					break;
				case 3:
					_ss = (_ss & 0x0f) | ((data & 0x0f) << 4);
					// Because of the way MTC is structured,
					// the minutes place won't be updated on the frame
					// where it changes over.
					// Dumb? Yes. But this fixes it.
					// From https://github.com/Figure53/TimecodeDisplay/blob/master/MIDIReceiver.m#L197
					// if((_ss == 0) && (_ff == 0))
					//	_mm++;
					//onTimeCode(_hh, _mm, _ss, _ff, _tcType);
					break;
				case 4:
					_mm = (_mm & 0xf0) | (data & 0x0f);
					break;
				case 5:
					_mm = (_mm & 0x0f) | ((data & 0x0f) << 4);
					break;
				case 6:
					_hh = (_hh & 0xf0) | (data & 0x0f);
					break;
				case 7:
					// The last piece also carries the timecode type in bits 1-2.
					_hh = (_hh & 0x0f) | ((data & 0x01) << 4);
					_mtcType = computeTimeCodeType((data & 0x06) >> 1);
					break;
				}
				PHDBG(20) << QString("QF MTC (%1): %2:%3:%4:%5").arg(QString::number(data, 16)).arg(_hh).arg(_mm).arg(_ss).arg(_ff);
				onQuarterFrame(data);
			}
			break;
		default:
			PHDEBUG << "Unknown midi message:" << messageStr;
			break;
		}
	}
}
/**
 * Decode the first frame of the request queue and hand it back, converted
 * to RGB, through frameToRgb().
 *
 * Seeks only when the requested frame is in the past or more than 20 frames
 * ahead of the current decoding position; otherwise packets are read forward
 * until the requested frame is reached.
 */
void PhVideoDecoder::decodeFrame()
{
	if(!ready()) {
		PHDBG(24) << "not ready";
		return;
	}

	if (_requestedFrames.empty()) {
		// all pending requests have been cancelled
		return;
	}

	// now proceed with the first requested frame
	PhVideoBuffer *buffer = _requestedFrames.takeFirst();
	PhFrame frame = buffer->requestFrame();

	// resize the buffer if needed
	int bufferSize = avpicture_get_size(AV_PIX_FMT_BGRA, width(), height());
	if(bufferSize <= 0) {
		PHERR << "avpicture_get_size() returned" << bufferSize;
		return;
	}
	buffer->reuse(bufferSize);

	// clip to stream boundaries
	// NOTE(review): this clamps to frameLength(), which is one past the last
	// valid frame index — confirm whether frameLength() - 1 was intended
	// (the EOF branch below handles the overshoot, but with an error log).
	if(frame < 0)
		frame = 0;
	if (frame >= this->frameLength())
		frame = this->frameLength();

	// Stay with the same frame if the time has changed less than the time between two frames
	// Note that av_seek_frame will seek to the _closest_ frame, sometimes a little bit in the "future",
	// so it is necessary to use a little margin for the second comparison, otherwise a seek may
	// be performed on each call to decodeFrame
	if (frame == _currentFrame) {
		frameToRgb(_videoFrame, buffer);
		return;
	}

	// we need to perform a frame seek if the requested frame is:
	// 1) in the past
	// 2) after the next keyframe
	// how to know when the next keyframe is ??
	// -> for now we take a arbitrary threshold of 20 frames
	if((frame < _currentFrame) || (frame >= _currentFrame + 20)) {
		// seek to the closest keyframe in the past
		int flags = AVSEEK_FLAG_BACKWARD;
		int64_t timestamp = PhFrame_to_AVTimestamp(frame);
		PHDBG(24) << "seek:" << buffer << " " << _currentFrame << " " << frame - _currentFrame << " " << timestamp;
		av_seek_frame(_formatContext, _videoStream->index, timestamp, flags);
		// discard the decoder state from before the seek
		avcodec_flush_buffers(_videoStream->codec);
	}

	AVPacket packet;

	bool lookingForVideoFrame = true;
	while(lookingForVideoFrame) {
		int error = av_read_frame(_formatContext, &packet);
		switch(error) {
		case 0:
			if(packet.stream_index == _videoStream->index) {
				int frameFinished = 0;
				avcodec_decode_video2(_videoStream->codec, _videoFrame, &frameFinished, &packet);
				if(frameFinished) {
					// update the current position of the engine
					// (Note that it is best not to do use '_currentTime = time' here, because the seeking operation may
					// not be 100% accurate: the actual time may be different from the requested time. So a time drift
					// could appear.)
					_currentFrame = AVTimestamp_to_PhFrame(av_frame_get_best_effort_timestamp(_videoFrame));

					PHDBG(24) << frame << _currentFrame;

					if (frame < _currentFrame) {
						// something went wrong with the seeking
						// this is not going to work! we cannot go backward!
						// the loop will go until the end of the file, which is bad...
						// So stop here and just return what we have.
						PHERR << "current video time is larger than requested time... returning current frame!";
						frameToRgb(_videoFrame, buffer);
						lookingForVideoFrame = false;
					}

					// convert and emit the frame if this is the one that was requested
					if (frame == _currentFrame) {
						PHDBG(24) << "decoded!";
						frameToRgb(_videoFrame, buffer);
						lookingForVideoFrame = false;
					}
				}
				// if frame decode is not finished, let's read another packet.
			}
			else if(_audioStream && (packet.stream_index == _audioStream->index)) {
				// drain interleaved audio packets so the demuxer can advance
				int ok = 0;
				avcodec_decode_audio4(_audioStream->codec, _audioFrame, &ok, &packet);
				if(ok) {
					PHDBG(24) << "audio:" << _audioFrame->nb_samples;
				}
			}
			break;
		case AVERROR_INVALIDDATA:
		case AVERROR_EOF:
		default:
			{
				char errorStr[256];
				av_strerror(error, errorStr, 256);
				PHDBG(24) << frame << "error:" << errorStr;
				lookingForVideoFrame = false;
				break;
			}
		}

		//Avoid memory leak
		av_free_packet(&packet);
	}
}
/**
 * Initialize the audio output on the given device name, falling back to the
 * default output device when the name is empty.
 * @param deviceName The desired output device name (empty for the default).
 * @return True upon success, false otherwise.
 */
bool PhAudioOutput::init(QString deviceName)
{
	PHDBG(0) << deviceName;

	if(!PhAudio::init(deviceName)) {
		return false;
	}

	int deviceCount = Pa_GetDeviceCount();
	if( deviceCount <= 0 ) {
		PHDBG(0) << "ERROR: Pa_CountDevices returned " << deviceCount;
		return false;
	}

	PaStreamParameters streamParameters;
	streamParameters.device = Pa_GetDefaultOutputDevice();
	streamParameters.channelCount = 1;
	streamParameters.sampleFormat = paInt8;
	streamParameters.suggestedLatency = 0;
	streamParameters.hostApiSpecificStreamInfo = NULL;

	// Look for the desired device among the output-capable ones,
	// remembering whether any output device exists at all.
	bool isThereOutput = false;
	bool deviceFound = false;
	for(int i = 0; i < deviceCount; i++ ) {
		const PaDeviceInfo *deviceInfo;
		deviceInfo = Pa_GetDeviceInfo( i );
		if(deviceInfo->maxOutputChannels > 0 ) {
			isThereOutput = true;
			if(deviceName == QString::fromLatin1(deviceInfo->name)) {
				deviceFound = true;
				streamParameters.device = i;
				break;
			}
		}
	}
	if(!isThereOutput) {
		PHDBG(0) << "No output device";
		return false;
	}
	if(deviceName.length() and !deviceFound) {
		PHDBG(0) << "Desired output not found :" << deviceName;
		return false;
	}

#warning /// @todo use the settings for sample rate and frame per buffer
	PaError err = Pa_OpenStream(&_stream, NULL, &streamParameters, 48000, 1920, paNoFlag, audioCallback, this);
	if(err != paNoError) {
		PHDBG(0) << "Error while opening the stream : " << Pa_GetErrorText(err);
		return false;
	}

	err = Pa_StartStream( _stream );
	if(err != paNoError) {
		// Fixed copy-pasted message: this failure happens while *starting*.
		PHDBG(0) << "Error while starting the stream : " << Pa_GetErrorText(err);
		// NOTE(review): the stream opened above is not closed here —
		// confirm PhAudio::close() releases it, otherwise it leaks.
		return false;
	}

	PHDEBUG << Pa_GetDeviceInfo(streamParameters.device)->name << "is now open.";

	return true;
}
/**
 * Log the writer timecode whenever the writer time changes.
 * @param time The new writer time.
 */
void MidiToolWindow::onWriterTimeChanged(PhTime time)
{
	PhTimeCodeType tcType = _mtcWriter.timeCodeType();
	PHDBG(2) << PhTimeCode::getAverageFps(tcType) << "/" << PhTimeCode::stringFromTime(time, tcType);
}