// Returns the decoded frame at `index`, decoding it on first access and
// serving subsequent requests from mCache.  The returned reference points
// into the cache and stays valid as long as the cache entry does.
CelFrame& CelFile::operator[] (size_t index)
{
    // index is size_t, so it can never be negative; the original also
    // asserted index >= 0, which is a tautology on an unsigned type.
    assert(index < numFrames());

    if(mCache.count(index))
        return mCache[index];

    CelFrame frame;
    // getFrame decodes directly into frame.mRawImage and returns the width;
    // height follows from the total pixel count.
    frame.mWidth = getFrame(mFrames[index], index, frame.mRawImage);
    frame.mHeight = frame.mRawImage.size() / frame.mWidth;

    // Insert once and return a reference to the cached copy in one step
    // (the original performed three separate lookups on the same key).
    CelFrame& cached = mCache[index];
    cached = frame;

    #ifdef CEL_DEBUG
    std::cout << "w: " << frame.mWidth << ", h: " << frame.mHeight << std::endl;
    #endif

    return cached;
}
//--------------------------------------------------------------------------- void Plots::Zoom_Move( int Begin ) { const int n = m_frameInterval.count(); const int from = qMax( Begin, 0 ); const int to = qMin( numFrames(), from + n ) - 1; setVisibleFrames( to - n + 1, to ); for ( size_t streamPos = 0; streamPos < m_fileInfoData->Stats.size(); streamPos++ ) if ( m_fileInfoData->Stats[streamPos] && m_plots[streamPos] ) { size_t type = m_fileInfoData->Stats[streamPos]->Type_Get(); for ( int group = 0; group < PerStreamType[type].CountOfGroups; group++ ) if (m_plots[streamPos][group]) m_plots[streamPos][group]->setAxisScale( QwtPlot::xBottom, m_timeInterval.from, m_timeInterval.to ); } m_scaleWidget->setScale( m_timeInterval.from, m_timeInterval.to); refresh(); m_scaleWidget->update(); replotAll(); }
// Loads (or fetches from the pixmap cache) the frame `frameIdx` of the
// current source, converting YUV input to RGB888 as needed.
// NOTE(review): the visible text ends after the cache-insert block without
// closing the function body -- the tail (presumably assigning the cached
// pixmap to p_displayImage) appears to have been lost; code below is kept
// exactly as found.
void FrameObject::loadImage(int frameIdx)
{
    // TODO:
    // this method gets called way too many times even
    // if just a single parameter was changed
    if (frameIdx==INT_INVALID || frameIdx >= numFrames())
    {
        // Out-of-range request: show nothing.
        p_displayImage = QPixmap();
        return;
    }

    if (p_source == NULL)
        return;

    // check if we have this frame index in our cache already
    CacheIdx cIdx(p_source->getName(), frameIdx);
    QPixmap* cachedFrame = frameCache.object(cIdx);
    if(cachedFrame == NULL)
    // load the corresponding frame from yuv file into the frame buffer
    {
        // add new QPixmap to cache and use its data buffer
        cachedFrame = new QPixmap();

        if (p_source->pixelFormat() != YUVC_24RGBPixelFormat)
        {
            // read YUV444 frame from file - 16 bit LE words
            p_source->getOneFrame(&p_tmpBufferYUV444, frameIdx);

            // if requested, do some YUV math
            if( doApplyYUVMath() )
                applyYUVMath(&p_tmpBufferYUV444, p_width, p_height, p_source->pixelFormat());

            // convert from YUV444 (planar) - 16 bit words to RGB888 (interleaved) color format (in place)
            convertYUV2RGB(&p_tmpBufferYUV444, &p_PixmapConversionBuffer, YUVC_24RGBPixelFormat, p_source->pixelFormat());
        }
        else
        {
            // read RGB24 frame from file
            p_source->getOneFrame(&p_PixmapConversionBuffer, frameIdx);
        }

        if (p_PixmapConversionBuffer.size() == 0)
        {
            // Conversion failed. This can happen for example when the pixel format could not be determined.
            QString pixelFmtName = p_source->pixelFormatList()[p_source->pixelFormat()].name();
            QString errTxt = "Error converting image from pixel format type " + pixelFmtName + ".";
            setInfo(errTxt, true);
            p_displayImage = QPixmap();
            return;
        }

        // add this frame into our cache, use MBytes as cost
        // NOTE(review): frames under 1 MiB get cost 0 here -- confirm that is
        // the intended QCache behavior.
        int sizeInMB = p_PixmapConversionBuffer.size() >> 20;

        // Convert the image in p_PixmapConversionBuffer to a QPixmap
        QImage tmpImage((unsigned char*)p_PixmapConversionBuffer.data(),p_width,p_height,QImage::Format_RGB888);
        //QImage tmpImage((unsigned char*)p_PixmapConversionBuffer.data(),p_width,p_height,QImage::Format_RGB30);
        cachedFrame->convertFromImage(tmpImage);

        frameCache.insert(cIdx, cachedFrame, sizeInMB);
    }
// Opens `filepath` through libsndfile in the requested mode and prepares the
// read buffers: one buffer when the whole file fits in FRAMES_PER_FILE_BUFFER
// frames, otherwise a double-buffer refilled by a background worker thread.
AudioFile::AudioFile(const char *filepath, AudioFileMode mode)
{
    // Map our mode enum onto libsndfile's open mode.  Initialize to a sane
    // default so sfmode can never be read uninitialized if an unexpected
    // enum value slips through (the original switch had no default path).
    int sfmode = SFM_READ;
    switch (mode) {
        case AudioFileModeReadOnly:
            sfmode = SFM_READ;
            break;
        case AudioFileModeWriteOnly:
            sfmode = SFM_WRITE;
            break;
        case AudioFileModeReadWrite:
            sfmode = SFM_RDWR;
            break;
    }

    _pimpl = new pimpl;
    _pimpl->mode = mode;
    // NOTE(review): sf_open returns NULL on failure; the calls below assume
    // success -- confirm callers check validity before use.
    _pimpl->sndfile = sf_open(filepath, sfmode, &_pimpl->sfInfo);
    _pimpl->totalSize = _pimpl->sfInfo.channels * _pimpl->sfInfo.frames;
    _pimpl->readIndex = 0;
    _pimpl->currentBufIndex = 0;
    _pimpl->samplesRead = 0;

    size_t framesPerBuffer = FRAMES_PER_FILE_BUFFER;
    if (framesPerBuffer > numFrames()) {
        // Whole file fits in a single buffer: no background refilling needed.
        framesPerBuffer = numFrames();
        _pimpl->bufferSize = framesPerBuffer * numChannels();
        _pimpl->needsBuffer = false;
        _pimpl->bufs[0] = new float[_pimpl->bufferSize];
        _pimpl->bufs[1] = NULL;
    } else {
        // Double-buffer: one buffer is consumed while the worker refills the other.
        _pimpl->bufferSize = framesPerBuffer * numChannels();
        _pimpl->bufs[0] = new float[_pimpl->bufferSize];
        _pimpl->bufs[1] = new float[_pimpl->bufferSize];
        _pimpl->needsBuffer = true;
    }

    // Pre-fill the first buffer and leave the file positioned right after it.
    sf_seek(_pimpl->sndfile, 0, SF_SEEK_SET);
    _pimpl->framesBuffered = sf_read_float(_pimpl->sndfile, &(_pimpl->bufs[0][0]), _pimpl->bufferSize) / numChannels();
    sf_seek(_pimpl->sndfile, _pimpl->framesBuffered, SF_SEEK_SET);

    // Kick off the background thread that keeps the buffers topped up.
    AUtilDispatchThread(file_buffer_worker, _pimpl);
}
// True when the grabber is positioned on the final frame of the sequence.
bool
pcl::ImageGrabberBase::atLastFrame () const
{
  return (impl_->cur_frame_ == numFrames () - 1);
}
// Configure the vector to track the end of the data source.
// ReqF0 == -1 means "count back from the most recent frame".
void KstRVector::setFromEnd() {
  ReqF0 = -1;
  // A request of at least 2 frames is kept as-is.
  if (ReqNF >= 2) {
    return;
  }
  // Otherwise request everything currently available...
  ReqNF = numFrames();
  if (ReqNF < 2) {
    // ...and if even that is under 2 frames, pin the start to frame 0.
    ReqF0 = 0;
  }
}
// Serializes the animation to `ofs` as little-endian unsigned 32-bit fields,
// in this exact order: type ID, total size, looping flag, loop start,
// loop end, frame count, frame-data size -- followed by each frame's own data.
// NOTE(review): field order and widths are the on-disk format; do not reorder.
void SerializableAnimation::write(std::ostream& ofs) {
    // Type ID.
    ByteConversion::toBytes(typeID_, byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    // Total serialized size.
    ByteConversion::toBytes(totalSize_, byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    // Looping flag stored as a 32-bit integer (0 or 1).
    ByteConversion::toBytes(static_cast<int>(looping_), byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    // Loop boundaries.
    ByteConversion::toBytes(loopStart_, byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    ByteConversion::toBytes(loopEnd_, byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    // Number of frames that follow.
    ByteConversion::toBytes(numFrames(), byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    // Combined size of all frame payloads.
    ByteConversion::toBytes(calculateFrameDataSize(), byteBuffer, ByteSizes::uint32Size, EndiannessTypes::little, SignednessTypes::nosign);
    ofs.write(byteBuffer, ByteSizes::uint32Size);
    // Each frame serializes itself after the header fields.
    for (SerializableAnimationFrameCollection::iterator it = frames_.begin(); it != frames_.end(); it++) {
        (*it)->write(ofs);
    }
}
std::string FormatDescriptor::framesToString() const { std::ostringstream out; out << formatindent << "Frames (" << numFrames() << "):" << std::endl; std::vector<USBDescriptorPtr>::const_iterator i; for (i = frames.begin(); i != frames.end(); i++) { out << *i; } return out.str(); }
// (Re)set the NCameraSystem -- which clears the frames as well. void MultiFrame::resetCameraSystemAndFrames( const cameras::NCameraSystem & cameraSystem) { cameraSystem_ = cameraSystem; frames_.clear(); // erase -- for safety frames_.resize(cameraSystem.numCameras()); // copy cameras for(size_t c = 0; c<numFrames(); ++c){ frames_[c].setGeometry(cameraSystem.cameraGeometry(c)); } }
int nulVersion1FormatReader::previousFrame() { if(isPreviousFrameAvailable()) { --m_currentFrameIndex; qDebug() << "Frame: " << m_currentFrameIndex << " Seeking to:" << m_currentFrameIndex*m_frameLength << endl; fileStream.seekg((m_currentFrameIndex-1)*m_frameLength,std::ios_base::beg); qDebug() << fileStream.tellg(); fileStream >> rawImageBuffer; emit rawImageChanged(&rawImageBuffer); emit frameChanged(currentFrame(),numFrames()); } return currentFrame(); }
// Advances one frame, if any remain, reads it sequentially from the stream,
// and notifies listeners.  Returns the current frame index either way.
int nulVersion1FormatReader::nextFrame()
{
    if(!isNextFrameAvailable())
        return currentFrame();

    m_currentFrameIndex++;
    // The stream is already positioned at the next frame, so just read on.
    fileStream >> rawImageBuffer;
    qDebug() << fileStream.tellg();

    emit rawImageChanged(&rawImageBuffer);
    emit frameChanged(currentFrame(),numFrames());
    return currentFrame();
}
// Evicts this object's frames from the shared pixmap cache, but only when no
// other object is using the same source (duplicateList tracks sharers).
void FrameObject::clearCurrentCache()
{
    if (p_source != NULL)
    {
        if (duplicateList.count(p_source->getName()) <= 1)
        {
            // Valid frame indices run [p_startFrame, numFrames()) -- see the
            // range check in loadImage().  The original loop used <= and
            // probed one index past the end.
            for (int frameIdx = p_startFrame; frameIdx < numFrames(); frameIdx++)
            {
                CacheIdx cIdx(p_source->getName(), frameIdx);
                if (frameCache.contains(cIdx))
                    frameCache.remove(cIdx);
            }
        }
    }
}
// Opens `filename`, derives the per-frame byte length from the width/height
// header ints, and computes the total frame count from the file size.
// Returns numFrames() -- NOTE(review): if the open fails, this returns
// whatever frame count closeFile() left behind; confirm closeFile() resets it.
int nulVersion1FormatReader::openFile(const QString& filename)
{
    m_fileInformation.setFile(filename);
    closeFile();
    fileStream.open(filename.toStdString().c_str(),std::ios_base::in|std::ios_base::binary);
    if(fileGood())
    {
        fileStream.seekg(0,std::ios_base::beg);
        // Peek the leading width/height ints, then rewind for normal reading.
        int x,y;
        fileStream.read(reinterpret_cast<char*>(&x), sizeof(x));
        fileStream.read(reinterpret_cast<char*>(&y), sizeof(y));
        fileStream.seekg(0,std::ios_base::beg);
        // Frame layout: width + height ints, x*y pixel ints, one double
        // (presumably a timestamp -- TODO confirm).
        m_frameLength = (2*sizeof(x) + x*y*sizeof(int) + sizeof(double));
        m_totalFrames = m_fileInformation.size() / m_frameLength;
    }
    return numFrames();
}
void pcl::ImageGrabberBase::ImageGrabberImpl::loadNextCloud () { if (cur_frame_ >= numFrames ()) { if (repeat_) cur_frame_ = 0; else { valid_ = false; return; } } valid_ = getCloudAt (cur_frame_, next_cloud_, origin_, orientation_, focal_length_x_, focal_length_y_, principal_point_x_, principal_point_y_); cur_frame_++; }
void StatisticsObject::loadImage(int frameIdx) { if (frameIdx==INT_INVALID || frameIdx >= numFrames()) { p_displayImage = QPixmap(); return; } // create empty image QImage tmpImage(internalScaleFactor()*width(), internalScaleFactor()*height(), QImage::Format_ARGB32); tmpImage.fill(qRgba(0, 0, 0, 0)); // clear with transparent color p_displayImage.convertFromImage(tmpImage); // draw statistics drawStatisticsImage(frameIdx); p_lastIdx = frameIdx; }
//--------------------------------------------------------------------------- void Plots::zoomXAxis( ZoomTypes zoomType ) { m_zoomType = zoomType; if ( zoomType == ZoomIn ) m_zoomFactor++; else if ( zoomType == ZoomOut && m_zoomFactor ) m_zoomFactor--; else if ( zoomType == ZoomOneToOne) m_zoomFactor = 0; qDebug() << "m_zoomFactor: " << m_zoomFactor; int numVisibleFrames = m_fileInfoData->Frames_Count_Get() >> m_zoomFactor; if(m_zoomType == ZoomOneToOne) { numVisibleFrames = plot(0, 0)->canvas()->contentsRect().width(); m_zoomFactor = log(m_fileInfoData->Frames_Count_Get() / numVisibleFrames) / log(2); } int to = qMin( framePos() + numVisibleFrames / 2, numFrames() ); int from = qMax( 0, to - numVisibleFrames ); if ( to - from < numVisibleFrames) to = from + numVisibleFrames; setVisibleFrames( from, to ); for ( size_t streamPos = 0; streamPos < m_fileInfoData->Stats.size(); streamPos++ ) if ( m_fileInfoData->Stats[streamPos] && m_plots[streamPos] ) { size_t type = m_fileInfoData->Stats[streamPos]->Type_Get(); for ( int group = 0; group < PerStreamType[type].CountOfGroups; group++ ) if (m_plots[streamPos][group]) m_plots[streamPos][group]->setAxisScale( QwtPlot::xBottom, m_timeInterval.from, m_timeInterval.to ); } m_scaleWidget->setScale( m_timeInterval.from, m_timeInterval.to); refresh(); m_scaleWidget->update(); replotAll(); }
//--------------------------------------------------------------------------- void Plots::onCurrentFrameChanged() { // position of the current frame has changed if ( isZoomed() ) { const int n = m_frameInterval.count(); const int from = qBound( 0, framePos() - n / 2, numFrames() - n ); const int to = from + n - 1; if ( from != m_frameInterval.from ) { setVisibleFrames( from, to ); replotAll(); } } setCursorPos( framePos() ); }
/*!
    Gets a string representation of the Sequence.  Set \a frameInfo to true if
    you want it to print frame info as well (default is false)
*/
QString MSequence::str(bool frameInfo) {
    // Simple info: "<name>[L] <n>f=<duration>ms"
    QString s = name;
    if (looping)
        s += "[L]";
    s += " ";
    s += QString::number(numFrames()) + "f=";
    s += QString::number(__duration) + "ms";

    // They want frame info? give them frame info
    if (frameInfo) {
        s += ": [";
        // The original hoisted `iter` out of the loop and left a no-op
        // `for (iter; ...)` init expression; initialize it in the header.
        for (auto iter = __frames.begin(); iter != __frames.end(); ++iter) {
            s += iter->frame.name + ":" + QString::number(iter->delay) + ", ";
        }
        s.chop(2);   // drop the trailing ", "
        s += "]";
    }
    return s;
}
// Constructs a listener attached to `spatializer` in an uncompiled state,
// then forwards the block size to the numFrames() member.
// NOTE(review): numFrames(int) is used here as a setter -- presumably sizing
// internal processing buffers; confirm against its definition.
Listener::Listener(int numFrames_, Spatializer *spatializer)
    : mSpatializer(spatializer), mIsCompiled(false) {
  numFrames(numFrames_);
}
// Seeks to the position expressed as a fraction of the whole clip.
// `ratio` must lie in [0.0, 1.0]; returns whatever seekToFrame reports.
bool VideoPlayer::seekToRatio(double ratio)
{
    assert(ratio >= 0.0 && ratio <= 1.0);
    // Map the fraction onto a whole frame index, rounding down.
    const double target = std::floor(ratio * numFrames());
    return seekToFrame(static_cast<size_t>(target));
}
// Bounds-checked access to the frame descriptor at `frameindex`.
// Throws std::length_error when the index is past the last frame.
const USBDescriptorPtr& FormatDescriptor::operator[](size_t frameindex) const {
    if (frameindex < numFrames()) {
        return frames[frameindex];
    }
    throw std::length_error("frameindex outside frame range");
}
// A next frame exists only for a healthy stream that has frames and whose
// cursor has not yet reached the final one.
bool nulVersion1FormatReader::isNextFrameAvailable()
{
    if (!fileGood())
        return false;
    return (numFrames() > 0) && (currentFrame() < numFrames());
}
// The first frame is available as soon as a healthy file contains any frames.
bool nulVersion1FormatReader::isFirstFrameAvailable()
{
    if (!fileGood())
        return false;
    return numFrames() > 0;
}
// Current playback position as a fraction of the clip length.
// Returns 0.0 for an empty clip; the original divided by numFrames()
// unconditionally and produced NaN/inf when the clip had no frames.
double VideoPlayer::getFramePosRatio()
{
    const double total = static_cast<double>(numFrames());
    if (total <= 0.0)
        return 0.0;
    return static_cast<double>(getFramePosIndex()) / total;
}
// QIODevice read hook: synthesizes up to maxSize bytes of 16-bit audio for
// the current transmission.  Runs a small state machine: Synchronizing emits
// leading silence, Active generates the FSK tones (with optional frequency
// spread and fade-out) followed by an optional keyed CW ID, Idle emits
// nothing.  Returns the number of bytes actually produced.
// NOTE(review): the static locals (j0, toneFrequency0) persist across calls
// and transmissions -- this routine is not reentrant.
qint64 Modulator::readData (char * data, qint64 maxSize)
{
  static int j0=-1;
  static double toneFrequency0;
  double toneFrequency;
  if(maxSize==0) return 0;
  Q_ASSERT (!(maxSize % qint64 (bytesPerFrame ()))); // no torn frames
  Q_ASSERT (isOpen ());
  // NB: this local deliberately shadows nothing -- `numFrames` here is the
  // frame count of THIS buffer request, not a member accessor.
  qint64 numFrames (maxSize / bytesPerFrame ());
  qint16 * samples (reinterpret_cast<qint16 *> (data));
  qint16 * end (samples + numFrames * (bytesPerFrame () / sizeof (qint16)));
  qint64 framesGenerated (0);
  // qDebug () << "Modulator: " << numFrames << " requested, m_ic = " << m_ic << ", tune mode is " << m_tuning;
  // qDebug() << "C" << maxSize << numFrames << bytesPerFrame();
  switch (m_state)
    {
    case Synchronizing:
      {
        if (m_silentFrames)  // send silence up to first second
          {
            // NOTE(review): the loop fills the WHOLE buffer with silence but
            // only m_silentFrames are counted/consumed -- confirm intended.
            framesGenerated = qMin (m_silentFrames, numFrames);
            for ( ; samples != end; samples = load (0, samples))
              {
                // silence
              }
            m_silentFrames -= framesGenerated;
            return framesGenerated * bytesPerFrame ();
          }
        Q_EMIT stateChanged ((m_state = Active));
        m_cwLevel = false;
        m_ramp = 0;    // prepare for CW wave shaping
      }
      // fall through
    case Active:
      {
        unsigned isym (m_tuning ? 0 : m_ic / (4.0 * m_nsps)); // Actual fsample=48000
        if (isym >= m_symbolsLength && icw[0] > 0) // start CW condition
          {
            // Output the CW ID: key a single carrier at m_frequency, shaping
            // the edges with m_ramp to avoid clicks.
            m_dphi = m_twoPi * m_frequency / m_frameRate;
            unsigned const ic0 = m_symbolsLength * 4 * m_nsps;
            unsigned j (0);
            while (samples != end)
              {
                j = (m_ic - ic0) / m_nspd + 1; // symbol of this sample
                bool level {bool (icw[j])};
                m_phi += m_dphi;
                if (m_phi > m_twoPi) m_phi -= m_twoPi;
                qint16 sample ((SOFT_KEYING ? qAbs (m_ramp - 1) : (m_ramp ? 32767 : 0)) * qSin (m_phi));
                if (int (j) <= icw[0] && j < NUM_CW_SYMBOLS) // stop condition
                  {
                    samples = load (postProcessSample (sample), samples);
                    ++framesGenerated;
                    ++m_ic;
                  }
                else
                  {
                    // CW ID finished -- transmission complete.
                    Q_EMIT stateChanged ((m_state = Idle));
                    return framesGenerated * bytesPerFrame ();
                  }
                // adjust ramp
                if ((m_ramp != 0 && m_ramp != std::numeric_limits<qint16>::min ()) || level != m_cwLevel)
                  {
                    // either ramp has terminated at max/min or direction
                    // has changed
                    m_ramp += RAMP_INCREMENT; // ramp
                  }
                // if (m_cwLevel != level)
                //   {
                //     qDebug () << "@m_ic:" << m_ic << "icw[" << j << "] =" << icw[j] << "@" << framesGenerated << "in numFrames:" << numFrames;
                //   }
                m_cwLevel = level;
              }
            return framesGenerated * bytesPerFrame ();
          }
        double const baud (12000.0 / m_nsps);
        // fade out parameters (no fade out for tuning)
        unsigned const i0 = m_tuning ? 999 * m_nsps : (m_symbolsLength - 0.017) * 4.0 * m_nsps;
        unsigned const i1 = m_tuning ? 999 * m_nsps : m_symbolsLength * 4.0 * m_nsps;
        for (unsigned i = 0; i < numFrames && m_ic <= i1; ++i)
          {
            isym = m_tuning ? 0 : m_ic / (4.0 * m_nsps); //Actual fsample=48000
            if (isym != m_isym0)
              {
                // New symbol: retune the NCO to the symbol's tone frequency.
                // qDebug () << "@m_ic:" << m_ic << "itone[" << isym << "] =" << itone[isym] << "@" << i << "in numFrames:" << numFrames;
                if(m_toneSpacing==0.0)
                  {
                    toneFrequency0=m_frequency + itone[isym]*baud;
                  }
                else
                  {
                    toneFrequency0=m_frequency + itone[isym]*m_toneSpacing;
                  }
                m_dphi = m_twoPi * toneFrequency0 / m_frameRate;
                m_isym0 = isym;
              }
            // Optional frequency spread, re-randomized every 480 samples.
            int j=m_ic/480;
            if(m_fSpread>0.0 and j!=j0)
              {
                float x1=(float)rand()/RAND_MAX;
                float x2=(float)rand()/RAND_MAX;
                toneFrequency = toneFrequency0 + 0.5*m_fSpread*(x1+x2-1.0);
                m_dphi = m_twoPi * toneFrequency / m_frameRate;
                j0=j;
              }
            m_phi += m_dphi;
            if (m_phi > m_twoPi) m_phi -= m_twoPi;
            // Exponential fade-out near the end of the symbol stream.
            if (m_ic > i0) m_amp = 0.98 * m_amp;
            if (m_ic > i1) m_amp = 0.0;
            samples = load (postProcessSample (m_amp * qSin (m_phi)), samples);
            ++framesGenerated;
            ++m_ic;
          }
        if (m_amp == 0.0) // TODO G4WJS: compare double with zero might not be wise
          {
            if (icw[0] == 0)
              {
                // no CW ID to send
                Q_EMIT stateChanged ((m_state = Idle));
                return framesGenerated * bytesPerFrame ();
              }
            m_phi = 0.0;
          }
        // done for this chunk - continue on next call
        return framesGenerated * bytesPerFrame ();
      }
      // fall through
    case Idle:
      break;
    }
  Q_ASSERT (Idle == m_state);
  return 0;
}
// Constructs an audio scene: speed of sound 340 (presumably m/s -- confirm
// units), block-wise (not per-sample) processing, then forwards the block
// size to the numFrames() member.
// NOTE(review): numFrames(int) is used here as a setter -- mNumFrames starts
// at 0 and the call presumably establishes the real value; confirm.
AudioScene::AudioScene(int numFrames_)
    : mNumFrames(0), mSpeedOfSound(340), mPerSampleProcessing(false) {
  numFrames(numFrames_);
}
// The view counts as zoomed whenever the visible window holds fewer frames
// than the whole file contains.
bool Plots::isZoomed() const
{
    return numFrames() > m_frameInterval.count();
}
//--------------------------------------------------------------------------- Plots::Plots( QWidget *parent, FileInformation* fileInformation ) : QWidget( parent ), m_zoomFactor ( 0 ), m_fileInfoData( fileInformation ), m_dataTypeIndex( Plots::AxisSeconds ) { setlocale(LC_NUMERIC, "C"); QGridLayout* layout = new QGridLayout( this ); layout->setSpacing( 1 ); layout->setContentsMargins( 0, 0, 0, 0 ); // bottom scale m_scaleWidget = new PlotScaleWidget(); m_scaleWidget->setFormat( Plots::AxisTime ); setVisibleFrames( 0, numFrames() - 1 ); // plots and legends m_plots = new Plot**[m_fileInfoData->Stats.size()]; m_plotsCount = 0; for ( size_t streamPos = 0; streamPos < m_fileInfoData->Stats.size(); streamPos++ ) { if (m_fileInfoData->Stats[streamPos]) { size_t type = m_fileInfoData->Stats[streamPos]->Type_Get(); size_t countOfGroups = PerStreamType[type].CountOfGroups; m_plots[streamPos] = new Plot*[countOfGroups + 1]; //+1 for axix for ( size_t group = 0; group < countOfGroups; group++ ) { if (m_fileInfoData->ActiveFilters[PerStreamType[type].PerGroup[group].ActiveFilterGroup]) { Plot* plot = new Plot( streamPos, type, group, fileInformation, this ); plot->addGuidelines(m_fileInfoData->BitsPerRawSample()); if(type == Type_Video) adjustGroupMax(group, m_fileInfoData->BitsPerRawSample()); // we allow to shrink the plot below height of the size hint plot->setSizePolicy( QSizePolicy::MinimumExpanding, QSizePolicy::Expanding ); plot->setAxisScaleDiv( QwtPlot::xBottom, m_scaleWidget->scaleDiv() ); initYAxis( plot ); updateSamples( plot ); connect( plot, SIGNAL( cursorMoved( int ) ), SLOT( onCursorMoved( int ) ) ); plot->canvas()->installEventFilter( this ); layout->addWidget( plot, m_plotsCount, 0 ); layout->addWidget( plot->legend(), m_plotsCount, 1 ); m_plots[streamPos][group] = plot; m_plotsCount++; qDebug() << "g: " << plot->group() << ", t: " << plot->type() << ", m_plotsCount: " << m_plotsCount; } else { m_plots[streamPos][group] = NULL; } } } else { 
m_plots[streamPos]=NULL; } } layout->addWidget( m_scaleWidget, m_plotsCount, 0, 1, 2 ); // combo box for the axis format XAxisFormatBox* xAxisBox = new XAxisFormatBox(); xAxisBox->setCurrentIndex( Plots::AxisTime ); connect( xAxisBox, SIGNAL( currentIndexChanged( int ) ), this, SLOT( onXAxisFormatChanged( int ) ) ); int axisBoxRow = layout->rowCount() - 1; #if 1 // one row below to have space enough for bottom scale tick labels layout->addWidget( xAxisBox, m_plotsCount + 1, 1 ); #else layout->addWidget( xAxisBox, layout_y, 1 ); #endif layout->setColumnStretch( 0, 10 ); layout->setColumnStretch( 1, 0 ); m_scaleWidget->setScale( m_timeInterval.from, m_timeInterval.to); setCursorPos( framePos() ); }