void VideoRendererEVR::notifyResize(const QSize &size, Phonon::VideoWidget::AspectRatio aspectRatio, Phonon::VideoWidget::ScaleMode scaleMode) { if (!isActive()) { RECT dummyRect = { 0, 0, 0, 0}; ComPointer<IMFVideoDisplayControl> filterControl = getService<IMFVideoDisplayControl>(m_filter, MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl); filterControl->SetVideoPosition(0, &dummyRect); return; } const QSize vsize = videoSize(); internalNotifyResize(size, vsize, aspectRatio, scaleMode); RECT dstRectWin = { 0, 0, size.width(), size.height()}; // Resize the Stream output rect instead of the destination rect. // Hacky workaround for flicker in the areas outside of the destination rect // This way these areas don't exist MFVideoNormalizedRect streamOutputRect = { float(m_dstX) / float(size.width()), float(m_dstY) / float(size.height()), float(m_dstWidth + m_dstX) / float(size.width()), float(m_dstHeight + m_dstY) / float(size.height())}; ComPointer<IMFVideoMixerControl> filterMixer = getService<IMFVideoMixerControl>(m_filter, MR_VIDEO_MIXER_SERVICE, IID_IMFVideoMixerControl); ComPointer<IMFVideoDisplayControl> filterControl = getService<IMFVideoDisplayControl>(m_filter, MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl); filterMixer->SetStreamOutputRect(0, &streamOutputRect); filterControl->SetVideoPosition(0, &dstRectWin); }
// Paints the current decoded frame (d.image); falls back to the cached
// preview frame, and finally to a solid black frame when neither exists.
void WidgetRenderer::paintEvent(QPaintEvent *)
{
    DPTR_D(WidgetRenderer);
    // When scaling is NOT done in Qt, d.image is presumably written by another
    // (decoding) thread, so it must be locked for the whole paint.
    // NOTE(review): manual lock/unlock, not RAII — the lock must be taken
    // before QPainter is constructed and held until painting finishes.
    if (!d.scale_in_qt) {
        d.img_mutex.lock();
    }
    QPainter p(this);
    if (!d.image.isNull()) {
        if (d.image.size() == QSize(d.width, d.height)) {
            //d.preview = d.image;
            // Frame already matches the widget size: blit it 1:1.
            p.drawImage(QPoint(), d.image);
        } else {
            //qDebug("size not fit. may slow. %dx%d ==> %dx%d"
            //      , d.image.size().width(), d.image.size().height(), d.width, d.height);
            // Let QPainter scale the frame to the widget rect (slower path).
            p.drawImage(rect(), d.image);
            //what's the difference?
            //p.drawImage(QPoint(), d.image.scaled(d.width, d.height));
        }
    } else if (!d.preview.isNull()){
        // No current frame: repaint the last preview, scaled if necessary.
        if (d.preview.size() == QSize(d.width, d.height)) {
            p.drawImage(QPoint(), d.preview);
        } else {
            p.drawImage(rect(), d.preview);
        }
    } else {
        // Nothing to show yet: create and draw an all-black preview frame.
        d.preview = QImage(videoSize(), QImage::Format_RGB32);
        d.preview.fill(QColor(Qt::black));
        p.drawImage(QPoint(), d.preview);
    }
    if (!d.scale_in_qt) {
        d.img_mutex.unlock();
    }
}
// Preferred widget size: the native video size when known, otherwise a
// 640x480 default clipped to the available desktop geometry.
QSize AbstractVideoRenderer::sizeHint() const
{
    const QSize native = videoSize();
    if (!native.isNull()) {
        return native;
    }
    const QSize desktop = QApplication::desktop()->availableGeometry().size();
    return QSize(640, 480).boundedTo(desktop);
}
bool UBWindowsMediaVideoEncoder::start() { QString profile = UBFileSystemUtils::readTextFile(":/podcast/uniboard.prx"); profile.replace("{in.videoWidth}", QString("%1").arg(videoSize().width())); profile.replace("{in.videoHeight}", QString("%1").arg(videoSize().height())); profile.replace("{in.bitsPerSecond}", QString("%1").arg(videoBitsPerSecond())); profile.replace("{in.nanoSecondsPerFrame}", QString("%1").arg(10000000 / framesPerSecond())); qDebug() << profile; if(mRecordAudio) { mWaveRecorder = new UBWaveRecorder(this); bool audioAvailable = mWaveRecorder->init(audioRecordingDevice()) && mWaveRecorder->start(); if (!audioAvailable) { mWaveRecorder->deleteLater(); mWaveRecorder = 0; mRecordAudio = false; } } mWMVideo = new UBWindowsMediaFile(this); if(!mWMVideo->init(videoFileName().replace("/", "\\"), profile, framesPerSecond(), videoSize().width(), videoSize().height(), 32)) { mWMVideo->deleteLater(); return false; } if (mRecordAudio) { connect(mWaveRecorder, SIGNAL(newWaveBuffer(WAVEHDR*, long)), mWMVideo , SLOT(appendAudioBuffer(WAVEHDR*, long)), Qt::DirectConnection); connect(mWaveRecorder, SIGNAL(newWaveBuffer(WAVEHDR*, long)), this , SLOT(processAudioBuffer(WAVEHDR*, long)), Qt::DirectConnection); } mIsRecording = true; return true; }
// Reacts to a resize of the video widget: re-parents the DirectShow video
// window under the target widget if needed and updates the destination rect
// computed by internalNotifyResize().
void VideoRendererDefault::notifyResize(const QSize &size, Phonon::VideoWidget::AspectRatio aspectRatio, Phonon::VideoWidget::ScaleMode scaleMode)
{
    if (!isActive()) {
        // Inactive renderer: collapse the destination rect so nothing is drawn.
        ComPointer<IBasicVideo> basic(m_filter, IID_IBasicVideo);
        if (basic) {
            basic->SetDestinationPosition(0, 0, 0, 0);
        }
        return;
    }

    ComPointer<IVideoWindow> video(m_filter, IID_IVideoWindow);

    OAHWND owner;
    HRESULT hr = video->get_Owner(&owner);
    if (FAILED(hr)) {
        return;
    }

    const OAHWND newOwner = reinterpret_cast<OAHWND>(m_target->winId());
    if (owner != newOwner) {
        // Re-parent the renderer's window under the target widget and forward
        // its messages there; child styles keep it clipped to the widget.
        video->put_Owner(newOwner);
        video->put_MessageDrain(newOwner);
        video->put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
    }

    // Make sure the video window takes the whole size of the parent widget.
    video->SetWindowPosition(0, 0, size.width(), size.height());

    // Recomputes m_dstX / m_dstY / m_dstWidth / m_dstHeight for the new size.
    const QSize vsize = videoSize();
    internalNotifyResize(size, vsize, aspectRatio, scaleMode);

    ComPointer<IBasicVideo> basic(m_filter, IID_IBasicVideo);
    if (basic) {
        basic->SetDestinationPosition(m_dstX, m_dstY, m_dstWidth, m_dstHeight);
    }
}
void Player::forceResizeEvent() { QList<Tag*> videos, audios; //Player foreach(Tag *tag, tags) if(tag->player) { if((tag->player->isVideo) && (tag->player->isDisplayed())) videos << tag; else audios << tag; } //Space division QRect videoSpace(QPoint(0, 0), QSize(width(), height() - ui->globalFrameTransparent->height() - (qCeil((qreal)audios.count() / 2.) * 40))); QRect audioSpace(videoSpace.bottomLeft(), QSize(videoSpace.width(), height() - ui->globalFrameTransparent->height() - videoSpace.height())); //Distribution quint16 index = 0; foreach(Tag *video, videos) { QSize videoSize(videoSpace.width() / 2, videoSpace.height() / qCeil((qreal)videos.count() / 2)); if(video->player) video->player->setGeometry(QRect(videoSpace.topLeft() + QPoint((index % 2) * videoSize.width(), qFloor(index / 2) * videoSize.height()), videoSize)); index++; }
int FmDriveDetailsContent::getDataSizeByTraversePath( const QString &driveName, QList<FmDriveDetailsSize*> &detailsSizeList, volatile bool *isStopped ) { qint64 imageSize( 0 ); qint64 soundSize( 0 ); qint64 midpJavaSize( 0 ); qint64 nativeAppsSize( 0 ); qint64 videoSize( 0 ); qint64 documentsSize( 0 ); FmFileTypeRecognizer fileTypeRecognizer; QList<QDir> dirs; dirs.append( QDir( driveName ) ); // traverse the whole drive while (!dirs.isEmpty()) { QDir::Filters filter = QDir::NoDotAndDotDot | QDir::AllEntries; // do not summarize system and hidden files, these size will go into others category // if( isSysHiddenIncluded ) { // filter = filter | QDir::Hidden | QDir::System; // } QFileInfoList infoList = dirs.first().entryInfoList( filter ); for ( QFileInfoList::const_iterator it = infoList.begin(); it != infoList.end(); ++it ) { if ( *isStopped ){ return FmErrCancel; } if ( it->isFile() ) { FmFileTypeRecognizer::FileType fileType = fileTypeRecognizer.getType( it->absoluteFilePath() ); switch ( fileType ) { case FmFileTypeRecognizer::FileTypeImage: imageSize += it->size(); break; case FmFileTypeRecognizer::FileTypeTone: soundSize += it->size(); break; case FmFileTypeRecognizer::FileTypeJava: midpJavaSize += it->size(); break; case FmFileTypeRecognizer::FileTypeSisx: nativeAppsSize += it->size(); break; case FmFileTypeRecognizer::FileTypeVideo: videoSize += it->size(); break; case FmFileTypeRecognizer::FileTypeText: documentsSize += it->size(); break; default: // do not need handle other type break; } } else if ( it->isDir() ) { dirs.append( QDir( it->absoluteFilePath() ) ); } } dirs.removeFirst(); } // store result to detailsSizeList. 
detailsSizeList.append( new FmDriveDetailsSize( FmDriveDetailsSize::ETypeImages, imageSize ) ) ; detailsSizeList.append( new FmDriveDetailsSize( FmDriveDetailsSize::ETypeSoundFiles, soundSize ) ); detailsSizeList.append( new FmDriveDetailsSize( FmDriveDetailsSize::ETypeMidpJava, midpJavaSize ) ); detailsSizeList.append( new FmDriveDetailsSize( FmDriveDetailsSize::ETypeNativeApps, nativeAppsSize ) ); detailsSizeList.append( new FmDriveDetailsSize( FmDriveDetailsSize::ETypeVideos, videoSize ) ); detailsSizeList.append( new FmDriveDetailsSize( FmDriveDetailsSize::ETypeDocuments, documentsSize ) ); return FmErrNone; }
bool Writer::prepareExecuting() { if(!media::Writer::prepareExecuting()) return false; m_FFMpegBuffer.resize(8192); m_FFMpegIOContext = ::avio_alloc_context( m_FFMpegBuffer.data(), m_FFMpegBuffer.size(), 1, this, &Writer::_ffmpegReadPacket, &Writer::_ffmpegWritePacket, &Writer::_ffmpegSeek ); if(!m_FFMpegIOContext) { m_FFMpegBuffer.resize(0); return false; } if(::avformat_alloc_output_context2(&m_FFMpegFormatCtx, m_FFMpegOutputFormat, 0, 0) < 0) { ::av_freep(&m_FFMpegIOContext); m_FFMpegIOContext = 0; m_FFMpegBuffer.resize(0); return false; } if(m_ContainerFormat == kContainerFormatMPEG4) { //FFMpeg::av_opt_set_flag(m_FFMpegFormatCtx->priv_data, "frag_keyframe", "movflags"); } m_FFMpegFormatCtx->pb = m_FFMpegIOContext; //m_FFMpegFormatCtx->flags |= AVFMT_FLAG_CUSTOM_IO; ::AVCodecID srcCodecId = FFMpeg::avCodecIdFromVideoSampleKind(m_VideoInputPort.getSampleType().videoKind()); m_FFMpegVideoStream = ::avformat_new_stream(m_FFMpegFormatCtx, ::avcodec_find_encoder(srcCodecId)); if(!m_FFMpegVideoStream) { ::avformat_free_context(m_FFMpegFormatCtx); m_FFMpegFormatCtx = 0; ::av_freep(&m_FFMpegIOContext); m_FFMpegIOContext = 0; m_FFMpegBuffer.resize(0); return false; } m_FFMpegVideoStream->id = m_FFMpegFormatCtx->nb_streams-1; ::AVCodecContext *codecCtx = m_FFMpegVideoStream->codec; //codecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER; Vec2i const &videoSize = m_VideoInputPort.getVideoSampleSize(); float const videoSampleRate = m_VideoInputPort.getSampleRate(); codecCtx->codec_id = srcCodecId; codecCtx->codec_type = AVMEDIA_TYPE_VIDEO; codecCtx->width = videoSize(0); codecCtx->height = videoSize(1); codecCtx->time_base = (AVRational){1,static_cast<int>(videoSampleRate)}; codecCtx->bit_rate = 400000; codecCtx->gop_size = 5; codecCtx->max_b_frames = 1; codecCtx->profile = FF_PROFILE_H264_HIGH; codecCtx->pix_fmt = AV_PIX_FMT_YUV420P; //codecCtx->level = 41; codecCtx->sample_aspect_ratio = m_FFMpegVideoStream->sample_aspect_ratio; m_FFMpegVideoStream->time_base = 
codecCtx->time_base; m_FFMpegVideoStream->r_frame_rate = ::av_inv_q(codecCtx->time_base); m_FFMpegVideoStream->start_time = AV_NOPTS_VALUE; m_TestFile.open("test.mp4"); ::avformat_write_header(m_FFMpegFormatCtx, 0); return true; }