Example #1
void VideoPlayer::writeVideoInfo(const Common::String &file, int16 varX, int16 varY,
		int16 varFrames, int16 varWidth, int16 varHeight) {
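	// Open the video in a non-primary slot just to query its properties,
	// write them into the script variables, then close it again.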

	Properties properties;

	int slot = openVideo(false, file, properties);
	if (slot >= 0) {
		Video &video = _videoSlots[slot];

		int16 x = -1, y = -1, width = -1, height = -1;

		x      = video.decoder->getDefaultX();
		y      = video.decoder->getDefaultY();
		width  = video.decoder->getWidth();
		height = video.decoder->getHeight();

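		// A value of 0xFFFFFFFF in the X variable requests the coordinates of frame 1 instead of the decoder defaults.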
		if (VAR_OFFSET(varX) == 0xFFFFFFFF)
			video.decoder->getFrameCoords(1, x, y, width, height);

		WRITE_VAR_OFFSET(varX     , x);
		WRITE_VAR_OFFSET(varY     , y);
		WRITE_VAR_OFFSET(varFrames, video.decoder->getFrameCount());
		WRITE_VAR_OFFSET(varWidth , width);
		WRITE_VAR_OFFSET(varHeight, height);

		closeVideo(slot);

	} else {
		WRITE_VAR_OFFSET(varX     , (uint32) -1);
		WRITE_VAR_OFFSET(varY     , (uint32) -1);
		WRITE_VAR_OFFSET(varFrames, (uint32) -1);
		WRITE_VAR_OFFSET(varWidth , (uint32) -1);
		WRITE_VAR_OFFSET(varHeight, (uint32) -1);
	}
}
Example #2
MainWindow::MainWindow (QWidget *parent)
    : QMainWindow (parent)
    , ui (new Ui::MainWindow)
{
    // Create the UI.
    ui->setupUi (this);

    // This is required for SDL embedding.
    ui->centralWidget->setAttribute (Qt::WA_NativeWindow);

    // These use the icon theme on Linux, with fallbacks to the icons specified in QtDesigner for other platforms.
    ui->actionOpen->setIcon (QIcon::fromTheme ("document-open", ui->actionOpen->icon ()));
    ui->actionPlay->setIcon (QIcon::fromTheme ("media-playback-start", ui->actionPlay->icon ()));
    ui->actionPause->setIcon (QIcon::fromTheme ("media-playback-pause", ui->actionPause->icon ()));

    // Connect UI signals.
    connect (ui->actionOpen, SIGNAL(triggered()), this, SLOT(openVideo()));
    connect (ui->actionPlay, SIGNAL(triggered()), this, SLOT(play()));
    connect (ui->actionPause, SIGNAL(triggered()), this, SLOT(pause()));

    // Create MLT controller and connect its signals.
    mlt = new MltController (ui->centralWidget);
    connect (mlt, SIGNAL(frameReceived (void*, unsigned)), this, SLOT(onShowFrame (void*, unsigned)));
#ifdef Q_WS_MAC
    gl = new GLWidget (this);
    QVBoxLayout *layout = new QVBoxLayout;
    layout->addWidget (gl);
    layout->setMargin (0);
    ui->centralWidget->setLayout (layout);
    connect (this, SIGNAL (showImageSignal (QImage)), gl, SLOT (showImage(QImage)));
#endif
}
Example #3
void VideoPlayer::setSource(QString str)
{
    stop();
    // Keep the 8-bit copy alive for the whole call instead of strcpy'ing it
    // into a fixed-size buffer, which could overflow on long file names.
    QByteArray ch = str.toLocal8Bit();
    if (openVideo(ch.data()))
    {
        currenttime = 0;
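        // Read the first packet of the newly opened stream.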
        av_read_frame(pFormatCtx, &nextPacket);
        if (curType == VideoType)
        {

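            // Hand the codec context and the shared decode mutex to each of the three decoder threads.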
            decodeVideoThread->setAVCodecContext(pCodecCtx);
            decodeVideoThread->setMutex(decodeVideoMutex);

            decodeVideoThread_2->setAVCodecContext(pCodecCtx);
            decodeVideoThread_2->setMutex(decodeVideoMutex);

            decodeVideoThread_3->setAVCodecContext(pCodecCtx);
            decodeVideoThread_3->setMutex(decodeVideoMutex);

        }
    }
    else
    {
        fprintf(stderr,"open %s erro!\n",ch);
    }
}
Example #4
void matlabOpenVideo(char *filename) {
  pFormatCtx = openVideo(filename);
  videoStream = firstVideoStream(pFormatCtx);
  pCodecCtx = getCodec(pFormatCtx,videoStream);
  pFrame = avcodec_alloc_frame();
  pFrameRGB24 = avcodec_alloc_frame();
  if(!pFrameRGB24 || !pFrame) {
    mexErrMsgTxt("error: Can't allocate frame!");
  }
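  // Allocate a raw RGB24 buffer and attach it to the output picture.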
  buffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_RGB24,
						   pCodecCtx->width,
						   pCodecCtx->height) *
				sizeof(uint8_t));
  avpicture_fill((AVPicture *)pFrameRGB24, buffer, PIX_FMT_RGB24,
		 pCodecCtx->width, pCodecCtx->height);
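  // Create a software-scaling context for converting decoded frames to RGB24.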
  img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
				   pCodecCtx->pix_fmt,
				   pCodecCtx->width, pCodecCtx->height,
				   PIX_FMT_RGB24, SWS_BICUBIC,
				   NULL, NULL, NULL);
  videoFinished = 0;
  frame = 0;
  av_init_packet(&packet);
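  // Fetch the first frame.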
  matlabNextFrame();
}
Example #5
void Widget::initVideo(){
    bool sucesso = false;
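    // Probe devices 0-9 and list every camera that can be opened in the combo box.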
    for(int i = 0; i < 10; i++)
    {
        sucesso = openVideo(i);
        if(sucesso)
            ui->selectVideoComboBox->addItem(QString("Câmera") + QString::number(i));
    }
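    // Re-open the first device that works and keep it as the active source.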
    for(int i = 0; i < 10; i++)
    {
        sucesso = openVideo(i);
        if(sucesso)
            break;
    }
    if(sucesso == false)
    {
        reportBad("Nenhum vídeo de 0 à 10 pode ser inicializado!");
        this->clock->stop();
    }
}
Example #6
void FFMPEGInvoker::send(const SendRequest& req) {
    SendRequest reqCopy = req;

    if (iequals(req.name, "render.start")) {
        // create a new encoding context
        int ret;
        EncodingContext* ctx = new EncodingContext();
        tthread::lock_guard<tthread::recursive_mutex> lock(ctx->mutex);

        std::string context;
        Event::getParam(req.params, "context", context);

        ctx->extension = "mpeg";
        Event::getParam(req.params, "format", ctx->extension);

        Event::getParam(req.params, "width", ctx->width);
        Event::getParam(req.params, "height", ctx->height);

        if (!ctx->width || !ctx->height)
            return;

        ctx->filename = URL::getTmpFilename();

        /* allocate the output media context */
        avformat_alloc_output_context2(&ctx->formatCtx, NULL, ctx->extension.c_str(), ctx->filename.c_str());
        if (!ctx->formatCtx) {
            printf("Could not deduce output format from file extension: using MPEG.\n");
            avformat_alloc_output_context2(&ctx->formatCtx, NULL, "mpeg", ctx->filename.c_str());
        }
        if (!ctx->formatCtx) {
            return;
        }
        ctx->format = ctx->formatCtx->oformat;

        /* Add the audio and video streams using the default format codecs
         * and initialize the codecs. */
        ctx->videoStream = NULL;

        if (ctx->format->video_codec != AV_CODEC_ID_NONE) {
            ctx->videoStream = addStream(ctx, ctx->formatCtx, &ctx->videoCodec, ctx->format->video_codec);
        }

        /* Now that all the parameters are set, we can open the audio and
         * video codecs and allocate the necessary encode buffers. */
        if (ctx->videoStream)
            openVideo(ctx, ctx->formatCtx, ctx->videoCodec, ctx->videoStream);

        /* open the output file, if needed */
        if (!(ctx->format->flags & AVFMT_NOFILE)) {
            ret = avio_open(&ctx->formatCtx->pb, ctx->filename.c_str(), AVIO_FLAG_WRITE);
            if (ret < 0) {
                // fprintf(stderr, "Could not open '%s': %s\n", ctx->filename.c_str(),
                //        av_err2str(ret));
                return;
            }
        }

        /* Write the stream header, if any. */
        ret = avformat_write_header(ctx->formatCtx, NULL);
        if (ret < 0) {
            // fprintf(stderr, "Error occurred when opening output file: %s\n",
            //        av_err2str(ret));
            return;
        }

        if (ctx->frame)
            ctx->frame->pts = 0;

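        // Remember the encoder context under its context name for later requests.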
        _encoders[context] = ctx;
    } else if(iequals(req.name, "render.frame")) {
        _workQueue.push(req);
    } else if(iequals(req.name, "render.end")) {
        _workQueue.push(req);
    }
}
Example #7
void Widget::on_selectVideoComboBox_currentIndexChanged(int index)
{
    openVideo(index);
}
Example #8
int VideoPlayer::openVideo(bool primary, const Common::String &file, Properties &properties) {
	int slot = 0;

	Video *video = 0;
	if (!primary) {
		slot = getNextFreeSlot();
		if (slot < 0) {
			warning("VideoPlayer::openVideo(): Can't open video \"%s\": No free slot", file.c_str());
			return -1;
		}

		video = &_videoSlots[slot];
	} else
		video = &_videoSlots[0];

	// Different video already in the slot => close that video
	if (!video->isEmpty() && (video->fileName.compareToIgnoreCase(file) != 0))
		video->close();

	// No video => load the requested file
	if (video->isEmpty()) {
		// Open the video
		if (!(video->decoder = openVideo(file, properties)))
			return -1;

		if (video->decoder->hasVideo() && !(properties.flags & kFlagNoVideo) &&
		    (video->decoder->isPaletted() != !_vm->isTrueColor())) {
			if (!properties.switchColorMode)
				return -1;

			_vm->setTrueColor(!video->decoder->isPaletted());

			video->decoder->colorModeChanged();
		}

		// Set the filename
		video->fileName = file;

		// WORKAROUND: In some rare cases, the cursor should still be
		// displayed while a video is playing.
		_noCursorSwitch = false;
		if (primary && (_vm->getGameType() == kGameTypeLostInTime)) {
			if (!file.compareToIgnoreCase("PORTA03") ||
			    !file.compareToIgnoreCase("PORTA03A") ||
			    !file.compareToIgnoreCase("CALE1") ||
			    !file.compareToIgnoreCase("AMIL2") ||
			    !file.compareToIgnoreCase("AMIL3B") ||
			    !file.compareToIgnoreCase("DELB"))
				_noCursorSwitch = true;
		}

		// WORKAROUND: In Woodruff, Coh Cott vanishes in one video during her party.
		// This is a bug in the video itself, so we work around it.
		_woodruffCohCottWorkaround = false;
		if (primary && (_vm->getGameType() == kGameTypeWoodruff)) {
			if (!file.compareToIgnoreCase("SQ32-03"))
				_woodruffCohCottWorkaround = true;
		}

		if (!(properties.flags & kFlagNoVideo) && (properties.sprite >= 0)) {
			bool ownSurf    = (properties.sprite != Draw::kFrontSurface) && (properties.sprite != Draw::kBackSurface);
			bool screenSize = properties.flags & kFlagScreenSurface;

			if (ownSurf) {
				_vm->_draw->_spritesArray[properties.sprite] =
					_vm->_video->initSurfDesc(screenSize ? _vm->_width  : video->decoder->getWidth(),
					                          screenSize ? _vm->_height : video->decoder->getHeight(), 0);
			}

			if (!_vm->_draw->_spritesArray[properties.sprite] &&
			    (properties.sprite != Draw::kFrontSurface) &&
			    (properties.sprite != Draw::kBackSurface)) {
				properties.sprite = -1;
				video->surface.reset();
				video->decoder->setSurfaceMemory();
				properties.x = properties.y = 0;
			} else {
				video->surface = _vm->_draw->_spritesArray[properties.sprite];
				if (properties.sprite == Draw::kFrontSurface)
					video->surface = _vm->_draw->_frontSurface;
				if (properties.sprite == Draw::kBackSurface)
					video->surface = _vm->_draw->_backSurface;

				video->decoder->setSurfaceMemory(video->surface->getData(),
						video->surface->getWidth(), video->surface->getHeight(), video->surface->getBPP());

				if (!ownSurf || screenSize) {
					if ((properties.x >= 0) || (properties.y >= 0)) {
						properties.x = (properties.x < 0) ? 0xFFFF : properties.x;
						properties.y = (properties.y < 0) ? 0xFFFF : properties.y;
					} else
						properties.x = properties.y = -1;
				} else
					properties.x = properties.y = 0;
			}

		} else {
			properties.sprite = -1;
			video->surface.reset();
			video->decoder->setSurfaceMemory();
			properties.x = properties.y = 0;
		}
	}

	video->decoder->setXY(properties.x, properties.y);

	if (primary)
		_needBlit = (properties.flags & kFlagUseBackSurfaceContent) && (properties.sprite == Draw::kFrontSurface);

	properties.hasSound = video->decoder->hasSound();

	if (!video->decoder->hasSound())
		video->decoder->setFrameRate(_vm->_util->getFrameRate());

	WRITE_VAR(7, video->decoder->getFrameCount());

	return slot;
}
Example #9
    bool VideoEncoder::init(const Desc& desc)
    {
        // Register the codecs
        av_register_all();

        // create the output context
        avformat_alloc_output_context2(&mpOutputContext, nullptr, nullptr, mFilename.c_str());
        if(mpOutputContext == nullptr)
        {
            // The sample tries again, while explicitly requesting mpeg format. I chose not to do it, since it might lead to a container with a wrong extension
            return error(mFilename, "File output format not recognized. Make sure you use a known file extension (avi/mpeg/mp4)");
        }

        // Get the output format of the container
        AVOutputFormat* pOutputFormat = mpOutputContext->oformat;
        assert((pOutputFormat->flags & AVFMT_NOFILE) == 0); // Problem. We want a file.

        // create the video codec
        AVCodec* pVideoCodec;
        mpOutputStream = createVideoStream(mpOutputContext, desc.fps, getCodecID(desc.codec), mFilename, pVideoCodec);
        if(mpOutputStream == nullptr)
        {
            return false;
        }

        mpCodecContext = createCodecContext(mpOutputContext, desc.width, desc.height, desc.fps, desc.bitrateMbps, desc.gopSize, getCodecID(desc.codec), pVideoCodec);
        if(mpCodecContext == nullptr)
        {
            return false;
        }

        // Open the video stream
        if(openVideo(pVideoCodec, mpCodecContext, mpFrame, mFilename) == false)
        {
            return false;
        }

        // copy the stream parameters to the muxer
        if(avcodec_parameters_from_context(mpOutputStream->codecpar, mpCodecContext) < 0)
        {
            return error(desc.filename, "Could not copy the stream parameters\n");
        }

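        // Log a human-readable summary of the output format and its streams.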
        av_dump_format(mpOutputContext, 0, mFilename.c_str(), 1);

        // Open the output file
        assert((pOutputFormat->flags & AVFMT_NOFILE) == 0); // No output file required. Not sure if/when this happens.
        if(avio_open(&mpOutputContext->pb, mFilename.c_str(), AVIO_FLAG_WRITE) < 0)
        {
            return error(mFilename, "Can't open output file.");
        }

        // Write the stream header
        if(avformat_write_header(mpOutputContext, nullptr) < 0)
        {
            return error(mFilename, "Can't write file header.");
        }

        mFormat = desc.format;
        mRowPitch = getFormatBytesPerBlock(desc.format) * desc.width;
        if(desc.flipY)
        {
            mpFlippedImage = new uint8_t[desc.height * mRowPitch];
        }

        mpSwsContext = sws_getContext(desc.width, desc.height, getPictureFormatFromFalcorFormat(desc.format), desc.width, desc.height, mpCodecContext->pix_fmt, SWS_POINT, nullptr, nullptr, nullptr);
        if(mpSwsContext == nullptr)
        {
            return error(mFilename, "Failed to allocate SWScale context");
        }
        return true;
    }
Example #10
void FFMpegManager::create(const QString &filePath, int formatId, const QStringList &paths, const QSize &size, int fps)
{
#ifdef HAVE_FFMPEG
	
	AVOutputFormat *fmt = guess_format(0, filePath.toLatin1().data(), 0);
	
	if ( !fmt )
	{
		fmt = guess_format("mpeg", NULL, NULL);
	}
	
	if ( !fmt )
	{
		dError() << "Can't find a suitable output format";
		return;
	}
	
// 	AVFormatParameters params, *ap = &params;
	
	switch(formatId)
	{
		case ExportInterface::ASF:
		{
			
		}
		break;
		case ExportInterface::AVI:
		{
			fmt->video_codec = CODEC_ID_MSMPEG4V3;
// 			video_st->codec.codec_tag = 0;
		}
		break;
		case ExportInterface::MOV:
		{
			
		}
		break;
		case ExportInterface::MPEG:
		{
		}
		break;
		case ExportInterface::RM:
		{
			
		}
		break;
		case ExportInterface::SWF:
		{
			
		}
		break;
		case ExportInterface::GIF:
		{
// 			AVImageFormat *imageFormat = guess_image_format(filePath.toLatin1().data());
// 			
// 			memset(ap, 0, sizeof(*ap));
// 			ap->image_format = imageFormat;
		}
		break;
		default: break;
	}
	
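	// Allocate the muxer context and attach the chosen output format and file name.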
	AVFormatContext *oc = av_alloc_format_context();
	if ( !oc )
	{
		dError() << "Error while export";
		return;
	}

	
	oc->oformat = fmt;
	snprintf(oc->filename, sizeof(oc->filename), "%s", filePath.toLatin1().data());
	
	AVStream *video_st = addVideoStream(oc, fmt->video_codec, size.width(), size.height(), fps);
	
	if ( !video_st )
	{
		dError() << "Can't add video stream";
		return;
	}
	
	if (av_set_parameters(oc, 0) < 0)
	{
		dError() << "Invalid output format parameters";
		return ;
	}
	
	dump_format(oc, 0, filePath.toLatin1().data(), 1);
	
	if (!openVideo(oc, video_st) )
	{
		dError() << "Can't open video";
		return;
	}
	
	if (!(fmt->flags & AVFMT_NOFILE))
	{
		if (url_fopen(&oc->pb, filePath.toLatin1().data(), URL_WRONLY) < 0) 
		{
			dError() << "Could not open " << filePath.toLatin1().data();
			return;
		}
	}
	
	av_write_header(oc);
	
	double video_pts = 0.0;
	
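	// Encode one frame per input image until the configured stream duration is reached.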
	foreach(QString imagePath, paths)
	{
		if (video_st)
		{
			video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
		}
		else
		{
			video_pts = 0.0;
		}
		if (!video_st || video_pts >= m_streamDuration )
		{
			break;
		}
		
		if (! writeVideoFrame(imagePath, oc, video_st, fps) )
		{
			break;
		}
	}
	
	closeVideo(oc, video_st);
	av_write_trailer(oc);
	
	for(int i = 0; i < oc->nb_streams; i++)
	{
		av_freep(&oc->streams[i]);
	}
	
	if (!(fmt->flags & AVFMT_NOFILE)) 
	{
		/* close the output file */
		url_fclose(&oc->pb);
	}
	
	av_free(oc);
#endif
}
Example #11
MainWindow::MainWindow()
    : QMainWindow(0)
    , ui(new Ui::MainWindow)
    , m_isKKeyPressed(false)
{
    // Create the UI.
    ui->setupUi(this);
#ifndef Q_WS_X11
    ui->mainToolBar->setToolButtonStyle(Qt::ToolButtonTextUnderIcon);
#endif
    setCorner(Qt::TopLeftCorner, Qt::LeftDockWidgetArea);
    setCorner(Qt::TopRightCorner, Qt::RightDockWidgetArea);
    setDockNestingEnabled(true);

    // These use the icon theme on Linux, with fallbacks to the icons specified in QtDesigner for other platforms.
    ui->actionOpen->setIcon(QIcon::fromTheme("document-open", ui->actionOpen->icon()));
    ui->actionSave->setIcon(QIcon::fromTheme("document-save", ui->actionSave->icon()));
    ui->actionEncode->setIcon(QIcon::fromTheme("media-record", ui->actionEncode->icon()));

    // Connect UI signals.
    connect(ui->actionOpen, SIGNAL(triggered()), this, SLOT(openVideo()));
    connect(ui->actionAbout_Qt, SIGNAL(triggered()), qApp, SLOT(aboutQt()));

    // Accept drag-n-drop of files.
    this->setAcceptDrops(true);

    // Add the player widget.
    QLayout* layout = new QVBoxLayout(ui->playerPage);
    layout->setObjectName("centralWidgetLayout");
    layout->setMargin(0);
    m_player = new Player(this);
    layout->addWidget(m_player);
    connect(this, SIGNAL(producerOpened()), m_player, SLOT(onProducerOpened()));
    connect(m_player, SIGNAL(showStatusMessage(QString)), this, SLOT(showStatusMessage(QString)));
    connect(m_player, SIGNAL(inChanged(int)), this, SLOT(onCutModified()));
    connect(m_player, SIGNAL(outChanged(int)), this, SLOT(onCutModified()));

    // Add the docks.
    m_propertiesDock = new QDockWidget(tr("Properties"));
    m_propertiesDock->hide();
    m_propertiesDock->setObjectName("propertiesDock");
    m_propertiesDock->setWindowIcon(QIcon((":/icons/icons/view-form.png")));
    m_propertiesDock->toggleViewAction()->setIcon(QIcon::fromTheme("view-form", m_propertiesDock->windowIcon()));
    addDockWidget(Qt::LeftDockWidgetArea, m_propertiesDock);
    ui->menuView->addAction(m_propertiesDock->toggleViewAction());
    ui->mainToolBar->addAction(m_propertiesDock->toggleViewAction());
    connect(m_propertiesDock->toggleViewAction(), SIGNAL(triggered(bool)), this, SLOT(onPropertiesDockTriggered(bool)));

    m_recentDock = new RecentDock(this);
    m_recentDock->hide();
    addDockWidget(Qt::LeftDockWidgetArea, m_recentDock);
    ui->menuView->addAction(m_recentDock->toggleViewAction());
    ui->mainToolBar->addAction(m_recentDock->toggleViewAction());
    connect(m_recentDock, SIGNAL(itemActivated(QString)), this, SLOT(open(QString)));
    connect(m_recentDock->toggleViewAction(), SIGNAL(triggered(bool)), this, SLOT(onRecentDockTriggered(bool)));

    m_playlistDock = new PlaylistDock(this);
    m_playlistDock->hide();
    addDockWidget(Qt::LeftDockWidgetArea, m_playlistDock);
    ui->menuView->addAction(m_playlistDock->toggleViewAction());
    ui->mainToolBar->addAction(m_playlistDock->toggleViewAction());
    connect(m_playlistDock->toggleViewAction(), SIGNAL(triggered(bool)), this, SLOT(onPlaylistDockTriggered(bool)));
    connect(m_playlistDock, SIGNAL(clipOpened(void*,int,int)), this, SLOT(openCut(void*, int, int)));
    connect(m_playlistDock, SIGNAL(itemActivated(int)), this, SLOT(seekPlaylist(int)));
    connect(m_playlistDock, SIGNAL(showStatusMessage(QString)), this, SLOT(showStatusMessage(QString)));
    connect(m_playlistDock->model(), SIGNAL(created()), this, SLOT(onPlaylistCreated()));
    connect(m_playlistDock->model(), SIGNAL(cleared()), this, SLOT(onPlaylistCleared()));
    connect(m_playlistDock->model(), SIGNAL(closed()), this, SLOT(onPlaylistClosed()));
    connect(m_playlistDock->model(), SIGNAL(modified()), this, SLOT(onPlaylistModified()));
    connect(m_playlistDock->model(), SIGNAL(loaded()), this, SLOT(updateMarkers()));
    connect(m_playlistDock->model(), SIGNAL(modified()), this, SLOT(updateMarkers()));

    tabifyDockWidget(m_recentDock, m_propertiesDock);
    tabifyDockWidget(m_propertiesDock, m_playlistDock);
    m_recentDock->raise();

    m_encodeDock = new EncodeDock(this);
    m_encodeDock->hide();
    addDockWidget(Qt::RightDockWidgetArea, m_encodeDock);
    ui->menuView->addAction(m_encodeDock->toggleViewAction());
    ui->mainToolBar->addAction(ui->actionEncode);
    connect(this, SIGNAL(producerOpened()), m_encodeDock, SLOT(onProducerOpened()));
    connect(m_encodeDock, SIGNAL(visibilityChanged(bool)), ui->actionEncode, SLOT(setChecked(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), m_player, SLOT(onCaptureStateChanged(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), m_propertiesDock, SLOT(setDisabled(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), m_recentDock, SLOT(setDisabled(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), ui->actionOpen, SLOT(setDisabled(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), ui->actionOpenOther, SLOT(setDisabled(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), ui->actionExit, SLOT(setDisabled(bool)));
    connect(m_encodeDock, SIGNAL(captureStateChanged(bool)), this, SLOT(onCaptureStateChanged(bool)));

    m_jobsDock = new JobsDock(this);
    m_jobsDock->hide();
    addDockWidget(Qt::RightDockWidgetArea, m_jobsDock);
    tabifyDockWidget(m_encodeDock, m_jobsDock);
    ui->menuView->addAction(m_jobsDock->toggleViewAction());
    connect(&JOBS, SIGNAL(jobAdded()), m_jobsDock, SLOT(show()));
    connect(&JOBS, SIGNAL(jobAdded()), m_jobsDock, SLOT(raise()));
    connect(m_jobsDock, SIGNAL(visibilityChanged(bool)), this, SLOT(onJobsVisibilityChanged(bool)));

    // Connect signals.
    connect(this, SIGNAL(producerOpened()), this, SLOT(onProducerOpened()));

    // connect video widget signals
#if defined(Q_WS_MAC) || defined(Q_WS_WIN)
    Mlt::GLWidget* videoWidget = (Mlt::GLWidget*) &(MLT);
    connect(videoWidget, SIGNAL(dragStarted()), m_playlistDock, SLOT(onPlayerDragStarted()));
    connect(videoWidget, SIGNAL(seekTo(int)), m_player, SLOT(seek(int)));
#else
    if (m_settings.value("player/opengl", true).toBool()) {
        Mlt::GLWidget* videoWidget = (Mlt::GLWidget*) &(MLT);
        connect(videoWidget, SIGNAL(dragStarted()), m_playlistDock, SLOT(onPlayerDragStarted()));
        connect(videoWidget, SIGNAL(seekTo(int)), m_player, SLOT(seek(int)));
    }
    else {
        Mlt::SDLWidget* videoWidget = (Mlt::SDLWidget*) &(MLT);
        connect(videoWidget, SIGNAL(dragStarted()), m_playlistDock, SLOT(onPlayerDragStarted()));
        connect(videoWidget, SIGNAL(seekTo(int)), m_player, SLOT(seek(int)));
    }
#endif

    readSettings();
    setFocus();
    setCurrentFile("");
}
Example #12
// setup libAV related structs.
bool AV::setupAV() {

	ct.of = av_guess_format(NULL, "roxlu.flv", NULL);
	if(!ct.of) {
		printf("Cannot create flv AVOutputFormat\n");
		return false;
	}
	
	ct.c = avformat_alloc_context();
	if(!ct.c) {
		printf("Cannot allocate the AVFormatContext\n");
		return false;
	}
	ct.c->video_codec_id = CODEC_ID_H264;
	ct.c->debug = 3;
	ct.c->oformat = ct.of;

//	const char* output_filename = "tcp://127.0.0.1:6665";
//	const char* output_filename = "rtmp://gethinlewis.rtmphost.com";
	const char* output_filename = "rtmp://gethinlewis.rtmphost.com/event/_definst_";
//	const char* output_filename = "test.flv";
	snprintf(ct.c->filename, sizeof(ct.c->filename), "%s", output_filename);
	//ct.vs = addVideoStream(ct, ct.of->video_codec);

	printf("%d -- %d \n", CODEC_ID_H264, ct.of->video_codec);
	ct.of->video_codec = CODEC_ID_H264;
	ct.vs = addVideoStream(ct, ct.of->video_codec);
	if(!ct.vs) {
		printf("Cannot create video stream: %d.\n", ct.of->video_codec);
		return false;
	}
	
	if(!openVideo(ct)) {
		printf("Cannot open video stream.\n");
		return false;
	}
	
	//av_dict_set(&ct.c->metadata, "streamName", "video_test", 0);
	av_dict_set(&ct.c->metadata, "streamName", "livefeed", 0);
	
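	// Optionally add and open an audio stream (Speex or MP3).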
	if(use_audio) {
		bool use_mp3 = true;
		if(!use_mp3) {
			ct.asample_fmt = AV_SAMPLE_FMT_S16;
			ct.abit_rate = 64000;
			ct.asample_rate = 8000;
			ct.as = addAudioStream(ct, CODEC_ID_SPEEX);
		}
		else {
			ct.asample_fmt = AV_SAMPLE_FMT_S16;
			ct.abit_rate = 64000;
			ct.asample_rate = 44100;
			ct.as = addAudioStream(ct, CODEC_ID_MP3);
		}
		
		if(!ct.as) {
			printf("Cannot create audio stream.\n");
			return false;
		}
		
		if(!openAudio(ct)) {
			printf("Cannot open audio stream.\n");
			return false;
		}
	}
	
	av_dump_format(ct.c, 0, output_filename, 1);
	
	if(!(ct.of->flags & AVFMT_NOFILE)) {
		if(avio_open(&ct.c->pb, output_filename, AVIO_FLAG_WRITE) < 0) {
			printf("Cannot open: %s\n", output_filename);
			return false;
		}
	}
	avformat_write_header(ct.c, NULL);
	
	return true;
}