Example #1
/*	FUNCTION:		ViewObject :: DragDrop
	ARGUMENTS:		ref (dropped file), mouse_x, mouse_y
	RETURN:			n/a
	DESCRIPTION:	Hook function called when the user drops a file in the application window.
					Determines whether the file is an image or a video and creates a matching media source.
*/
void ViewObject :: DragDrop(entry_ref *ref, float mouse_x, float mouse_y)
{
	//	Image file?
	BPath path(ref);
	BBitmap *bitmap = BTranslationUtils::GetBitmap(path.Path(), NULL);
	if (bitmap)
	{
		MediaSource *media = new MediaSource(this);
		GLCreateTexture(media, bitmap);
		if (!SurfaceUpdate(media, mouse_x, mouse_y))
			delete media;
		delete bitmap;
		return;
	}
	
	//	Video file
	Video *video = new Video(ref);
	if (video->GetStatus() == B_OK)
	{
		MediaSource *media = new MediaSource(this);
		GLCreateTexture(media, video->GetBitmap());
		media->SetVideo(video);
		if (SurfaceUpdate(media, mouse_x, mouse_y))
			video->Start();
		else
			delete media;
		return;
	}
	else
		delete video;
	
	printf("Unsupported file\n");
}
void MediaSourceRegistry::registerURL(SecurityOrigin*, const KURL& url, URLRegistrable* registrable)
{
    DCHECK_EQ(&registrable->registry(), this);
    DCHECK(isMainThread());

    MediaSource* source = static_cast<MediaSource*>(registrable);
    source->addedToRegistry();
    m_mediaSources.set(url.getString(), source);
}
void MediaSourceRegistry::registerURL(SecurityOrigin*, const URL& url, URLRegistrable* registrable)
{
    ASSERT(&registrable->registry() == this);
    ASSERT(isMainThread());

    MediaSource* source = static_cast<MediaSource*>(registrable);
    source->addedToRegistry();
    m_mediaSources.set(url.string(), source);
}
static void durationAttrSetter(v8::Local<v8::String> name, v8::Local<v8::Value> value, const v8::AccessorInfo& info)
{
    MediaSource* imp = V8MediaSource::toNative(info.Holder());
    double v = static_cast<double>(value->NumberValue());
    ExceptionCode ec = 0;
    imp->setDuration(v, ec);
    if (UNLIKELY(ec))
        setDOMException(ec, info.GetIsolate());
    return;
}
void MediaSourceRegistry::unregisterURL(const KURL& url)
{
    DCHECK(isMainThread());
    PersistentHeapHashMap<String, Member<MediaSource>>::iterator iter = m_mediaSources.find(url.getString());
    if (iter == m_mediaSources.end())
        return;

    MediaSource* source = iter->value;
    m_mediaSources.remove(iter);
    source->removedFromRegistry();
}
static v8::Handle<v8::Value> activeSourceBuffersAttrGetter(v8::Local<v8::String> name, const v8::AccessorInfo& info)
{
    MediaSource* imp = V8MediaSource::toNative(info.Holder());
    RefPtr<SourceBufferList> result = imp->activeSourceBuffers();
    v8::Handle<v8::Value> wrapper = result.get() ? v8::Handle<v8::Value>(DOMDataStore::getWrapper(result.get(), info.GetIsolate())) : v8Undefined();
    if (wrapper.IsEmpty()) {
        wrapper = toV8(result.get(), info.Holder(), info.GetIsolate());
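        // Stash the new wrapper as a hidden reference on the holder so it stays alive with it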
        if (!wrapper.IsEmpty())
            V8DOMWrapper::setNamedHiddenReference(info.Holder(), "activeSourceBuffers", wrapper);
    }
    return wrapper;
}
static v8::Handle<v8::Value> endOfStreamCallback(const v8::Arguments& args)
{
    MediaSource* imp = V8MediaSource::toNative(args.Holder());
    ExceptionCode ec = 0;
    {
    V8TRYCATCH_FOR_V8STRINGRESOURCE(V8StringResource<>, error, MAYBE_MISSING_PARAMETER(args, 0, DefaultIsNullString));
    imp->endOfStream(error, ec);
    if (UNLIKELY(ec))
        goto fail;
    return v8Undefined();
    }
    fail:
    return setDOMException(ec, args.GetIsolate());
}
Boolean FramedSource::lookupByName(UsageEnvironment& env, char const* sourceName,
				   FramedSource*& resultSource) {
  resultSource = NULL; // unless we succeed

  MediaSource* source;
  if (!MediaSource::lookupByName(env, sourceName, source)) return False;

  if (!source->isFramedSource()) {
    env.setResultMsg(sourceName, " is not a framed source");
    return False;
  }

  resultSource = (FramedSource*)source;
  return True;
}
Example #9
void MediaObject::setNextSource(const MediaSource &source)
{
    if (source.type() == MediaSource::Invalid ||
        source.type() == MediaSource::Empty)
        return;
    m_nextSource = source;
}
static v8::Handle<v8::Value> dispatchEventCallback(const v8::Arguments& args)
{
    if (args.Length() < 1)
        return throwNotEnoughArgumentsError(args.GetIsolate());
    MediaSource* imp = V8MediaSource::toNative(args.Holder());
    ExceptionCode ec = 0;
    {
    V8TRYCATCH(Event*, event, V8Event::HasInstance(MAYBE_MISSING_PARAMETER(args, 0, DefaultIsUndefined)) ? V8Event::toNative(v8::Handle<v8::Object>::Cast(MAYBE_MISSING_PARAMETER(args, 0, DefaultIsUndefined))) : 0);
    bool result = imp->dispatchEvent(event, ec);
    if (UNLIKELY(ec))
        goto fail;
    return v8Boolean(result, args.GetIsolate());
    }
    fail:
    return setDOMException(ec, args.GetIsolate());
}
Boolean LiveAMRAudioRTPSink::sourceIsCompatibleWithUs( MediaSource& source )
{
  // Our source must be an AMR audio source:
  if (!source.isAMRAudioSource()) return False;

  // Also, the source must be wideband iff we asked for this:
  LiveAMRAudioDeviceSource& amrSource = (LiveAMRAudioDeviceSource&)source;
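  // (the XOR is nonzero exactly when one side is wideband and the other is not)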
  if ((amrSource.isWideband()^sourceIsWideband()) != 0) return False;

  // Also, the source must have the same number of channels that we
  // specified.  (It could, in principle, have more, but we don't
  // support that.)
  if (amrSource.numChannels() != numChannels()) return False;

  // Also, because in our current implementation we output only one
  // frame in each RTP packet, this means that for multi-channel audio,
  // each 'frame-block' will be split over multiple RTP packets, which
  // may violate the spec.  Warn about this:
  if (amrSource.numChannels() > 1) {
    envir() << "AMRAudioRTPSink: Warning: Input source has " << amrSource.numChannels()
      << " audio channels.  In the current implementation, the multi-frame frame-block will be split over multiple RTP packets\n";
  }

  return True;
}
static v8::Handle<v8::Value> addSourceBufferCallback(const v8::Arguments& args)
{
    if (args.Length() < 1)
        return throwNotEnoughArgumentsError(args.GetIsolate());
    MediaSource* imp = V8MediaSource::toNative(args.Holder());
    ExceptionCode ec = 0;
    {
    V8TRYCATCH_FOR_V8STRINGRESOURCE(V8StringResource<>, type, MAYBE_MISSING_PARAMETER(args, 0, DefaultIsUndefined));
    RefPtr<SourceBuffer> result = imp->addSourceBuffer(type, ec);
    if (UNLIKELY(ec))
        goto fail;
    return toV8(result.release(), args.Holder(), args.GetIsolate());
    }
    fail:
    return setDOMException(ec, args.GetIsolate());
}
void MediaObject::setCurrentSource(const MediaSource &newSource)
{
    K_D(MediaObject);
    if (!k_ptr->backendObject()) {
        d->mediaSource = newSource;
        return;
    }

    pDebug() << Q_FUNC_INFO << newSource.url();

    stop(); // first call stop as that often is the expected state
            // for setting a new URL

    d->mediaSource = newSource;
#ifndef QT_NO_PHONON_ABSTRACTMEDIASTREAM
    d->kiofallback = 0; // kiofallback auto-deletes
#endif //QT_NO_PHONON_ABSTRACTMEDIASTREAM

//X         if (url.scheme() == "http") {
//X             d->kiofallback = Platform::createMediaStream(url, this);
//X             if (d->kiofallback) {
//X                 ...
//X                 return;
//X             }
//X         }

#ifndef QT_NO_PHONON_ABSTRACTMEDIASTREAM
    if (d->mediaSource.type() == MediaSource::Stream) {
        Q_ASSERT(d->mediaSource.stream());
        d->mediaSource.stream()->d_func()->setMediaObjectPrivate(d);
    } 
#endif //QT_NO_PHONON_ABSTRACTMEDIASTREAM

    INTERFACE_CALL(setSource(d->mediaSource));
}
static v8::Handle<v8::Value> removeSourceBufferCallback(const v8::Arguments& args)
{
    if (args.Length() < 1)
        return throwNotEnoughArgumentsError(args.GetIsolate());
    MediaSource* imp = V8MediaSource::toNative(args.Holder());
    ExceptionCode ec = 0;
    {
    V8TRYCATCH(SourceBuffer*, buffer, V8SourceBuffer::HasInstance(MAYBE_MISSING_PARAMETER(args, 0, DefaultIsUndefined)) ? V8SourceBuffer::toNative(v8::Handle<v8::Object>::Cast(MAYBE_MISSING_PARAMETER(args, 0, DefaultIsUndefined))) : 0);
    imp->removeSourceBuffer(buffer, ec);
    if (UNLIKELY(ec))
        goto fail;
    return v8Undefined();
    }
    fail:
    return setDOMException(ec, args.GetIsolate());
}
void MediaSourceTest::testIODevice()
{
    const QByteArray data("0192380");
    QBuffer *buffer = new QBuffer;
    buffer->setData(data);
    buffer->open(QIODevice::ReadOnly);

    MediaSource a(buffer);
    QCOMPARE(a.type(), MediaSource::Stream);
    QCOMPARE(a.fileName(), QString());
    QCOMPARE(a.url(), QUrl());
    QCOMPARE(a.discType(), Phonon::NoDisc);
    QVERIFY(a.stream() != 0);
    QCOMPARE(a.deviceName(), QString());
    //QCOMPARE(a.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(a.videoCaptureDevice(), VideoCaptureDevice());
    MediaSource b(a);
    MediaSource c;
    c = a;
    QCOMPARE(a, b);
    QCOMPARE(a, c);
    QCOMPARE(b, c);

    QCOMPARE(b.type(), MediaSource::Stream);
    QCOMPARE(b.fileName(), QString());
    QCOMPARE(b.url(), QUrl());
    QCOMPARE(b.discType(), Phonon::NoDisc);
    QVERIFY(b.stream() != 0);
    QCOMPARE(b.deviceName(), QString());
    //QCOMPARE(b.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(b.videoCaptureDevice(), VideoCaptureDevice());

    QCOMPARE(c.type(), MediaSource::Stream);
    QCOMPARE(c.fileName(), QString());
    QCOMPARE(c.url(), QUrl());
    QCOMPARE(c.discType(), Phonon::NoDisc);
    QVERIFY(c.stream() != 0);
    QCOMPARE(c.deviceName(), QString());
    //QCOMPARE(c.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(c.videoCaptureDevice(), VideoCaptureDevice());

    // Deleting the backing QIODevice must invalidate the source and every copy that shares it
    delete buffer;
    QCOMPARE(a.type(), MediaSource::Invalid);
    QCOMPARE(b.type(), MediaSource::Invalid);
    QCOMPARE(c.type(), MediaSource::Invalid);
    const AbstractMediaStream *null = 0;
    QCOMPARE(a.stream(), null);
    QCOMPARE(b.stream(), null);
    QCOMPARE(c.stream(), null);
}
/** Add a track to this Playlist instance. */
void Playlist::append(const MediaSource &m)
{
	// Resolve metadata using TagLib
	TagLib::FileRef f(m.fileName().toLocal8Bit().data());
	if (!f.isNull()) {
		sources.append(m);
		QString title(f.tag()->title().toCString());
		if (title.isEmpty()) {
			// The file name in a MediaSource is not normalized with QDir::separator(), so '/' is hardcoded
			title = m.fileName().split('/').last();
		}

		// Then construct a new row with the correct information
		QList<QTableWidgetItem *> widgetItems;
		QTableWidgetItem *trackItem = new QTableWidgetItem(QString::number(f.tag()->track()));
		QTableWidgetItem *titleItem = new QTableWidgetItem(title);
		QTableWidgetItem *albumItem = new QTableWidgetItem(f.tag()->album().toCString());
		QTableWidgetItem *lengthItem = new QTableWidgetItem(this->convertTrackLength(f.audioProperties()->length()));
		QTableWidgetItem *artistItem = new QTableWidgetItem(f.tag()->artist().toCString());
		QTableWidgetItem *ratingItem = new QTableWidgetItem("***");
		QTableWidgetItem *yearItem = new QTableWidgetItem(QString::number(f.tag()->year()));

		widgetItems << trackItem << titleItem << albumItem << lengthItem << artistItem << ratingItem << yearItem;

		int currentRow = rowCount();
		insertRow(currentRow);

		QFont font = Settings::getInstance()->font(Settings::PLAYLIST);
		for (int i=0; i < widgetItems.length(); i++) {
			QTableWidgetItem *item = widgetItems.at(i);
			item->setFlags(Qt::ItemIsSelectable | Qt::ItemIsEnabled);
			item->setFont(font);
			setItem(currentRow, i, item);
			QFontMetrics fm(font);
			setRowHeight(currentRow, fm.height());
		}

		trackItem->setTextAlignment(Qt::AlignCenter);
		lengthItem->setTextAlignment(Qt::AlignCenter);
		ratingItem->setTextAlignment(Qt::AlignCenter);
		yearItem->setTextAlignment(Qt::AlignCenter);
	}
}
Example #17
/*
 * !reimp
 */
void MediaObject::setSource(const MediaSource &source)
{
    QMultiMap<QString, QString> ret;

    ret.insert(QLatin1String("ARTIST"), "Nokia Dude");
    ret.insert(QLatin1String("ALBUM"), "Sound of silence");
    ret.insert(QLatin1String("DATE"), "2009");

    m_error = Phonon::NoError;
    setState(Phonon::LoadingState);

    m_source = source;
    currentPos = 0;

    if((source.fileName().contains(".avi")) ||
       (source.fileName().contains(".mp4"))) {
        m_hasVideo = true;
        emit hasVideoChanged(m_hasVideo);
    }
    if(source.fileName().contains(".wav")) {
        QFile file(source.fileName());
        if (file.open(QIODevice::ReadOnly)) {
            int len = file.read((char*)&header, sizeof(CombinedHeader));
            if(len == sizeof(CombinedHeader)) {
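                // The first four bytes of a wav file must match the RIFF chunk id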
                if(memcmp(&header.riff.descriptor.id, riffId, 4) != 0) {
                    // Not a valid wav file, to satisfy unit test for mediaobject
                    m_error = Phonon::FatalError;
                    //m_state = Phonon::ErrorState;
                    m_errorString = "Invalid wav file";
                    setState(Phonon::ErrorState);
                    file.close();
                    return;
                }
            }
            file.close();
        }
    }
    emit metaDataChanged(ret);
    emit currentSourceChanged(source);
    emit totalTimeChanged(m_totalTime);

    setState(Phonon::StoppedState);
}
void MediaSourceTest::testStream()
{
    AbstractMediaStream *stream = new Stream;

    MediaSource a(stream);
    QCOMPARE(a.type(), MediaSource::Stream);
    QCOMPARE(a.fileName(), QString());
    QCOMPARE(a.url(), QUrl());
    QCOMPARE(a.discType(), Phonon::NoDisc);
    QCOMPARE(a.stream(), stream);
    QCOMPARE(a.deviceName(), QString());
    //QCOMPARE(a.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(a.videoCaptureDevice(), VideoCaptureDevice());
    MediaSource b(a);
    MediaSource c;
    c = a;
    QCOMPARE(a, b);
    QCOMPARE(a, c);
    QCOMPARE(b, c);

    QCOMPARE(b.type(), MediaSource::Stream);
    QCOMPARE(b.fileName(), QString());
    QCOMPARE(b.url(), QUrl());
    QCOMPARE(b.discType(), Phonon::NoDisc);
    QCOMPARE(b.stream(), stream);
    QCOMPARE(b.deviceName(), QString());
    //QCOMPARE(b.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(b.videoCaptureDevice(), VideoCaptureDevice());

    QCOMPARE(c.type(), MediaSource::Stream);
    QCOMPARE(c.fileName(), QString());
    QCOMPARE(c.url(), QUrl());
    QCOMPARE(c.discType(), Phonon::NoDisc);
    QCOMPARE(c.stream(), stream);
    QCOMPARE(c.deviceName(), QString());
    //QCOMPARE(c.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(c.videoCaptureDevice(), VideoCaptureDevice());

    delete stream;
    QCOMPARE(a.type(), MediaSource::Invalid);
    QCOMPARE(b.type(), MediaSource::Invalid);
    QCOMPARE(c.type(), MediaSource::Invalid);
    const AbstractMediaStream *null = 0;
    QCOMPARE(a.stream(), null);
    QCOMPARE(b.stream(), null);
    QCOMPARE(c.stream(), null);
}
void JSMediaSource::visitAdditionalChildren(SlotVisitor& visitor)
{
    MediaSource* mediaSource = static_cast<MediaSource*>(&impl());
    visitor.addOpaqueRoot(mediaSource);

    // -- end boilerplate code --

    // Mark SourceBufferList
    SourceBufferList* sourceBufferList = mediaSource->sourceBuffers();
    if (sourceBufferList) {
        visitor.addOpaqueRoot(sourceBufferList);

        // Mark SourceBuffer
        for (unsigned long i = 0, len = sourceBufferList->length(); i < len; i++) {
            SourceBuffer* sourceBuffer = sourceBufferList->item(i);
            if (sourceBuffer)
                visitor.addOpaqueRoot(sourceBuffer);
        }
    }
}
Example #20
int main(int argc, char ** argv)
{
    if (argc < 3) {
        fprintf(stderr, "need at least 3 arguments.\n");
        exit(1);
    }

    av_register_all();

    MediaSource ms;
    MediaMuxer muxer;

    ms.open(argv[1], 0);
    muxer.open(argv[2], ms.getFmtCtx(), true);

    AVPacket pkt;
    AVFrame * frame = NULL;

    while (!ms.read(&pkt)) {
        AVMediaType type = ms.getMediaType(&pkt);
        int ret = ms.decode(frame, &pkt);
        if (ret < 0) {
            fprintf(stderr, "decode error.\n");
            exit(1);
        }

        if (ret == 0)
            continue;

        switch(type) {
            case AVMEDIA_TYPE_AUDIO:
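                // (no audio-specific handling in this example)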

                break;
            case AVMEDIA_TYPE_VIDEO:

                break;
            default:
                av_log(NULL, AV_LOG_INFO, "Unsupported packet type.\n");
                break;
        }
        muxer.write(frame->data, frame->nb_samples, type);
        if (pkt.data)
            av_free_packet(&pkt);
    }

    muxer.close();
    ms.close();

    return 0;
}
void DisplayScreen::display_screen(const MediaSource& mediaSrc) {
    // Convert the media sources frame into the screen matrix representation
    uInt8* pi_curr_frame_buffer = mediaSrc.currentFrameBuffer();
    int ind_i, ind_j;
    for (int i = 0; i < screen_width * screen_height; i++) {
        uInt8 v = pi_curr_frame_buffer[i];
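        // recover the row (ind_i) and column (ind_j) of pixel i in the frame buffer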
        ind_i = i / screen_width;
        ind_j = i - (ind_i * screen_width);
        screen_matrix[ind_i][ind_j] = v;
    }

    // Give our handlers a chance to mess with the screen
    for (int i=handlers.size()-1; i>=0; --i) {
        handlers[i]->display_screen(screen_matrix, screen_width, screen_height);
    }
}
void MediaObject::setCurrentSource(const MediaSource &newSource)
{
    K_D(MediaObject);
    if (!k_ptr->backendObject()) {
        d->mediaSource = newSource;
        return;
    }

    pDebug() << Q_FUNC_INFO << newSource.url();

    stop(); // first call stop as that often is the expected state
            // for setting a new URL

    MediaSource::Type oldSourceType = d->mediaSource.type();
    d->mediaSource = newSource;
    d->kiofallback = 0; // kiofallback auto-deletes

//X         if (url.scheme() == "http") {
//X             d->kiofallback = Platform::createMediaStream(url, this);
//X             if (d->kiofallback) {
//X                 ...
//X                 return;
//X             }
//X         }

    if (d->mediaSource.type() == MediaSource::Stream) {
        Q_ASSERT(d->mediaSource.stream());
        d->mediaSource.stream()->d_func()->setMediaObjectPrivate(d);
    } else if (d->mediaSource.type() == MediaSource::Invalid) {
        pWarning() << "requested invalid MediaSource for the current source of MediaObject";
        return;
    }
    if (d->mediaSource.type() == MediaSource::Url && oldSourceType != MediaSource::Url) {
        disconnect(d->m_backendObject, SIGNAL(stateChanged(Phonon::State, Phonon::State)), this, SIGNAL(stateChanged(Phonon::State, Phonon::State)));
        connect(d->m_backendObject, SIGNAL(stateChanged(Phonon::State, Phonon::State)), this, SLOT(_k_stateChanged(Phonon::State, Phonon::State)));
    } else if (d->mediaSource.type() != MediaSource::Url && oldSourceType == MediaSource::Url) {
        disconnect(d->m_backendObject, SIGNAL(stateChanged(Phonon::State, Phonon::State)), this, SLOT(_k_stateChanged(Phonon::State, Phonon::State)));
        connect(d->m_backendObject, SIGNAL(stateChanged(Phonon::State, Phonon::State)), this, SIGNAL(stateChanged(Phonon::State, Phonon::State)));
    }
    INTERFACE_CALL(setSource(d->mediaSource));
}
void MediaSourceTest::testDiscType()
{
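    // cover the disc types with enum values 0 through Phonon::Vcd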
    for (int i = 0; i <= Phonon::Vcd; ++i) {
        Phonon::DiscType discType = static_cast<Phonon::DiscType>(i);
        AbstractMediaStream *stream = 0;

        MediaSource a(discType);

        QCOMPARE(a.type(), MediaSource::Disc);
        QCOMPARE(a.fileName(), QString());
        QCOMPARE(a.url(), QUrl());
        QCOMPARE(a.discType(), discType);
        QCOMPARE(a.stream(), stream);
        QCOMPARE(a.deviceName(), QString());
        //QCOMPARE(a.audioCaptureDevice(), AudioCaptureDevice());
        //QCOMPARE(a.videoCaptureDevice(), VideoCaptureDevice());
        MediaSource b(a);
        MediaSource c;
        c = a;
        QCOMPARE(a, b);
        QCOMPARE(a, c);
        QCOMPARE(b, c);

        QCOMPARE(b.type(), MediaSource::Disc);
        QCOMPARE(b.fileName(), QString());
        QCOMPARE(b.url(), QUrl());
        QCOMPARE(b.discType(), discType);
        QCOMPARE(b.stream(), stream);
        QCOMPARE(b.deviceName(), QString());
        //QCOMPARE(b.audioCaptureDevice(), AudioCaptureDevice());
        //QCOMPARE(b.videoCaptureDevice(), VideoCaptureDevice());

        QCOMPARE(c.type(), MediaSource::Disc);
        QCOMPARE(c.fileName(), QString());
        QCOMPARE(c.url(), QUrl());
        QCOMPARE(c.discType(), discType);
        QCOMPARE(c.stream(), stream);
        QCOMPARE(c.deviceName(), QString());
        //QCOMPARE(c.audioCaptureDevice(), AudioCaptureDevice());
        //QCOMPARE(c.videoCaptureDevice(), VideoCaptureDevice());
    }
}
void MediaSourceTest::testUrl()
{
    QUrl url("http://www.example.com/music.ogg");
    AbstractMediaStream *stream = 0;

    MediaSource a(url);
    QCOMPARE(a.type(), MediaSource::Url);
    QCOMPARE(a.fileName(), QString());
    QCOMPARE(a.url(), url);
    QCOMPARE(a.discType(), Phonon::NoDisc);
    QCOMPARE(a.stream(), stream);
    QCOMPARE(a.deviceName(), QString());
    //QCOMPARE(a.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(a.videoCaptureDevice(), VideoCaptureDevice());
    MediaSource b(a);
    MediaSource c;
    c = a;
    QCOMPARE(a, b);
    QCOMPARE(a, c);
    QCOMPARE(b, c);

    QCOMPARE(b.type(), MediaSource::Url);
    QCOMPARE(b.fileName(), QString());
    QCOMPARE(b.url(), url);
    QCOMPARE(b.discType(), Phonon::NoDisc);
    QCOMPARE(b.stream(), stream);
    QCOMPARE(b.deviceName(), QString());
    //QCOMPARE(b.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(b.videoCaptureDevice(), VideoCaptureDevice());

    QCOMPARE(c.type(), MediaSource::Url);
    QCOMPARE(c.fileName(), QString());
    QCOMPARE(c.url(), url);
    QCOMPARE(c.discType(), Phonon::NoDisc);
    QCOMPARE(c.stream(), stream);
    QCOMPARE(c.deviceName(), QString());
    //QCOMPARE(c.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(c.videoCaptureDevice(), VideoCaptureDevice());
}
void MediaSourceTest::testQtResource()
{
    const QString filename(":/ogg/zero.ogg");
    MediaSource a(filename);
    QCOMPARE(a.type(), MediaSource::Stream);
    QCOMPARE(a.fileName(), QString());
    QCOMPARE(a.url(), QUrl());
    QCOMPARE(a.discType(), Phonon::NoDisc);
    QVERIFY(a.stream() != 0);
    QCOMPARE(a.deviceName(), QString());
    //QCOMPARE(a.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(a.videoCaptureDevice(), VideoCaptureDevice());
    MediaSource b(a);
    MediaSource c;
    c = a;
    QCOMPARE(a, b);
    QCOMPARE(a, c);
    QCOMPARE(b, c);

    QCOMPARE(b.type(), MediaSource::Stream);
    QCOMPARE(b.fileName(), QString());
    QCOMPARE(b.url(), QUrl());
    QCOMPARE(b.discType(), Phonon::NoDisc);
    QVERIFY(b.stream() != 0);
    QCOMPARE(b.deviceName(), QString());
    //QCOMPARE(b.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(b.videoCaptureDevice(), VideoCaptureDevice());

    QCOMPARE(c.type(), MediaSource::Stream);
    QCOMPARE(c.fileName(), QString());
    QCOMPARE(c.url(), QUrl());
    QCOMPARE(c.discType(), Phonon::NoDisc);
    QVERIFY(c.stream() != 0);
    QCOMPARE(c.deviceName(), QString());
    //QCOMPARE(c.audioCaptureDevice(), AudioCaptureDevice());
    //QCOMPARE(c.videoCaptureDevice(), VideoCaptureDevice());
}
Example #26
Boolean JPEGVideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) {
  return source.isJPEGVideoSource();
}
Example #27
void VlcMediaWidget::play(const MediaSource &source)
{
	addPendingUpdates(PlaybackStatus | DvdMenu);
	QByteArray url = source.getUrl().toEncoded();
	playingDvd = false;

	switch (source.getType()) {
	case MediaSource::Url:
		if (url.endsWith(".iso")) {
			playingDvd = true;
		}

		break;
	case MediaSource::AudioCd:
		if (url.size() >= 7) {
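			// e.g. "file:///dev/sr0" becomes "cdda:///dev/sr0"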
			url.replace(0, 4, "cdda");
		} else {
			url = "cdda://";
		}

		break;
	case MediaSource::VideoCd:
		if (url.size() >= 7) {
			url.replace(0, 4, "vcd");
		} else {
			url = "vcd://";
		}

		break;
	case MediaSource::Dvd:
		if (url.size() >= 7) {
			url.replace(0, 4, "dvd");
		} else {
			url = "dvd://";
		}

		playingDvd = true;
		break;
	case MediaSource::Dvb:
		break;
	}

	libvlc_media_t *vlcMedia = libvlc_media_new_location(vlcInstance, url.constData());

	if (vlcMedia == NULL) {
		libvlc_media_player_stop(vlcMediaPlayer);
        Log("VlcMediaWidget::play: cannot create media") << source.getUrl().url();
		return;
	}

	libvlc_event_manager_t *eventManager = libvlc_media_event_manager(vlcMedia);
	libvlc_event_e eventTypes[] = { libvlc_MediaMetaChanged };

	for (uint i = 0; i < (sizeof(eventTypes) / sizeof(eventTypes[0])); ++i) {
		if (libvlc_event_attach(eventManager, eventTypes[i], vlcEventHandler, this) != 0) {
			Log("VlcMediaWidget::play: cannot attach event handler") << eventTypes[i];
		}
	}

	libvlc_media_player_set_media(vlcMediaPlayer, vlcMedia);
	libvlc_media_release(vlcMedia);

//	FIXME!

// 	if (source.subtitleUrl.isValid()) {
// 		if (libvlc_video_set_subtitle_file(vlcMediaPlayer,
// 		    source.subtitleUrl.toEncoded().constData()) == 0) {
// 			Log("VlcMediaWidget::play: cannot set subtitle file") <<
// 				source.subtitleUrl.prettyUrl();
// 		}
// 	}

	if (libvlc_media_player_play(vlcMediaPlayer) != 0) {
        Log("VlcMediaWidget::play: cannot play media") << source.getUrl().url();
	}
}
Boolean H264VideoRTPSink::sourceIsCompatibleWithUs(MediaSource& source) {
  // Our source must be an appropriate framer:
  return source.isH264VideoStreamFramer();
}
void MMF::MediaObject::createPlayer(const MediaSource &source)
{
    TRACE_CONTEXT(MediaObject::createPlayer, EAudioApi);
    TRACE_ENTRY("state %d source.type %d", state(), source.type());
    TRACE_ENTRY("source.type %d", source.type());

    MediaType mediaType = MediaTypeUnknown;

    AbstractPlayer* oldPlayer = m_player.data();

    const bool oldPlayerHasVideo = oldPlayer->hasVideo();
    const bool oldPlayerSeekable = oldPlayer->isSeekable();

    QString errorMessage;

    // Determine media type
    switch (source.type()) {
    case MediaSource::LocalFile:
        mediaType = fileMediaType(source.fileName());
        break;

    case MediaSource::Url:
        {
            const QUrl url(source.url());
            if (url.scheme() == QLatin1String("file")) {
                mediaType = fileMediaType(url.toLocalFile());
            }
            else {
                // Streaming playback is generally not supported by the implementation
                // of the audio player API, so we use CVideoPlayerUtility for both
                // audio and video streaming.
                mediaType = MediaTypeVideo;
            }
        }
        break;

    case MediaSource::Invalid:
    case MediaSource::Disc:
    case MediaSource::Stream:
        errorMessage = tr("Error opening source: type not supported");
        break;

    case MediaSource::Empty:
        TRACE_0("Empty media source");
        break;
    }

    if (oldPlayer)
        oldPlayer->close();

    AbstractPlayer* newPlayer = 0;

    // Construct newPlayer using oldPlayer (if not 0) in order to copy
    // parameters (volume, prefinishMark, transitionTime) which may have
    // been set on oldPlayer.

    switch (mediaType) {
    case MediaTypeUnknown:
        TRACE_0("Media type could not be determined");
        newPlayer = new DummyPlayer(oldPlayer);
        errorMessage = tr("Error opening source: media type could not be determined");
        break;

    case MediaTypeAudio:
        newPlayer = new AudioPlayer(this, oldPlayer);
        break;

    case MediaTypeVideo:
#ifdef PHONON_MMF_VIDEO_SURFACES
        newPlayer = SurfaceVideoPlayer::create(this, oldPlayer);
#else
        newPlayer = DsaVideoPlayer::create(this, oldPlayer);
#endif
        break;
    }

    if (oldPlayer)
        emit abstractPlayerChanged(0);
    m_player.reset(newPlayer);
    emit abstractPlayerChanged(newPlayer);

    if (oldPlayerHasVideo != hasVideo()) {
        emit hasVideoChanged(hasVideo());
    }

    if (oldPlayerSeekable != isSeekable()) {
        emit seekableChanged(isSeekable());
    }

    connect(m_player.data(), SIGNAL(totalTimeChanged(qint64)), SIGNAL(totalTimeChanged(qint64)));
    connect(m_player.data(), SIGNAL(stateChanged(Phonon::State,Phonon::State)), SIGNAL(stateChanged(Phonon::State,Phonon::State)));
    connect(m_player.data(), SIGNAL(finished()), SIGNAL(finished()));
    connect(m_player.data(), SIGNAL(bufferStatus(int)), SIGNAL(bufferStatus(int)));
    connect(m_player.data(), SIGNAL(metaDataChanged(QMultiMap<QString,QString>)), SIGNAL(metaDataChanged(QMultiMap<QString,QString>)));
    connect(m_player.data(), SIGNAL(aboutToFinish()), SIGNAL(aboutToFinish()));
    connect(m_player.data(), SIGNAL(prefinishMarkReached(qint32)), SIGNAL(prefinishMarkReached(qint32)));
    connect(m_player.data(), SIGNAL(prefinishMarkReached(qint32)), SLOT(handlePrefinishMarkReached(qint32)));
    connect(m_player.data(), SIGNAL(tick(qint64)), SIGNAL(tick(qint64)));

    // We need to call setError() after doing the connects, otherwise the
    // error won't be received.
    if (!errorMessage.isEmpty()) {
        Q_ASSERT(m_player);
        m_player->setError(errorMessage);
    }

    TRACE_EXIT_0();
}
void MPlayerMediaWidget::play(const MediaSource &source)
{
	resetState();
	QByteArray url = source.getUrl().toEncoded();

	switch (source.getType()) {
	case MediaSource::Url:
		if (url.endsWith(".iso")) {
			// FIXME use dvd://, dvdnav:// ?
			updateDvdMenu(true);
		}

		if (source.getUrl().isLocalFile()) {
			// mplayer can't deal with urls like "file:///tmp/te%20st.m2t"
			url = QFile::encodeName(source.getUrl().toLocalFile());
			url.replace(' ', "\\ ");
		}

		break;
	case MediaSource::AudioCd:
		if (url.size() >= 7) {
			// e.g. cdda:////dev/sr0
			url.replace(0, 5, "cdda:/");
		} else {
			url = "cdda://";
		}

		break;
	case MediaSource::VideoCd:
		if (url.size() >= 7) {
			// e.g. vcd:////dev/sr0
			url.replace(0, 5, "vcd:/");
		} else {
			url = "vcd://";
		}

		break;
	case MediaSource::Dvd:
		if (url.size() >= 7) {
			// e.g. dvdnav:////dev/sr0
			url.replace(0, 5, "dvdnav:/");
		} else {
			url = "dvdnav://";
		}

		updateDvdMenu(true);
		break;
	case MediaSource::Dvb:
		if (source.getUrl().isLocalFile()) {
			// mplayer can't deal with urls like "file:///tmp/te%20st.m2t"
			url = QFile::encodeName(source.getUrl().toLocalFile());
			url.replace(' ', "\\ ");
		}

		break;
	}

	updatePlaybackStatus(MediaWidget::Playing);
	updateSeekable(true);
	process.write("loadfile " + url + '\n');
	process.write("pausing_keep_force get_property path\n");
	sendCommand(SetDeinterlacing);
	sendCommand(SetVolume);
	timerId = startTimer(500);
}