// JNI binding for setDataSource(path, headers).
// Validates the native player and the Java path string, forwards the UTF-8
// path to MediaPlayer::setDataSource(), then reports the status back to Java
// (failure surfaces as a java.io.IOException via process_media_player_call).
// NOTE(review): the |headers| parameter is never read in this implementation —
// confirm whether HTTP-header support was intentionally omitted.
static void com_media_ffmpeg_FFMpegPlayer_setDataSourceAndHeaders( JNIEnv *env, jobject thiz, jstring path, jobject headers)
{
    MediaPlayer* mp = getMediaPlayer(env, thiz);
    if (mp == NULL ) {
        // No native player attached to this Java instance.
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    if (path == NULL) {
        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
        return;
    }

    // Pin the Java string as a UTF-8 C string; must be released below.
    const char *pathStr = env->GetStringUTFChars(path, NULL);
    if (pathStr == NULL) {
        // Out of memory
        jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
        return;
    }

    __android_log_print(ANDROID_LOG_INFO, TAG, "setDataSource: path %s", pathStr);
    status_t opStatus = mp->setDataSource(pathStr);
    __android_log_print(ANDROID_LOG_INFO, TAG, "opStatus---->0x%X", opStatus);

    // Make sure that local ref is released before a potential exception
    env->ReleaseStringUTFChars(path, pathStr);

    process_media_player_call( env, thiz, opStatus, "java/io/IOException", "setDataSource failed." );
}
// Computes the intrinsic size of the <video> playback area following the
// fallback chain below: natural video size (once metadata is available),
// then poster-image size, then the default size (with a special 300x1 case
// for standalone media documents).
LayoutSize RenderVideo::calculateIntrinsicSize()
{
    HTMLVideoElement* video = videoElement();

    // Spec text from 4.8.6
    //
    // The intrinsic width of a video element's playback area is the intrinsic width
    // of the video resource, if that is available; otherwise it is the intrinsic
    // width of the poster frame, if that is available; otherwise it is 300 CSS pixels.
    //
    // The intrinsic height of a video element's playback area is the intrinsic height
    // of the video resource, if that is available; otherwise it is the intrinsic
    // height of the poster frame, if that is available; otherwise it is 150 CSS pixels.
    MediaPlayer* player = mediaElement()->player();
    if (player && video->readyState() >= HTMLVideoElement::HAVE_METADATA) {
        LayoutSize size = player->naturalSize();
        if (!size.isEmpty())
            return size;
    }

    // Fall back to the cached poster-frame size, but only when the poster is
    // actually being shown and loaded without error.
    if (video->shouldDisplayPosterImage() && !m_cachedImageSize.isEmpty() && !imageResource()->errorOccurred())
        return m_cachedImageSize;

    // <video> in standalone media documents should not use the default 300x150
    // size since they also have audio-only files. By setting the intrinsic
    // size to 300x1 the video will resize itself in these cases, and audio will
    // have the correct height (it needs to be > 0 for controls to render properly).
    if (video->ownerDocument() && video->ownerDocument()->isMediaDocument())
        return LayoutSize(defaultSize().width(), 1);

    return defaultSize();
}
static void vlcCallback(const libvlc_event_t *p_event, void *p_user_data) { if (p_event->type == libvlc_MediaPlayerEndReached) { assert(p_user_data); MediaPlayer *mp = (MediaPlayer *)p_user_data; mp->onStopped(); } }
// Paints the video content (or its poster image) into the video box, and
// reports painted/unpainted regions to the Page's relevant-paint bookkeeping
// during the foreground paint phase.
void RenderVideo::paintReplaced(PaintInfo& paintInfo, const LayoutPoint& paintOffset)
{
    MediaPlayer* mediaPlayer = mediaElement()->player();
    bool displayingPoster = videoElement()->shouldDisplayPosterImage();

    Page* page = 0;
    if (Frame* frame = this->frame())
        page = frame->page();

    if (!displayingPoster && !mediaPlayer) {
        // Nothing to draw yet; still record the area as unpainted.
        if (page && paintInfo.phase == PaintPhaseForeground)
            page->addRelevantUnpaintedObject(this, visualOverflowRect());
        return;
    }

    LayoutRect rect = videoBox();
    if (rect.isEmpty()) {
        if (page && paintInfo.phase == PaintPhaseForeground)
            page->addRelevantUnpaintedObject(this, visualOverflowRect());
        return;
    }
    rect.moveBy(paintOffset);

    if (page && paintInfo.phase == PaintPhaseForeground)
        page->addRelevantRepaintedObject(this, rect);

    if (displayingPoster)
        paintIntoRect(paintInfo.context, rect);
    else if (document()->view() && document()->view()->paintBehavior() & PaintBehaviorFlattenCompositingLayers)
        // Flattened painting (e.g. snapshots) pulls the current frame through
        // the software path instead of the regular paint call.
        mediaPlayer->paintCurrentFrameInContext(paintInfo.context, pixelSnappedIntRect(rect));
    else
        mediaPlayer->paint(paintInfo.context, pixelSnappedIntRect(rect));
}
// Handles WM_COPYDATA messages carrying command-line arguments from a second
// application instance.  Each argument arrives as a separate COMMANDLINE_ARG
// message; an empty payload marks the start of a new argument batch.  A "-q"
// flag switches the batch to enqueue mode; otherwise the first path is played
// and any following paths of the same batch are enqueued.
BOOL CTeenSpiritDlg::OnCopyData(CWnd* pWnd, COPYDATASTRUCT* pCopyDataStruct)
{
    if (pCopyDataStruct->dwData == COMMANDLINE_ARG) {
        // Persists across messages of the same batch; reset by the empty
        // payload that precedes each batch.
        static BOOL bEnqueue;
        if (pCopyDataStruct->cbData != 0) {
            LPCTSTR path = (LPCTSTR)pCopyDataStruct->lpData;
            if (_tcsicmp(path, _T("-q")) == 0)
                bEnqueue = TRUE;
            // Skip switch arguments ("-...") and backslash-prefixed entries.
            // Fix: use _T() character literals so the comparison is correct
            // in both ANSI and UNICODE builds.
            if (path[0] != _T('-') && path[0] != _T('\\')) {
                PrgAPI* pAPI = PRGAPI();
                MediaPlayer* pPlayer = pAPI->GetMediaPlayer();
                // (removed unused local: MediaPlayListItem mpli)
                if (bEnqueue)
                    pPlayer->Enqueue(path);
                else
                    pPlayer->Play(path);
                // Subsequent paths in the same batch enqueue rather than
                // interrupting the playback just started.
                bEnqueue = TRUE;
            }
        } else {
            bEnqueue = FALSE;
        }
    }
    return __super::OnCopyData(pWnd, pCopyDataStruct);
}
// Forwards a paint request for the current video frame to the media engine;
// silently does nothing while no player has been created yet.
void HTMLVideoElement::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& destRect)
{
    if (MediaPlayer* player = HTMLMediaElement::player())
        player->paintCurrentFrameInContext(context, destRect);
}
// Paints the poster image or the current video frame, clipping to the content
// box when the video box extends beyond it.
void RenderVideo::paintReplaced(PaintInfo& paintInfo, const LayoutPoint& paintOffset)
{
    MediaPlayer* mediaPlayer = mediaElement()->player();
    bool displayingPoster = videoElement()->shouldDisplayPosterImage();

    if (!displayingPoster && !mediaPlayer)
        return;

    LayoutRect rect = videoBox();
    if (rect.isEmpty())
        return;
    rect.moveBy(paintOffset);

    LayoutRect contentRect = contentBoxRect();
    contentRect.moveBy(paintOffset);

    GraphicsContext* context = paintInfo.context;
    // Only set up a clip when the video box actually escapes the content box;
    // save()/restore() are paired via the same flag below.
    bool clip = !contentRect.contains(rect);
    if (clip) {
        context->save();
        context->clip(contentRect);
    }

    if (displayingPoster)
        paintIntoRect(context, rect);
    else if ((document().view() && document().view()->paintBehavior() & PaintBehaviorFlattenCompositingLayers) || !acceleratedRenderingInUse())
        // Software paint path: flattened painting, or no compositing layer in
        // use.  Otherwise the compositor presents the video frames directly.
        mediaPlayer->paint(context, pixelSnappedIntRect(rect));

    if (clip)
        context->restore();
}
// Returns the external playback device type ("none", "airplay" or "tvout")
// for the media-controls script.  Without ENABLE(IOS_AIRPLAY) the answer is
// always "none".
String MediaControlsHost::externalDeviceType() const
{
    DEPRECATED_DEFINE_STATIC_LOCAL(String, none, (ASCIILiteral("none")));
    String type = none;
#if ENABLE(IOS_AIRPLAY)
    DEPRECATED_DEFINE_STATIC_LOCAL(String, airplay, (ASCIILiteral("airplay")));
    DEPRECATED_DEFINE_STATIC_LOCAL(String, tvout, (ASCIILiteral("tvout")));

    MediaPlayer* player = m_mediaElement->player();
    if (!player) {
        LOG(Media, "MediaControlsHost::externalDeviceType - returning \"none\" because player is NULL");
        return none;
    }

    // Map the engine's wireless target type onto the script-visible strings.
    switch (player->wirelessPlaybackTargetType()) {
    case MediaPlayer::TargetTypeNone:
        type = none;
        break;
    case MediaPlayer::TargetTypeAirPlay:
        type = airplay;
        break;
    case MediaPlayer::TargetTypeTVOut:
        type = tvout;
        break;
    }
#endif
    LOG(Media, "MediaControlsHost::externalDeviceType - returning \"%s\"", type.utf8().data());
    return type;
}
// If exception is NULL and opStatus is not OK, this method sends an error // event to the client application; otherwise, if exception is not NULL and // opStatus is not OK, this method throws the given exception to the client // application. static void process_media_player_call(JNIEnv *env, jobject thiz, int opStatus, const char* exception, const char *message) { if (exception == NULL) { // Don't throw exception. Instead, send an event. if (opStatus != (int) OK) { MediaPlayer* mp = getMediaPlayer(env, thiz); if (mp != 0) mp->notify(MEDIA_ERROR, opStatus, 0, 0); } } else { // Throw exception! if ( opStatus == (int) INVALID_OPERATION ) { jniThrowException(env, "java/lang/IllegalStateException", NULL); } else if ( opStatus == (int) PERMISSION_DENIED ) { jniThrowException(env, "java/lang/SecurityException", NULL); } else if ( opStatus != (int) OK ) { if (strlen(message) > 230) { // if the message is too long, don't bother displaying the status code jniThrowException( env, exception, message); } else { char msg[256]; // append the status code to the message sprintf(msg, "%s: status=0x%X", message, opStatus); jniThrowException( env, exception, msg); } } } }
// Decides whether wireless (AirPlay) video playback is disallowed for
// |element|: settings first, then the per-element attribute, then (iOS only)
// the legacy x-webkit-airplay attribute, and finally the media engine.
bool MediaElementSession::wirelessVideoPlaybackDisabled(const HTMLMediaElement& element) const
{
    Settings* settings = element.document().settings();
    if (!settings || !settings->allowsAirPlayForMediaPlayback()) {
        LOG(Media, "MediaElementSession::wirelessVideoPlaybackDisabled - returning TRUE because of settings");
        return true;
    }

    if (element.fastHasAttribute(HTMLNames::webkitwirelessvideoplaybackdisabledAttr)) {
        LOG(Media, "MediaElementSession::wirelessVideoPlaybackDisabled - returning TRUE because of attribute");
        return true;
    }

#if PLATFORM(IOS)
    // Legacy attribute: an explicit "deny"/"allow" overrides the engine query.
    String legacyAirplayAttributeValue = element.fastGetAttribute(HTMLNames::webkitairplayAttr);
    if (equalLettersIgnoringASCIICase(legacyAirplayAttributeValue, "deny")) {
        LOG(Media, "MediaElementSession::wirelessVideoPlaybackDisabled - returning TRUE because of legacy attribute");
        return true;
    }
    if (equalLettersIgnoringASCIICase(legacyAirplayAttributeValue, "allow")) {
        LOG(Media, "MediaElementSession::wirelessVideoPlaybackDisabled - returning FALSE because of legacy attribute");
        return false;
    }
#endif

    // No player yet: treat wireless playback as disabled.
    MediaPlayer* player = element.player();
    if (!player)
        return true;

    bool disabled = player->wirelessVideoPlaybackDisabled();
    LOG(Media, "MediaElementSession::wirelessVideoPlaybackDisabled - returning %s because media engine says so", disabled ? "TRUE" : "FALSE");
    return disabled;
}
bool RenderVideo::supportsAcceleratedRendering() const { MediaPlayer* p = mediaElement()->player(); if (p) return p->supportsAcceleratedRendering(); return false; }
void TrayToolTipDlg::UpdateSliders() { MediaPlayer* pMP = PRGAPI()->GetMediaPlayer(); INT volume = pMP->GetVolume(); INT pos = (INT)(pMP->GetMediaPos() * 1000); m_volumeSlider.SetPos(volume); m_positionSlider.SetPos(pos); m_positionSlider.SetMaxPos((INT)(pMP->GetMediaLength()*1000)); }
// Paints the current video frame into |context| via the media engine,
// forcing the player visible first (it refuses to draw otherwise).
void HTMLVideoElement::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& destRect)
{
    if (MediaPlayer* engine = HTMLMediaElement::player()) {
        engine->setVisible(true); // Make player visible or it won't draw.
        engine->paintCurrentFrameInContext(context, destRect);
    }
}
// JNI binding: attaches the auxiliary audio effect |effectId| to the player.
static void wseemann_media_FFmpegMediaPlayer_attachAuxEffect(JNIEnv *env, jobject thiz, jint effectId)
{
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "attachAuxEffect(): %d", effectId);

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->attachAuxEffect(effectId), NULL, NULL);
}
// JNI binding: assigns the audio session id used for effect routing.
static void wseemann_media_FFmpegMediaPlayer_set_audio_session_id(JNIEnv *env, jobject thiz, jint sessionId)
{
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "set_session_id(): %d", sessionId);

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->setAudioSessionId(sessionId), NULL, NULL);
}
// JNI binding: synchronously prepares the player; a failure surfaces to the
// Java side as a java.io.IOException.
static void com_media_ffmpeg_FFMpegPlayer_prepare(JNIEnv *env, jobject thiz)
{
    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    process_media_player_call(env, thiz, player->prepare(), "java/io/IOException", "Prepare failed.");
}
// JNI binding: returns the player's audio session id, or 0 after raising
// IllegalStateException when no native player is attached.
static jint wseemann_media_FFmpegMediaPlayer_get_audio_session_id(JNIEnv *env, jobject thiz)
{
    __android_log_write(ANDROID_LOG_VERBOSE, LOG_TAG, "get_session_id()");

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return 0;
    }
    return player->getAudioSessionId();
}
// JNI binding: resets the player back to its idle state.
static void com_media_ffmpeg_FFMpegPlayer_reset(JNIEnv *env, jobject thiz)
{
    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->reset(), NULL, NULL);
}
// JNI binding: selects the Android audio stream type used for output.
static void com_media_ffmpeg_FFMpegPlayer_setAudioStreamType(JNIEnv *env, jobject thiz, int streamtype)
{
    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->setAudioStreamType(streamtype), NULL, NULL);
}
// JNI binding: sets the send level to the attached auxiliary effect.
static void wseemann_media_FFmpegMediaPlayer_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level)
{
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "setAuxEffectSendLevel: level %f", level);

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->setAuxEffectSendLevel(level), NULL, NULL);
}
// JNI binding: sets the per-channel playback volume.
static void wseemann_media_FFmpegMediaPlayer_setVolume(JNIEnv *env, jobject thiz, float leftVolume, float rightVolume)
{
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "setVolume: left %f right %f", leftVolume, rightVolume);

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->setVolume(leftVolume, rightVolume), NULL, NULL);
}
// JNI binding: reports whether looping playback is enabled; false (after
// raising IllegalStateException) when no native player is attached.
static jboolean wseemann_media_FFmpegMediaPlayer_isLooping(JNIEnv *env, jobject thiz)
{
    __android_log_write(ANDROID_LOG_VERBOSE, LOG_TAG, "isLooping");

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return false;
    }
    return player->isLooping();
}
// JNI binding: enables or disables looping playback.
static void wseemann_media_FFmpegMediaPlayer_setLooping(JNIEnv *env, jobject thiz, jboolean looping)
{
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "setLooping: %d", looping);

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->setLooping(looping), NULL, NULL);
}
// JNI binding: selects the Android audio stream type used for output.
static void wseemann_media_FFmpegMediaPlayer_setAudioStreamType(JNIEnv *env, jobject thiz, int streamtype)
{
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "setAudioStreamType: %d", streamtype);

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->setAudioStreamType(streamtype), NULL, NULL);
}
// JNI binding: resets the player back to its idle state.
static void wseemann_media_FFmpegMediaPlayer_reset(JNIEnv *env, jobject thiz)
{
    __android_log_write(ANDROID_LOG_VERBOSE, LOG_TAG, "reset");

    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->reset(), NULL, NULL);
}
// JNI binding: seeks playback to |msec| milliseconds.
static void wseemann_media_FFmpegMediaPlayer_seekTo(JNIEnv *env, jobject thiz, int msec)
{
    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "seekTo: %d(msec)", msec);
    // NULL exception/message: failures are reported as MEDIA_ERROR events.
    process_media_player_call(env, thiz, player->seekTo(msec), NULL, NULL);
}
// Creates a MediaPlayer preloaded with |file| (used for camera sounds).
// Returns NULL when the file cannot be loaded.
// Fix: the original leaked the newly allocated player on the failure path;
// it is now freed before returning NULL.
MediaPlayer* CameraService::newMediaPlayer(const char *file)
{
    MediaPlayer* mp = new MediaPlayer();
    if (mp->setDataSource(file, NULL) != NO_ERROR) {
        LOGE("Failed to load CameraService sounds: %s", file);
        delete mp; // NOTE(review): adjust if MediaPlayer is ref-counted elsewhere.
        return NULL;
    }
    // ENFORCED_AUDIBLE so the shutter sound plays regardless of volume settings.
    mp->setAudioStreamType(AUDIO_STREAM_ENFORCED_AUDIBLE);
    mp->prepare();
    return mp;
}
// JNI binding: reports whether playback is active; false (after raising
// IllegalStateException) when no native player is attached.
static jboolean com_media_ffmpeg_FFMpegPlayer_isPlaying(JNIEnv *env, jobject thiz)
{
    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return false;
    }
    return player->isPlaying();
}
// Draws one piece of the media-controls UI using the SafariTheme library.
// NOTE(review): the function unconditionally returns false — confirm against
// the caller's contract which truth value means "part was painted here".
bool RenderMediaControls::paintMediaControlsPart(MediaControlElementType part, RenderObject* o, const RenderObject::PaintInfo& paintInfo, const IntRect& r)
{
    ASSERT(SafariThemeLibrary());

    switch (part) {
    case MediaFullscreenButton:
        paintThemePart(SafariTheme::MediaFullscreenButtonPart, paintInfo.context->platformContext(), r, NSRegularControlSize, determineState(o));
        break;
    case MediaMuteButton:
    case MediaUnMuteButton:
        // While muted the UnMute art is painted, and vice versa.
        if (HTMLMediaElement* mediaElement = parentMediaElement(o))
            paintThemePart(mediaElement->muted() ? SafariTheme::MediaUnMuteButtonPart : SafariTheme::MediaMuteButtonPart, paintInfo.context->platformContext(), r, NSRegularControlSize, determineState(o));
        break;
    case MediaPauseButton:
    case MediaPlayButton:
        // canPlay() selects the Play art; otherwise the Pause art is shown.
        if (HTMLMediaElement* mediaElement = parentMediaElement(o))
            paintThemePart(mediaElement->canPlay() ? SafariTheme::MediaPlayButtonPart : SafariTheme::MediaPauseButtonPart, paintInfo.context->platformContext(), r, NSRegularControlSize, determineState(o));
        break;
    case MediaSeekBackButton:
        paintThemePart(SafariTheme::MediaSeekBackButtonPart, paintInfo.context->platformContext(), r, NSRegularControlSize, determineState(o));
        break;
    case MediaSeekForwardButton:
        paintThemePart(SafariTheme::MediaSeekForwardButtonPart, paintInfo.context->platformContext(), r, NSRegularControlSize, determineState(o));
        break;
    case MediaSlider: {
        HTMLMediaElement* mediaElement = parentMediaElement(o);
        if (!mediaElement)
            break;
        // The slider track is drawn as a progress indicator showing the
        // buffered fraction (0 when duration is unknown or zero).
        MediaPlayer* player = mediaElement->player();
        float duration = player ? player->duration() : 0;
        float percentLoaded = duration ? player->maxTimeBuffered() /duration : 0;
        STPaintProgressIndicator(SafariTheme::MediaType, paintInfo.context->platformContext(), r, NSRegularControlSize, 0, percentLoaded);
        break;
    }
    case MediaSliderThumb:
        paintThemePart(SafariTheme::MediaSliderThumbPart, paintInfo.context->platformContext(), r, NSRegularControlSize, determineState(o));
        break;
    // The remaining parts are never painted through this path.
    case MediaTimelineContainer:
        ASSERT_NOT_REACHED();
        break;
    case MediaCurrentTimeDisplay:
        ASSERT_NOT_REACHED();
        break;
    case MediaTimeRemainingDisplay:
        ASSERT_NOT_REACHED();
        break;
    case MediaControlsPanel:
        ASSERT_NOT_REACHED();
        break;
    }

    return false;
}
// JNI binding: suspends or resumes the native player depending on
// |isSuspend|; returns UNKNOWN_ERROR when no native player is attached.
static jint com_media_ffmpeg_FFMpegPlayer_native_suspend_resume( JNIEnv *env, jobject thiz, jboolean isSuspend)
{
    MediaPlayer* player = getMediaPlayer(env, thiz);
    if (player == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return UNKNOWN_ERROR;
    }
    if (isSuspend)
        return player->suspend();
    return player->resume();
}