Example #1
// AndroidBufferQueueItf callback for an audio player
XAresult AndroidBufferQueueCallback(
        XAAndroidBufferQueueItf caller,
        void *pCallbackContext,        /* input */
        void *pBufferContext,          /* input */
        void *pBufferData,             /* input */
        XAuint32 dataSize,             /* input */
        XAuint32 dataUsed,             /* input */
        const XAAndroidBufferItem *pItems,/* input */
        XAuint32 itemsLength           /* input */)
{
    XAresult res;
    int ok;

    // pCallbackContext was specified as NULL at RegisterCallback and is unused here
    assert(NULL == pCallbackContext);

    // note there is never any contention on this mutex unless a discontinuity request is active
    ok = pthread_mutex_lock(&mutex);
    assert(0 == ok);

    // was a discontinuity requested?
    if (discontinuity) {
        // FIXME sorry, can't rewind after EOS
        if (!reachedEof) {
            // clear the buffer queue
            res = (*playerBQItf)->Clear(playerBQItf);
            assert(XA_RESULT_SUCCESS == res);
            // rewind the data source so we are guaranteed to be at an appropriate point
            rewind(file);
            // Enqueue the initial buffers, with a discontinuity indicator on first buffer
            (void) enqueueInitialBuffers(JNI_TRUE);
        }
        // acknowledge the discontinuity request
        discontinuity = JNI_FALSE;
        ok = pthread_cond_signal(&cond);
        assert(0 == ok);
        goto exit;
    }

    if ((pBufferData == NULL) && (pBufferContext != NULL)) {
        const int processedCommand = *(int *)pBufferContext;
        if (kEosBufferCntxt == processedCommand) {
            ALOGV("EOS was processed\n");
            // our buffer with the EOS message has been consumed
            assert(0 == dataSize);
            goto exit;
        }
    }

    // pBufferData is a pointer to a buffer that we previously Enqueued
    assert(BUFFER_SIZE == dataSize);
    assert(dataCache <= (char *) pBufferData && (char *) pBufferData <
            &dataCache[BUFFER_SIZE * NB_BUFFERS]);
    assert(0 == (((char *) pBufferData - dataCache) % BUFFER_SIZE));

#if 0
    // sample code to use the XAVolumeItf
    XAAndroidBufferQueueState state;
    (*caller)->GetState(caller, &state);
    switch (state.index) {
    case 300:
        (*playerVolItf)->SetVolumeLevel(playerVolItf, -600); // -6dB
        ALOGV("setting volume to -6dB");
        break;
    case 400:
        (*playerVolItf)->SetVolumeLevel(playerVolItf, -1200); // -12dB
        ALOGV("setting volume to -12dB");
        break;
    case 500:
        (*playerVolItf)->SetVolumeLevel(playerVolItf, 0); // full volume
        ALOGV("setting volume to 0dB (full volume)");
        break;
    case 600:
        (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_TRUE); // mute
        ALOGV("muting player");
        break;
    case 700:
        (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_FALSE); // unmute
        ALOGV("unmuting player");
        break;
    case 800:
        (*playerVolItf)->SetStereoPosition(playerVolItf, -1000);
        (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_TRUE);
        ALOGV("pan sound to the left (hard-left)");
        break;
    case 900:
        (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_FALSE);
        ALOGV("disabling stereo position");
        break;
    default:
        break;
    }
#endif

    // don't bother trying to read more data once we've hit EOF
    if (reachedEof) {
        goto exit;
    }

    size_t nbRead;
    // note we do call fread from multiple threads, but never concurrently
    nbRead = fread(pBufferData, BUFFER_SIZE, 1, file);
    if (nbRead > 0) {
        assert(1 == nbRead);
        res = (*caller)->Enqueue(caller, NULL /*pBufferContext*/,
                pBufferData /*pData*/,
                nbRead * BUFFER_SIZE /*dataLength*/,
                NULL /*pMsg*/,
                0 /*msgLength*/);
        assert(XA_RESULT_SUCCESS == res);
    } else {
        // signal EOS
        XAAndroidBufferItem msgEos[1];
        msgEos[0].itemKey = XA_ANDROID_ITEMKEY_EOS;
        msgEos[0].itemSize = 0;
        // EOS message has no parameters, so the total size of the message is the size of the key
        //   plus the size of itemSize, both XAuint32
        res = (*caller)->Enqueue(caller, (void *)&kEosBufferCntxt /*pBufferContext*/,
                NULL /*pData*/, 0 /*dataLength*/,
                msgEos /*pMsg*/,
                // FIXME == sizeof(XAAndroidBufferItem)?
                sizeof(XAuint32)*2 /*msgLength*/);
        assert(XA_RESULT_SUCCESS == res);
        reachedEof = JNI_TRUE;
    }

exit:
    ok = pthread_mutex_unlock(&mutex);
    assert(0 == ok);
    return XA_RESULT_SUCCESS;
}
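
Example #1 calls enqueueInitialBuffers() but does not show it. Below is a minimal sketch of what such a helper could look like for this fixed-size-buffer variant; it assumes the same globals used above (file, dataCache, playerBQItf, BUFFER_SIZE, NB_BUFFERS) plus the usual jni.h/assert.h includes, and it is not the original implementation.

// Hypothetical helper: pre-fill the Android buffer queue before starting playback.
// The first buffer optionally carries a DISCONTINUITY item, as expected by the callback above.
static jboolean enqueueInitialBuffers(jboolean discontinuity)
{
    XAresult res;
    int i;
    for (i = 0; i < NB_BUFFERS; i++) {
        char *buffer = dataCache + i * BUFFER_SIZE;
        size_t nbRead = fread(buffer, BUFFER_SIZE, 1, file);
        if (nbRead != 1) {
            // not enough data left to fill a whole buffer; stop pre-filling
            break;
        }
        if (discontinuity && i == 0) {
            // signal a discontinuity on the first buffer only
            XAAndroidBufferItem items[1];
            items[0].itemKey = XA_ANDROID_ITEMKEY_DISCONTINUITY;
            items[0].itemSize = 0;
            res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
                    buffer, BUFFER_SIZE, items, sizeof(XAuint32)*2);
        } else {
            res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
                    buffer, BUFFER_SIZE, NULL, 0);
        }
        assert(XA_RESULT_SUCCESS == res);
    }
    // report failure if nothing at all could be queued
    return (i > 0) ? JNI_TRUE : JNI_FALSE;
}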
Example #2
// create streaming media player
jboolean Java_com_example_nativemedia_NativeMedia_createStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jstring filename)
{
    XAresult res;

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
    assert(NULL != utf8);

    // open the file to play
    file = fopen(utf8, "rb");
    if (file == NULL) {
        ALOGE("Failed to open %s", utf8);
        return JNI_FALSE;
    }

    // configure data source
    XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS };
    XADataFormat_MIME format_mime = {
            XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS };
    XADataSource dataSrc = {&loc_abq, &format_mime};

    // configure audio sink
    XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
    XADataSink audioSnk = { &loc_outmix, NULL };

    // configure image video sink
    XADataLocator_NativeDisplay loc_nd = {
            XA_DATALOCATOR_NATIVEDISPLAY,        // locatorType
            // the video sink must be an ANativeWindow created from a Surface or SurfaceTextureClient
            (void*)theNativeWindow,              // hWindow
            // must be NULL
            NULL                                 // hDisplay
    };
    XADataSink imageVideoSink = {&loc_nd, NULL};

    // declare interfaces to use
    XAboolean     required[NB_MAXAL_INTERFACES]
                           = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE,           XA_BOOLEAN_TRUE};
    XAInterfaceID iidArray[NB_MAXAL_INTERFACES]
                           = {XA_IID_PLAY,     XA_IID_ANDROIDBUFFERQUEUESOURCE,
                                               XA_IID_STREAMINFORMATION};


    // create media player
    res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
            NULL, &audioSnk, &imageVideoSink, NULL, NULL,
            NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
            iidArray /*const XAInterfaceID *pInterfaceIds*/,
            required /*const XAboolean *pInterfaceRequired*/);
    assert(XA_RESULT_SUCCESS == res);

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, filename, utf8);

    // realize the player
    res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);

    // get the play interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the stream information interface (for video size)
    res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the volume interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the Android buffer queue interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUESOURCE, &playerBQItf);
    assert(XA_RESULT_SUCCESS == res);

    // specify which events we want to be notified of
    res = (*playerBQItf)->SetCallbackEventsMask(playerBQItf, XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
    assert(XA_RESULT_SUCCESS == res);

    // use the play interface to set up a callback for the XA_PLAYEVENT_HEADATEND event
    res = (*playerPlayItf)->SetCallbackEventsMask(playerPlayItf, XA_PLAYEVENT_HEADATEND);
    assert(XA_RESULT_SUCCESS == res);
    res = (*playerPlayItf)->RegisterCallback(playerPlayItf,
            PlayCallback /*callback*/, NULL /*pContext*/);
    assert(XA_RESULT_SUCCESS == res);

    // register the callback from which OpenMAX AL can retrieve the data to play
    res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // we want to be notified of the video size once it's found, so we register a callback for that
    res = (*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf,
            StreamChangeCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // enqueue the initial buffers
    if (!enqueueInitialBuffers(JNI_FALSE)) {
        return JNI_FALSE;
    }

    // prepare the player
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
    assert(XA_RESULT_SUCCESS == res);

    // set the volume
    res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);    // 0 millibels, i.e. full volume
    assert(XA_RESULT_SUCCESS == res);

    // start the playback
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
    assert(XA_RESULT_SUCCESS == res);

    return JNI_TRUE;
}
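
Example #2 registers PlayCallback for the XA_PLAYEVENT_HEADATEND event, but its body is not shown. The following is a minimal sketch, reusing the ALOGV logging macro from Example #1; what a real implementation does with the event (signal a condition variable, notify Java, etc.) is an assumption.

// Hypothetical XAPlayItf callback: invoked when the player has rendered everything
// that was queued (XA_PLAYEVENT_HEADATEND), e.g. after the EOS buffer was consumed.
static void PlayCallback(XAPlayItf caller, void *pContext, XAuint32 event)
{
    // pContext was specified as NULL at RegisterCallback and is unused here
    assert(NULL == pContext);
    if (event & XA_PLAYEVENT_HEADATEND) {
        ALOGV("XA_PLAYEVENT_HEADATEND received, all queued data has been played");
        // a real implementation might signal a condition variable or notify Java here
    }
}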
Example #3
// reset player to play another source
JNIEXPORT void JNICALL Java_org_apache_cordova_example_VideoPlayer_reset(JNIEnv* env,
        jclass clazz, jstring filename) {
    LOGE("reset!");

	/*	if (playerObj != NULL) {
		(*playerObj)->Destroy(playerObj);
		LOGE("playerObj");
		playerObj = NULL;
		playerPlayItf = NULL;
		playerBQItf = NULL;
		playerStreamInfoItf = NULL;
		playerVolItf = NULL;
	}

	// destroy output mix object, and invalidate all associated interfaces
	if (outputMixObject != NULL) {
		(*outputMixObject)->Destroy(outputMixObject);
		LOGE("outputMixObject");
		outputMixObject = NULL;
	}

	// destroy engine object, and invalidate all associated interfaces
	if (engineObject != NULL) {
		(*engineObject)->Destroy(engineObject);
		LOGE("engineObject");
		engineObject = NULL;
		engineEngine = NULL;
	}
	finished = 1;*/
    XAresult res;
    int ret;

    // pause the player
    //res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
    //assert(XA_RESULT_SUCCESS == res);

    // signal the download thread (curltest) to finish and wait for it
    LOGE("quit pthread start!");
    finished = 1;
    ret = pthread_join(httpThd, NULL);
    LOGE("quit pthread end!");
    if (ret != 0) {
        LOGE("quit pthread error!");
        return;
    }

    finished = 0;

    // convert Java string to UTF-8
    const char *utf8 = (env)->GetStringUTFChars(filename, NULL);
    assert(NULL != utf8);

    // open the file to play
    file = fopen(utf8, "rb");
    if (file == NULL) {
        (env)->ReleaseStringUTFChars(filename, utf8);
        return;
    }

    // determine the file size, then reopen the file to read it from the beginning
    int len = getFileSize(file);
    fclose(file);
    file = NULL;
    file = fopen(utf8, "rb");
    if (file == NULL) {
        (env)->ReleaseStringUTFChars(filename, utf8);
        return;
    }
    LOGE("len= %d", len);

    // read the whole playlist file into a NUL-terminated buffer
    char* pBufferData = new char[len + 1];
    pBufferData[len] = '\0';
    int reLen = fread(pBufferData, 1, len, file);
    LOGE("relen= %d", reLen);

    int ok;
    ok = pthread_mutex_lock(&mutex_rw);
    assert(0 == ok);
    hlsPlayList.SetContent(pBufferData, len);
    ok = pthread_mutex_unlock(&mutex_rw);
    assert(0 == ok);

    // close the file
    if (file != NULL) {
        fclose(file);
        file = NULL;
    }

    // release the Java string and UTF-8
    (env)->ReleaseStringUTFChars(filename, utf8);


		/*if (NULL != playerBQItf && NULL != file) {
		// first wait for buffers currently in queue to be drained
		int ok;
		ok = pthread_mutex_lock(&mutex);
		assert(0 == ok);
		discontinuity = JNI_TRUE;
		// wait for discontinuity request to be observed by buffer queue callback
		// Note: can't rewind after EOS, which we send when reaching EOF
		// (don't send EOS if you plan to play more content through the same player)
		while (discontinuity && !reachedEof) {
			ok = pthread_cond_wait(&cond, &mutex);
			assert(0 == ok);
		}
		ok = pthread_mutex_unlock(&mutex);
		assert(0 == ok);
	}

	 //pthread_create(&httpThd, NULL, curltest, NULL);
*/

    //sleep(5);
    ok = pthread_mutex_lock(&mutex);
    assert(0 == ok);
    //discontinuity = JNI_TRUE;
    LOGE("discontinuity");
    res = (*playerBQItf)->Clear(playerBQItf);
    assert(XA_RESULT_SUCCESS == res);
    clear();
    pthread_create(&httpThd, NULL, curltest, NULL);
    // Enqueue the initial buffers, with a discontinuity indicator on first buffer
    (void) enqueueInitialBuffers(JNI_TRUE);
    LOGE("discontinuity end");
    ok = pthread_mutex_unlock(&mutex);
    assert(0 == ok);

}
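
Example #3 relies on a getFileSize() helper that is not shown. A possible stdio-based sketch is given below; the original helper may be implemented differently (for example via stat()), so treat this only as an illustration that matches the call site, which stores the result in an int.

// Hypothetical helper: return the size in bytes of an already-open file by seeking
// to the end and back, restoring the previous file position afterwards.
static int getFileSize(FILE *f)
{
    long cur = ftell(f);                 // remember the current position
    if (cur < 0 || fseek(f, 0, SEEK_END) != 0) {
        return -1;
    }
    long size = ftell(f);
    fseek(f, cur, SEEK_SET);             // restore the previous position
    return (int) size;                   // the call site stores the result in an int
}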
Example #4
// AndroidBufferQueueItf callback to supply MPEG-2 TS packets to the media player
static XAresult AndroidBufferQueueCallback(
        XAAndroidBufferQueueItf caller,
        void *pCallbackContext,        /* input */
        void *pBufferContext,          /* input */
        void *pBufferData,             /* input */
        XAuint32 dataSize,             /* input */
        XAuint32 dataUsed,             /* input */
        const XAAndroidBufferItem *pItems,/* input */
        XAuint32 itemsLength           /* input */)
{
    XAresult res;
    int ok;

    // pCallbackContext was specified as NULL at RegisterCallback and is unused here
    assert(NULL == pCallbackContext);

    // note there is never any contention on this mutex unless a discontinuity request is active
    ok = pthread_mutex_lock(&mutex);
    assert(0 == ok);

    // was a discontinuity requested?
    if (discontinuity) {
        // Note: can't rewind after EOS, which we send when reaching EOF
        // (don't send EOS if you plan to play more content through the same player)
        if (!reachedEof) {
            // clear the buffer queue
            res = (*playerBQItf)->Clear(playerBQItf);
            assert(XA_RESULT_SUCCESS == res);
            // rewind the data source so we are guaranteed to be at an appropriate point
            rewind(file);
            // Enqueue the initial buffers, with a discontinuity indicator on first buffer
            (void) enqueueInitialBuffers(JNI_TRUE);
        }
        // acknowledge the discontinuity request
        discontinuity = JNI_FALSE;
        ok = pthread_cond_signal(&cond);
        assert(0 == ok);
        goto exit;
    }

    if ((pBufferData == NULL) && (pBufferContext != NULL)) {
        const int processedCommand = *(int *)pBufferContext;
        if (kEosBufferCntxt == processedCommand) {
            LOGV("EOS was processed\n");
            // our buffer with the EOS message has been consumed
            assert(0 == dataSize);
            goto exit;
        }
    }

    // pBufferData is a pointer to a buffer that we previously Enqueued
    assert((dataSize > 0) && ((dataSize % MPEG2_TS_PACKET_SIZE) == 0));
    assert(dataCache <= (char *) pBufferData && (char *) pBufferData <
            &dataCache[BUFFER_SIZE * NB_BUFFERS]);
    assert(0 == (((char *) pBufferData - dataCache) % BUFFER_SIZE));

    // don't bother trying to read more data once we've hit EOF
    if (reachedEof) {
        goto exit;
    }

    // note we do call fread from multiple threads, but never concurrently
    size_t bytesRead;
    bytesRead = fread(pBufferData, 1, BUFFER_SIZE, file);
    if (bytesRead > 0) {
        if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
            LOGV("Dropping last packet because it is not whole");
        }
        size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
        size_t bufferSize = packetsRead * MPEG2_TS_PACKET_SIZE;
        res = (*caller)->Enqueue(caller, NULL /*pBufferContext*/,
                pBufferData /*pData*/,
                bufferSize /*dataLength*/,
                NULL /*pMsg*/,
                0 /*msgLength*/);
        assert(XA_RESULT_SUCCESS == res);
    } else {
        // EOF or I/O error, signal EOS
        XAAndroidBufferItem msgEos[1];
        msgEos[0].itemKey = XA_ANDROID_ITEMKEY_EOS;
        msgEos[0].itemSize = 0;
        // EOS message has no parameters, so the total size of the message is the size of the key
        //   plus the size of itemSize, both XAuint32
        res = (*caller)->Enqueue(caller, (void *)&kEosBufferCntxt /*pBufferContext*/,
                NULL /*pData*/, 0 /*dataLength*/,
                msgEos /*pMsg*/,
                sizeof(XAuint32)*2 /*msgLength*/);
        assert(XA_RESULT_SUCCESS == res);
        reachedEof = JNI_TRUE;
    }

exit:
    ok = pthread_mutex_unlock(&mutex);
    assert(0 == ok);
    return XA_RESULT_SUCCESS;
}
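
Example #4 refers to constants and shared globals (MPEG2_TS_PACKET_SIZE, BUFFER_SIZE, NB_BUFFERS, dataCache, kEosBufferCntxt, the mutex/cond pair, and playerBQItf) that are not shown. The sketch below lists plausible declarations for them; the values marked illustrative, and the exact names, are assumptions, except that an MPEG-2 TS packet is always 188 bytes.

#include <stdio.h>
#include <pthread.h>
#include <jni.h>
#include <OMXAL/OpenMAXAL.h>
#include <OMXAL/OpenMAXAL_Android.h>

// Illustrative constants and shared state for the packet-aligned callback above.
#define MPEG2_TS_PACKET_SIZE 188                        // an MPEG-2 TS packet is always 188 bytes
#define PACKETS_PER_BUFFER   10                         // illustrative: packets carried per queue buffer
#define BUFFER_SIZE          (PACKETS_PER_BUFFER * MPEG2_TS_PACKET_SIZE)
#define NB_BUFFERS           8                          // illustrative: buffers in the Android buffer queue

static char dataCache[BUFFER_SIZE * NB_BUFFERS];        // backing storage for the queued buffers
static FILE *file;                                      // MPEG-2 TS source opened with fopen()
static jboolean reachedEof = JNI_FALSE;                 // set once EOS has been enqueued
static jboolean discontinuity = JNI_FALSE;              // set by another thread to request a rewind
static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
static const int kEosBufferCntxt = 1980;                // sentinel buffer context for the EOS buffer (illustrative value)
static XAAndroidBufferQueueItf playerBQItf;             // Android buffer queue interface of the player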
Example #5
// create streaming media player
bool AndroidVideoDecoder::createStreamingMediaPlayer()
{
    XAresult res;
    // derive the on-disk path and the asset path from the source path
    std::string path = sourcePath;
    std::string::size_type pos = path.find("/");
    path = path.substr(pos + 1);
    std::string diskPath = "/sdcard/renderScenes/" + path;
    std::string assetPath = sourcePath;
    
    // open the file to play; if it is not on disk yet, copy it there from the app assets
    file = fopen(diskPath.c_str(), "rb");
    if (file == NULL) {
        FileSystem::getInstance()->openFile(assetPath);
        // copy the asset to the sdcard so that it can be streamed with stdio
        FILE *pFile = fopen(diskPath.c_str(), "wb");
        if (pFile == NULL) {
            // creating the target directory can fail; after looking at the implementation we
            // suspect this is caused by the sandbox duplicating kernel static variables and by
            // the strategy of not releasing all memory from RAM to speed things up
            logErr("AndroidVideoDecoder::createStreamingMediaPlayer: cannot create %s", diskPath.c_str());
            FileSystem::getInstance()->destroyFileData(assetPath);
            return false;
        }
        fwrite(FileSystem::getInstance()->getFileData(assetPath), sizeof(char),
                FileSystem::getInstance()->getFileSize(assetPath) - 1, pFile);
        fclose(pFile);
        FileSystem::getInstance()->destroyFileData(assetPath);
    }
    
    file = fopen(diskPath.c_str(), "rb");
    if (file == NULL) {
    	logErr("AndroidVideoDecoder::createStreamingMediaPlayer HARD FAIL: fail open! %s", diskPath.c_str());
    	return false;
    }
    logInf("file opened!");
    // configure data source
    XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS };
    XADataFormat_MIME format_mime = {
        XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS };
    XADataSource dataSrc = {&loc_abq, &format_mime};
    
    // configure audio sink
    XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
    XADataSink audioSnk = { &loc_outmix, NULL };
    
    // configure image video sink
    XADataLocator_NativeDisplay loc_nd = {
        XA_DATALOCATOR_NATIVEDISPLAY,        // locatorType
        // the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
        (void*)theNativeWindow,              // hWindow
        // must be NULL
        NULL                                 // hDisplay
    };
    XADataSink imageVideoSink = {&loc_nd, NULL};
    
    // declare interfaces to use
    XAboolean     required[NB_MAXAL_INTERFACES]
    = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE,           XA_BOOLEAN_TRUE};
    XAInterfaceID iidArray[NB_MAXAL_INTERFACES]
    = {XA_IID_PLAY,     XA_IID_ANDROIDBUFFERQUEUESOURCE,
        XA_IID_STREAMINFORMATION};
    
    // create media player
    res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
                                             NULL, &audioSnk, &imageVideoSink, NULL, NULL,
                                             NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
                                             iidArray /*const XAInterfaceID *pInterfaceIds*/,
                                             required /*const XAboolean *pInterfaceRequired*/);
    assert(XA_RESULT_SUCCESS == res);
    
    // realize the player
    res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);
    // get the play interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
    assert(XA_RESULT_SUCCESS == res);
    
    // get the stream information interface (for video size)
    res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf);
    assert(XA_RESULT_SUCCESS == res);
    
    // get the volume interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf);
    assert(XA_RESULT_SUCCESS == res);
    
    // get the Android buffer queue interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUESOURCE, &playerBQItf);
    assert(XA_RESULT_SUCCESS == res);
    
    // specify which events we want to be notified of
    res = (*playerBQItf)->SetCallbackEventsMask(playerBQItf, XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
    assert(XA_RESULT_SUCCESS == res);
    
    // register the callback from which OpenMAX AL can retrieve the data to play
    res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);
    
    // we want to be notified of the video size once it's found, so we register a callback for that
    res = (*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf,
                                                               StreamChangeCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);
    
    // enqueue the initial buffers
    if (!enqueueInitialBuffers(false)) {
        return false;
    }
    
    // prepare the player
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
    assert(XA_RESULT_SUCCESS == res);
    
    // set the volume
    res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);
    assert(XA_RESULT_SUCCESS == res);
    
    // start the playback
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
    assert(XA_RESULT_SUCCESS == res);
    
    return true;
}
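
Example #5 registers StreamChangeCallback so the decoder can learn the video size, but the callback itself is not shown. A minimal sketch based on the XAStreamInformationItf query API follows; reusing the logInf macro from the example and what is done with the size afterwards are assumptions.

// Hypothetical stream-change callback: when stream properties change, query the
// video stream information to obtain its width and height.
static void StreamChangeCallback(XAStreamInformationItf caller, XAuint32 eventId,
        XAuint32 streamIndex, void *pEventData, void *pContext)
{
    // pContext was specified as NULL at RegisterStreamChangeCallback and is unused here
    if (eventId != XA_STREAMCBEVENT_PROPERTYCHANGE) {
        return;
    }
    XAuint32 domain;
    XAresult res = (*caller)->QueryStreamType(caller, streamIndex, &domain);
    assert(XA_RESULT_SUCCESS == res);
    if (domain == XA_DOMAINTYPE_VIDEO) {
        XAVideoStreamInformation videoInfo;
        res = (*caller)->QueryStreamInformation(caller, streamIndex, &videoInfo);
        assert(XA_RESULT_SUCCESS == res);
        logInf("video stream %u: %u x %u", streamIndex, videoInfo.width, videoInfo.height);
        // a real implementation would typically store the size or resize the native window here
    }
}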