Example #1
static int AndroidTrackCallback(void *userData)
{
    struct android_aud_stream *stream = (struct android_aud_stream*) userData;
    jmethodID write_method=0, play_method=0, stop_method=0, flush_method=0;
    int size = stream->play_buf_size;
    jbyteArray outputBuffer;
    jbyte *buf;
    JNIEnv *jni_env = 0;
    pj_bool_t attached = attach_jvm(&jni_env);
    
    if (!stream->track) {
        goto on_return;
    }

    PJ_LOG(5, (THIS_FILE, "Playback thread started"));

    /* Get methods ids */
    write_method = (*jni_env)->GetMethodID(jni_env, stream->track_class,
                                           "write", "([BII)I");
    play_method = (*jni_env)->GetMethodID(jni_env, stream->track_class,
                                          "play", "()V");
    stop_method = (*jni_env)->GetMethodID(jni_env, stream->track_class,
                                          "stop", "()V");
    flush_method = (*jni_env)->GetMethodID(jni_env, stream->track_class,
                                           "flush", "()V");
    if (write_method==0 || play_method==0 || stop_method==0 ||
        flush_method==0)
    {
        PJ_LOG(3, (THIS_FILE, "Unable to get audio track methods"));
        goto on_return;
    }

    outputBuffer = (*jni_env)->NewByteArray(jni_env, size);
    if (outputBuffer == 0) {
        PJ_LOG(3, (THIS_FILE, "Unable to allocate output buffer"));
        goto on_return;
    }
    buf = (*jni_env)->GetByteArrayElements(jni_env, outputBuffer, 0);

    /* Start playing */
    set_android_thread_priority(THREAD_PRIORITY_URGENT_AUDIO);
    (*jni_env)->CallVoidMethod(jni_env, stream->track, play_method);

    while (!stream->quit_flag) {
        pjmedia_frame frame;
        pj_status_t status;
        int bytesWritten;

        if (!stream->running) {
            (*jni_env)->CallVoidMethod(jni_env, stream->track, stop_method);
            (*jni_env)->CallVoidMethod(jni_env, stream->track, flush_method);
            pj_sem_wait(stream->play_sem);
            if (stream->quit_flag)
                break;
            (*jni_env)->CallVoidMethod(jni_env, stream->track, play_method);
        }
        
        frame.type = PJMEDIA_FRAME_TYPE_AUDIO;
        frame.size = size;
        frame.buf = (void *)buf;
        frame.timestamp.u64 = stream->play_timestamp.u64;
        frame.bit_info = 0;
        
        status = (*stream->play_cb)(stream->user_data, &frame);
        if (status != PJ_SUCCESS)
            continue;
        
        if (frame.type != PJMEDIA_FRAME_TYPE_AUDIO)
            pj_bzero(frame.buf, frame.size);
        
        (*jni_env)->ReleaseByteArrayElements(jni_env, outputBuffer, buf,
                                             JNI_COMMIT);

        /* Write to the device output. */
        bytesWritten = (*jni_env)->CallIntMethod(jni_env, stream->track,
                                                 write_method, outputBuffer,
                                                 0, size);
        if (bytesWritten != size) {
            PJ_LOG(4, (THIS_FILE, "Player thread: Error %d writing data",
                                  bytesWritten));
            continue;
        }

        stream->play_timestamp.u64 += stream->param.samples_per_frame /
                                      stream->param.channel_count;
    }
    
    (*jni_env)->ReleaseByteArrayElements(jni_env, outputBuffer, buf, 0);
    (*jni_env)->DeleteLocalRef(jni_env, outputBuffer);
    
on_return:
    detach_jvm(attached);
    PJ_LOG(5, (THIS_FILE, "Player thread stopped"));
    stream->play_thread_exited = 1;
    
    return 0;
}
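
The attach_jvm()/detach_jvm() helpers used in Examples #1 and #3 are not
shown here. A minimal sketch of what they could look like, assuming a
JavaVM pointer (android_jvm, a hypothetical global) was cached in
JNI_OnLoad():

/* Hypothetical helpers: attach the calling native thread to the JVM and
 * detach it again on exit. android_jvm is an assumed global cached in
 * JNI_OnLoad(). */
static JavaVM *android_jvm;

static pj_bool_t attach_jvm(JNIEnv **jni_env)
{
    /* If the thread is already attached, GetEnv() succeeds and no
     * detach is needed later. */
    if ((*android_jvm)->GetEnv(android_jvm, (void **)jni_env,
                               JNI_VERSION_1_4) == JNI_OK)
    {
        return PJ_FALSE;
    }

    /* Attach the current native thread; the caller must detach it
     * before the thread exits. */
    (*android_jvm)->AttachCurrentThread(android_jvm, jni_env, NULL);
    return PJ_TRUE;
}

static void detach_jvm(pj_bool_t attached)
{
    if (attached)
        (*android_jvm)->DetachCurrentThread(android_jvm);
}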
Example #2
static int PJ_THREAD_FUNC AndroidTrackCallback(void* userData){
	struct android_aud_stream *stream = (struct android_aud_stream*) userData;
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);
	jmethodID write_method = 0, play_method = 0;
	pj_status_t status = 0;
	int bytesWritten;
	int size = stream->samples_per_frame * stream->bytes_per_sample;
	int nframes = stream->samples_per_frame / stream->channel_count;
	jbyte* buf;
	jbyteArray outputBuffer;
	pj_timestamp tstamp;

	PJ_LOG(3,(THIS_FILE, "<< Enter player thread"));

	if (!stream->track) {
		goto on_break;
	}

	/* Get method ids */
	write_method = jni_env->GetMethodID(stream->track_class, "write", "([BII)I");
	play_method = jni_env->GetMethodID(stream->track_class, "play", "()V");
	if (write_method == 0 || play_method == 0) {
		PJ_LOG(1, (THIS_FILE, "Unable to get audio track methods"));
		goto on_break;
	}

	outputBuffer = jni_env->NewByteArray(size);
	if (outputBuffer == 0) {
		PJ_LOG(2, (THIS_FILE, "Unable to allocate the playback buffer"));
		goto on_break;
	}

	buf = jni_env->GetByteArrayElements(outputBuffer, 0);

	set_android_thread_priority(THREAD_PRIORITY_URGENT_AUDIO);

	/* Start playing */
	jni_env->CallVoidMethod(stream->track, play_method);

	/* Initialize the timestamp and zero the buffer */
	tstamp.u64 = 0;
	pj_bzero(buf, size);

	while (!stream->quit_flag) {
		pjmedia_frame frame;

		pj_bzero(buf, size);

		frame.type = PJMEDIA_FRAME_TYPE_AUDIO;
		frame.size = size;
		frame.buf = (void *) buf;
		frame.timestamp.u64 = tstamp.u64;
		frame.bit_info = 0;

		/* Fill the frame from the pjmedia stack */
		status = (*stream->play_cb)(stream->user_data, &frame);
		if (status != PJ_SUCCESS) {
			goto on_finish;
		}

		if (frame.type != PJMEDIA_FRAME_TYPE_AUDIO) {
			pj_bzero(frame.buf, frame.size);
			PJ_LOG(3, (THIS_FILE, "Non-audio frame received, skipping"));
			continue;
		}

		/* Commit the native buffer back to the Java array; without this,
		 * write() below may see stale data if GetByteArrayElements()
		 * returned a copy. */
		jni_env->ReleaseByteArrayElements(outputBuffer, buf, JNI_COMMIT);

		/* Write to the device output */
		bytesWritten = jni_env->CallIntMethod(stream->track, write_method,
				outputBuffer, 0, frame.size);

		if (bytesWritten < 0) {
			PJ_LOG(1, (THIS_FILE, "Error %d while writing data", bytesWritten));
			continue;
		} else if (bytesWritten != size) {
			PJ_LOG(2, (THIS_FILE, "Only %d of %d bytes written", bytesWritten, size));
		}

		/* Update the timestamp for the next frame */
		tstamp.u64 += nframes;
	}

on_finish:
	jni_env->ReleaseByteArrayElements(outputBuffer, buf, 0);
	jni_env->DeleteLocalRef(outputBuffer);

on_break:
	DETACH_JVM(jni_env);
	PJ_LOG(3, (THIS_FILE, ">> Player thread stopped"));
	stream->play_thread_exited = 1;
	return 0;
}
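
Both variants call a set_android_thread_priority() helper to move the
audio thread into a higher scheduling group, as the comments in Examples
#3 and #4 explain. Below is a minimal sketch of one way to implement it
through the public android.os.Process.setThreadPriority(int) API; the
signature taking a JNIEnv is an assumption, and the real helper
presumably obtains the env itself:

/* Hypothetical helper: raise the calling thread's priority via the
 * android.os.Process Java API, since plain setpriority() does not move
 * the thread into the audio scheduling group. */
static void set_android_thread_priority(JNIEnv *jni_env, int priority)
{
    jclass process_class;
    jmethodID set_prio_method;

    process_class = (*jni_env)->FindClass(jni_env, "android/os/Process");
    if (process_class == NULL)
        return;

    /* static void android.os.Process.setThreadPriority(int priority);
     * THREAD_PRIORITY_URGENT_AUDIO is -19 in that class. */
    set_prio_method = (*jni_env)->GetStaticMethodID(jni_env, process_class,
                                                    "setThreadPriority",
                                                    "(I)V");
    if (set_prio_method == NULL)
        return;

    (*jni_env)->CallStaticVoidMethod(jni_env, process_class,
                                     set_prio_method, (jint)priority);
}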
Example #3
static int AndroidRecorderCallback(void *userData)
{
    struct android_aud_stream *stream = (struct android_aud_stream *)userData;
    jmethodID read_method=0, record_method=0, stop_method=0;
    int size = stream->rec_buf_size;
    jbyteArray inputBuffer;
    jbyte *buf;
    JNIEnv *jni_env = 0;
    pj_bool_t attached = attach_jvm(&jni_env);
    
    PJ_ASSERT_RETURN(jni_env, 0);
    
    if (!stream->record) {
        goto on_return;
    }

    PJ_LOG(5, (THIS_FILE, "Recorder thread started"));

    /* Get methods ids */
    read_method = (*jni_env)->GetMethodID(jni_env, stream->record_class, 
                                          "read", "([BII)I");
    record_method = (*jni_env)->GetMethodID(jni_env, stream->record_class,
                                            "startRecording", "()V");
    stop_method = (*jni_env)->GetMethodID(jni_env, stream->record_class,
                                          "stop", "()V");
    if (read_method==0 || record_method==0 || stop_method==0) {
        PJ_LOG(3, (THIS_FILE, "Unable to get recording methods"));
        goto on_return;
    }
    
    /* Create a buffer for frames read */
    inputBuffer = (*jni_env)->NewByteArray(jni_env, size);
    if (inputBuffer == 0) {
        PJ_LOG(3, (THIS_FILE, "Unable to allocate input buffer"));
        goto on_return;
    }
    
    /* Start recording.
     * setpriority(PRIO_PROCESS, 0, -19); //ANDROID_PRIORITY_AUDIO
     * setpriority() is probably not enough because it does not change the
     * thread group in the scheduler. The temporary solution is to call the
     * Java API to set the thread priority. A better solution would be to
     * port (if possible) the code from the Android OS regarding set_sched
     * groups.
     */
    set_android_thread_priority(THREAD_PRIORITY_URGENT_AUDIO);
    (*jni_env)->CallVoidMethod(jni_env, stream->record, record_method);
    
    while (!stream->quit_flag) {
        pjmedia_frame frame;
        pj_status_t status;
        int bytesRead;
        
        if (!stream->running) {
            (*jni_env)->CallVoidMethod(jni_env, stream->record, stop_method);
            pj_sem_wait(stream->rec_sem);
            if (stream->quit_flag)
                break;
            (*jni_env)->CallVoidMethod(jni_env, stream->record, record_method);
        }
        
        bytesRead = (*jni_env)->CallIntMethod(jni_env, stream->record,
                                              read_method, inputBuffer,
                                              0, size);
        if (bytesRead != size) {
            PJ_LOG(4, (THIS_FILE, "Record thread: error %d reading data",
                                  bytesRead));
            continue;
        }

        buf = (*jni_env)->GetByteArrayElements(jni_env, inputBuffer, 0);
        frame.type = PJMEDIA_FRAME_TYPE_AUDIO;
        frame.size = size;
        frame.bit_info = 0;
        frame.buf = (void *)buf;
        frame.timestamp.u64 = stream->rec_timestamp.u64;

        status = (*stream->rec_cb)(stream->user_data, &frame);
        (*jni_env)->ReleaseByteArrayElements(jni_env, inputBuffer, buf,
        				     JNI_ABORT);

        stream->rec_timestamp.u64 += stream->param.samples_per_frame /
                                     stream->param.channel_count;
    }

    (*jni_env)->DeleteLocalRef(jni_env, inputBuffer);
    
on_return:
    detach_jvm(attached);
    PJ_LOG(5, (THIS_FILE, "Recorder thread stopped"));
    stream->rec_thread_exited = 1;

    return 0;
}
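
The stream->running flag and the play_sem/rec_sem semaphores in Examples
#1 and #3 form a start/stop handshake with the control thread: while the
stream is stopped, each loop parks in pj_sem_wait() until the control
side posts the semaphore. A minimal sketch of what the control side might
look like (strm_start/strm_stop are illustrative names, not taken from
the examples above):

/* Hypothetical control-side counterpart of the handshake used by the
 * playback and recorder loops above. */
static pj_status_t strm_start(struct android_aud_stream *stream)
{
    stream->running = PJ_TRUE;
    /* Wake the worker threads blocked in pj_sem_wait() */
    pj_sem_post(stream->rec_sem);
    pj_sem_post(stream->play_sem);
    return PJ_SUCCESS;
}

static pj_status_t strm_stop(struct android_aud_stream *stream)
{
    /* Each loop sees this at the top of its next iteration, calls
     * stop() on its Java object, and parks on its semaphore again. */
    stream->running = PJ_FALSE;
    return PJ_SUCCESS;
}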
Example #4
static int PJ_THREAD_FUNC AndroidRecorderCallback(void* userData){
	struct android_aud_stream *stream = (struct android_aud_stream*) userData;
	JNIEnv *jni_env = 0;
	ATTACH_JVM(jni_env);

	jmethodID read_method=0, record_method=0;
	int bytesRead;
	int size = stream->samples_per_frame * stream->bytes_per_sample;
	int nframes = stream->samples_per_frame / stream->channel_count;
	jbyte* buf;
	pj_status_t status = 0;
	jbyteArray inputBuffer;
	pj_timestamp tstamp, now, last_frame;

	int elapsed_time = 0;
	//Frame time in ms
	int frame_time = nframes * 1000 / stream->samples_per_sec;
	int missed_time = frame_time;
	int to_wait = 0;

	PJ_LOG(3,(THIS_FILE, "<< Enter recorder thread"));

	if (!stream->record) {
		goto on_break;
	}


	/* Get method ids */
	read_method = jni_env->GetMethodID(stream->record_class, "read", "([BII)I");
	record_method = jni_env->GetMethodID(stream->record_class, "startRecording", "()V");
	if (read_method == 0 || record_method == 0) {
		PJ_LOG(1, (THIS_FILE, "Unable to get recording methods"));
		goto on_break;
	}

	//Create a buffer for frames read
	inputBuffer = jni_env->NewByteArray(size);
	if (inputBuffer == 0) {
		PJ_LOG(2, (THIS_FILE, "Unable to allocate the record buffer"));
		goto on_break;
	}


	/* Start recording.
	 * setpriority(PRIO_PROCESS, 0, -19); //ANDROID_PRIORITY_AUDIO
	 * setpriority() is probably not enough because it does not change the
	 * thread group in the scheduler. The temporary solution is to call the
	 * Java API to set the thread priority. A better solution would be to
	 * port (if possible) the code from the Android OS regarding set_sched
	 * groups. */
	set_android_thread_priority(THREAD_PRIORITY_URGENT_AUDIO);

	buf = jni_env->GetByteArrayElements(inputBuffer, 0);

	/* Initialize the timestamp and zero the buffer */
	tstamp.u64 = 0;
	pj_bzero(buf, size);


	jni_env->CallVoidMethod(stream->record, record_method);
	pj_get_timestamp(&last_frame);

	while (!stream->quit_flag) {
		pj_bzero(buf, size);

#if COMPATIBLE_ALSA
		pj_get_timestamp(&now);
		/* Time elapsed since the last frame (ms) */
		elapsed_time = pj_elapsed_msec(&last_frame, &now);

		pj_get_timestamp(&last_frame);

		/* Update the missed time: positive if we are late, negative if
		 * we are early. Halving the previous value is an empirical
		 * result: without it the Nexus One overflows its buffer,
		 * presumably because packets are filled faster than the nominal
		 * rate. */
		missed_time = missed_time/2 + elapsed_time - frame_time;

		/* If we run faster than the buffer fills, wait a little */
		if (missed_time <= 0) {
			to_wait = -missed_time - 2;
			if (to_wait > 0) {
				pj_thread_sleep(to_wait);
			}
		}
#endif

		bytesRead = jni_env->CallIntMethod(stream->record, read_method,
					inputBuffer,
					0,
					size);

		if (bytesRead <= 0) {
			PJ_LOG(3, (THIS_FILE, "Record thread: error %d while reading data", bytesRead));
			continue;
		}
		if (stream->quit_flag) {
			break;
		}
		if (bytesRead != size) {
			PJ_LOG(3, (THIS_FILE, "Overrun: read %d of %d bytes", bytesRead, size));
			continue;
		}

		/* Copy the data just read from the Java array into the native
		 * buffer; GetByteArrayElements() may have returned a copy. */
		jni_env->GetByteArrayRegion(inputBuffer, 0, size, buf);

		pjmedia_frame frame;

		frame.type = PJMEDIA_FRAME_TYPE_AUDIO;
		frame.size =  size;
		frame.bit_info = 0;
		frame.buf = (void*) buf;
		frame.timestamp.u64 = tstamp.u64;

		status = (*stream->rec_cb)(stream->user_data, &frame);

		if (status != PJ_SUCCESS){
			PJ_LOG(1, (THIS_FILE, "Error in record callback"));
			goto on_finish;
		}


		/* Update the timestamp for the next frame */
		tstamp.u64 += nframes;
	}


on_finish:
	jni_env->ReleaseByteArrayElements(inputBuffer, buf, 0);
	jni_env->DeleteLocalRef(inputBuffer);

on_break:
	DETACH_JVM(jni_env);
	PJ_LOG(3, (THIS_FILE, ">> Record thread stopped"));
	stream->rec_thread_exited = 1;
	return 0;
}
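
To make the pacing arithmetic in Example #4 concrete, here is a small
worked example of the frame_time and missed_time formulas. The values
(8 kHz mono, 20 ms frames) are illustrative assumptions, not taken from
the code above:

/* Worked example of the recorder pacing math, with assumed values. */
#include <stdio.h>

int main(void)
{
    int samples_per_frame = 160;   /* assumed: 20 ms of 8 kHz mono audio */
    int channel_count = 1;
    int samples_per_sec = 8000;

    int nframes = samples_per_frame / channel_count;    /* 160 */
    int frame_time = nframes * 1000 / samples_per_sec;  /* 20 ms */

    /* One smoothing step: previously 4 ms late, and the last read took
     * 26 ms instead of 20 ms. */
    int missed_time = 4;
    int elapsed_time = 26;
    missed_time = missed_time / 2 + elapsed_time - frame_time;  /* 8 ms late */

    printf("frame_time=%d ms, missed_time=%d ms\n", frame_time, missed_time);
    return 0;
}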