/* SDL audio callback (runs on the SDL audio thread): fill `stream` with
 * exactly `len` bytes of decoded, clock-synchronized PCM.  Whenever the
 * staging buffer is exhausted, decode the next frame; on decode error,
 * substitute 1024 bytes of silence so playback keeps cadence. */
void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
{
    FFMovie *movie = opaque;
    double pts;

    while (len > 0) {
        /* Refill the staging buffer once it has been fully consumed. */
        if (movie->audio_buf_index >= movie->audio_buf_size) {
            int decoded = audio_decode_frame(movie, movie->audio_buf, &pts);
            if (decoded < 0) {
                /* Decode failed — emit a short run of silence instead. */
                movie->audio_buf_size = 1024;
                memset(movie->audio_buf, 0, movie->audio_buf_size);
            } else {
                movie->audio_buf_size = synchronize_audio(
                        movie, (int16_t *)movie->audio_buf, decoded, pts);
            }
            movie->audio_buf_index = 0;
        }

        /* Copy as much as the staging buffer holds, capped at `len`. */
        int chunk = movie->audio_buf_size - movie->audio_buf_index;
        if (chunk > len)
            chunk = len;
        memcpy(stream,
               (uint8_t *)movie->audio_buf + movie->audio_buf_index, chunk);

        stream += chunk;
        len -= chunk;
        movie->audio_buf_index += chunk;
    }
}
/* SDL audio callback: drain decoded PCM from the VideoState staging
 * buffer into `stream` until `len` bytes have been delivered, decoding
 * fresh frames on demand.  A failed decode yields 1024 bytes of silence
 * so the output stream never stalls. */
void audio_callback(void *userdata, Uint8 *stream, int len)
{
    VideoState *is = (VideoState *)userdata;
    double pts;

    while (len > 0) {
        /* Staging buffer empty — decode another frame into it. */
        if (is->audio_buf_index >= is->audio_buf_size) {
            int decoded = audio_decode_frame(is, is->audio_buf,
                                             sizeof(is->audio_buf), &pts);
            if (decoded < 0) {
                /* Error path: hand SDL a short block of silence. */
                is->audio_buf_size = 1024;
                memset(is->audio_buf, 0, is->audio_buf_size);
            } else {
                is->audio_buf_size = synchronize_audio(
                        is, (int16_t *)is->audio_buf, decoded, pts);
            }
            is->audio_buf_index = 0;
        }

        /* Ship the smaller of (bytes left in buffer, bytes SDL wants). */
        int chunk = is->audio_buf_size - is->audio_buf_index;
        if (chunk > len)
            chunk = len;
        memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, chunk);

        len -= chunk;
        stream += chunk;
        is->audio_buf_index += chunk;
    }
}
static void audio_callback(void *userdata, uint8_t * stream, int len) { VideoState *is = (VideoState *) userdata; int len1, audio_size; double pts; if (!is->first) return; while (len > 0) { if (is->audio_buf_index >= is->audio_buf_size) { /* We have already sent all our data; get more */ audio_size = -1; if (!is->paused) { audio_size = audio_decode_frame(is, AUDIO_BUF(is), AUDIO_BUF_SIZE, &pts); } if (audio_size < 0) { /* If error, output silence */ is->audio_buf_size = 1024; memset(AUDIO_BUF(is), 0, is->audio_buf_size); } else { audio_size = synchronize_audio(is, (int16_t *) AUDIO_BUF(is), audio_size, pts); is->audio_buf_size = audio_size; } is->audio_buf_index = 0; } len1 = is->audio_buf_size - is->audio_buf_index; if (len1 > len) len1 = len; memcpy(stream, (uint8_t *) AUDIO_BUF(is) + is->audio_buf_index, len1); len -= len1; stream += len1; is->audio_buf_index += len1; } }
void *audio_thread(void *arg) { JNIEnv* env; if((*g_jvm)->AttachCurrentThread(g_jvm, (void**)&env, NULL) != JNI_OK) { LOGE("%s: AttachCurrentThread() failed", __FUNCTION__); return ((void *)-1);; } VideoState *is = (VideoState*)arg; int remain, audio_size;//remain 解码出的音频缓冲区剩余的数据长度 int pcmBufferLen;//音频数据写入的缓冲区的长度 jclass audio_track_cls = (*env)->FindClass(env,"android/media/AudioTrack"); jmethodID min_buff_size_id = (*env)->GetStaticMethodID( env, audio_track_cls, "getMinBufferSize", "(III)I"); int buffer_size = (*env)->CallStaticIntMethod(env,audio_track_cls,min_buff_size_id, frequency, 12, /*CHANNEL_IN_STEREO*/ 2); /*ENCODING_PCM_16BIT*/ LOGI(10,"buffer_size=%i",buffer_size); pcmBufferLen = AVCODEC_MAX_AUDIO_FRAME_SIZE * 3/2; jbyteArray buffer = (*env)->NewByteArray(env,pcmBufferLen); jmethodID constructor_id = (*env)->GetMethodID(env,audio_track_cls, "<init>", "(IIIIII)V"); jobject audio_track = (*env)->NewObject(env,audio_track_cls, constructor_id, 3, /*AudioManager.STREAM_MUSIC*/ frequency, /*sampleRateInHz*/ 12, /*CHANNEL_IN_STEREO*/ 2, /*ENCODING_PCM_16BIT*/ buffer_size*10, /*bufferSizeInBytes*/ 1 /*AudioTrack.MODE_STREAM*/ ); //setvolume jmethodID setStereoVolume = (*env)->GetMethodID(env,audio_track_cls,"setStereoVolume","(FF)I"); (*env)->CallIntMethod(env,audio_track,setStereoVolume,1.0,1.0); //play jmethodID method_play = (*env)->GetMethodID(env,audio_track_cls, "play", "()V"); (*env)->CallVoidMethod(env,audio_track, method_play); //write jmethodID method_write = (*env)->GetMethodID(env,audio_track_cls,"write","([BII)I"); //release jmethodID method_release = (*env)->GetMethodID(env,audio_track_cls,"release","()V"); //double ref_clock, sync_threshold, diff; double pts; LOGI(1, "pcmBufferLen = %d, AVCODEC_MAX_AUDIO_FRAME_SIZE = %d", pcmBufferLen, AVCODEC_MAX_AUDIO_FRAME_SIZE); while(!is->quit) { if(is->audio_buf_index >= is->audio_buf_size) {//audio_buf中的数据已经转移完毕了 audio_size = audio_decode_frame(is, is->audio_buf, sizeof(is->audio_buf), &pts); if 
(audio_size <= 0) { is->audio_buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 3/2; memset(is->audio_buf, 0, is->audio_buf_size); }else { audio_size = synchronize_audio(is, (int16_t *)is->audio_buf,audio_size,pts); is->audio_buf_size = audio_size; //fwrite(is->audio_buf, 1, audio_size, fp); } //每次解码出音频之后,就把音频的索引audio_buf_index值0 从头开始索引 is->audio_buf_index = 0; } //剩余的数据长度超过音频数据写入的缓冲区的长度 remain = is->audio_buf_size - is->audio_buf_index; if(remain > pcmBufferLen) { remain = pcmBufferLen; } (*env)->SetByteArrayRegion(env,buffer, 0, remain, (jbyte *)is->audio_buf); (*env)->CallIntMethod(env,audio_track,method_write,buffer,0,remain); is->audio_buf_index += remain; } (*env)->CallVoidMethod(env,audio_track, method_release); if(debug) LOGI(1, "### decode audio thread exit."); if((*g_jvm)->DetachCurrentThread(g_jvm) != JNI_OK) { LOGE(1,"### detach audio thread error"); } pthread_exit(0); ((void *)0); }