Ejemplo n.º 1
0
// Constructor: enforces single-instance use, verifies the OpenSL ES shared
// library is loadable, then builds the engine and buffer-queue player.
// Any failure aborts the whole process via exit(EXIT_FAILURE).
OpenSLSoundPool::OpenSLSoundPool(int maxStreams, SLuint32 samplingRate, SLuint32 bitrate):
		engineObject(NULL),
		outputMixObject(NULL),
		maxStreams(maxStreams),
		samplingRate(samplingRate),
		bitrate(bitrate){

	LOGI("MySoundPool");

	// Singleton guard: a second construction is a fatal programming error.
	if (OpenSLSoundPool::instance != NULL){
		LOGI("Can only be instantiated once");
		exit(EXIT_FAILURE);
	}
	OpenSLSoundPool::instance = this;

	// see if OpenSL library is available
	// NOTE(review): the dlopen handle is never stored or dlclose()d —
	// presumably intentional so the library stays resident, but confirm.
	void* handle = dlopen("libOpenSLES.so", RTLD_LAZY);
	if (handle == NULL){
		LOGI("OpenSLES not available");
		exit(EXIT_FAILURE);
	}

	// Heap-allocated containers; ownership stays with this singleton.
	// NOTE(review): no matching delete is visible in this chunk — assumed
	// to live for the process lifetime.
	bufferQueues = new std::vector<BufferQueue*>();

	samples = new std::vector<ResourceBuffer*>();

	LOGI("OpenSLES available");
	LOGI("Initializing");
	createEngine();
	createBufferQueueAudioPlayer();

	// Volume range in millibels: SL_MILLIBEL_MIN (silence) up to 0 (full).
	minVolume = SL_MILLIBEL_MIN;
	maxVolume = 0;
}
Ejemplo n.º 2
0
/**
 * Open the OpenSL ES output path for the given PCM format.
 *
 * Tears down any previous state via close(), allocates the handle,
 * creates the engine and buffer-queue player, and sets up the FIFO
 * that feeds the player callback.
 *
 * @param sample_rate   PCM sample rate in Hz
 * @param channel       channel count
 * @param bitsPerSample bits per sample (e.g. 16)
 * @return 0 on success, -1 on failure
 */
int and_osles::open(int sample_rate, int channel, int bitsPerSample)
{
	LOGI("and_osles open()");

	close();

	LOGI("sample_rate %d, channel %d, bitsPerSample %d", 
		sample_rate, channel, bitsPerSample);

	m_osles_handle = (osles_handle *)malloc(sizeof(osles_handle));
	if(!m_osles_handle) {
		LOGE("failed to alloc osles handle");
		return -1;
	}
	
	if( createEngine() < 0){
		LOGE("failed to create engine");
		// BUG FIX: the handle allocated above was leaked here. Free it
		// and clear the pointer so a later close() cannot double-free.
		free(m_osles_handle);
		m_osles_handle = NULL;
		return -1;
	}

	if( createBufferQueueAudioPlayer(sample_rate, channel, bitsPerSample) < 0) {
		LOGE("failed to create player");
		// BUG FIX: same leak as above on the player-creation failure path.
		// NOTE(review): the engine created just above is presumably torn
		// down by a subsequent close()/destructor — confirm.
		free(m_osles_handle);
		m_osles_handle = NULL;
		return -1;
	}

	m_fifo = new and_fifobuffer;
	m_fifo->create(AUDIO_FIFO_SIZE);

	// Bytes of PCM per second: rate * channels * bytes-per-sample.
	m_one_sec_size = sample_rate * channel * bitsPerSample / 8;

	LOGI("osles opened: rate %d, chn %d, bit %d, one_sec_size %d", 
		sample_rate, channel, bitsPerSample, m_one_sec_size);
	return 0;
}
Ejemplo n.º 3
0
// init mididriver
//
// Brings up the EAS synth, allocates the PCM mix buffer, then creates the
// OpenSL engine and buffer-queue player; kicks playback by invoking the
// player callback once.  Returns JNI_TRUE on success, JNI_FALSE otherwise,
// unwinding whatever was already initialized on each failure path.
jboolean
Java_org_billthefarmer_accordion_MidiDriver_init(JNIEnv *env,
						 jobject obj)
{
    EAS_RESULT result;

    // BUG FIX: the original `if (result = initEAS() != EAS_SUCCESS)` binds
    // `!=` before `=`, so `result` received the boolean comparison (0/1)
    // instead of the actual return code — the log line printed garbage.
    // The extra parentheses restore the intended "assign, then compare".
    if ((result = initEAS()) != EAS_SUCCESS)
    {
	shutdownEAS();

	LOG_E(LOG_TAG, "Init EAS failed: %ld", result);

	return JNI_FALSE;
    }

    // LOG_D(LOG_TAG, "Init EAS success, buffer: %ld", bufferSize);

    // allocate buffer in bytes
    buffer = (EAS_PCM *)malloc(bufferSize * sizeof(EAS_PCM));
    if (buffer == NULL)
    {
	shutdownEAS();

	LOG_E(LOG_TAG, "Allocate buffer failed");

	return JNI_FALSE;
    }

    // create the engine and output mix objects
    // BUG FIX: same precedence bug as above — parenthesize the assignment.
    if ((result = createEngine()) != SL_RESULT_SUCCESS)
    {
	shutdownEAS();
	shutdownAudio();
	free(buffer);
	buffer = NULL;

	LOG_E(LOG_TAG, "Create engine failed: %ld", result);

	return JNI_FALSE;
    }

    // create buffer queue audio player
    // BUG FIX: same precedence bug as above — parenthesize the assignment.
    if ((result = createBufferQueueAudioPlayer()) != SL_RESULT_SUCCESS)
    {
	shutdownEAS();
	shutdownAudio();
	free(buffer);
	buffer = NULL;

	LOG_E(LOG_TAG, "Create buffer queue audio player failed: %ld", result);

	return JNI_FALSE;
    }

    // call the callback to start playing
    bqPlayerCallback(bqPlayerBufferQueue, NULL);

    return JNI_TRUE;
}
Ejemplo n.º 4
0
// Construct the audio track: bring the OpenSL engine up first, then the
// buffer-queue player for the requested PCM format.  If the player cannot
// be created, the engine is released again so no half-initialized OpenSL
// state survives construction.  All interface handles start out NULL.
SLESAudioTrack::SLESAudioTrack(int samplerate, int bitsPerSample, int channel):engineObject(NULL),engineEngine(NULL),
                                 outputMixObject(NULL),
                                 bqPlayerObject(NULL),bqPlayerPlay(NULL),
                                 bqPlayerBufferQueue(NULL),bqPlayerEffectSend(NULL),
                                 bqPlayerMuteSolo(NULL),bqPlayerVolume(NULL){
    // Guard-clause form: bail out early when the engine itself fails;
    // there is nothing to release in that case.
    if (createEngine() != SL_RESULT_SUCCESS) {
        return;
    }
    // Engine is up — if the player fails, undo the engine creation.
    if (createBufferQueueAudioPlayer(samplerate, bitsPerSample, channel) != SL_RESULT_SUCCESS) {
        releaseEngine();
    }
}
Ejemplo n.º 5
0
void URendererAudioOpenSLES::start(){

	//创建播放引擎和播放对象
	if(!mPlayer || createEngine() || createBufferQueueAudioPlayer(this->mChannels,this->mSampleRate)){
		//设置出错标记
		mPrepared = false;
		this->mPlayer->notifyMsg(MEDIA_INFO_PREPARE_ERROR,ERROR_SOFT_PLAYER_OPENSLES);
		ulog_err("URendererAudioOpenSLES::start failed");
	}else{
		//设置音频渲染器状态
		ulog_info("URendererAudioOpenSLES::start\n");
		mPrepared = true;
		this->UThread::start();
		//AudioPlayerCallback(this->mPlayerBufferQueue,this->mPlayer);
	}

}
Ejemplo n.º 6
0
// Set up libswresample to convert the decoder's native format to stereo
// output at the codec's sample rate, then create the OpenSL player.
// On resampler-init failure au_convert_ctx ends up NULL so callers can
// detect the missing converter.
void init_swr(){
	uint64_t out_channel_layout=AV_CH_LAYOUT_STEREO;
	//nb_samples: AAC-1024 MP3-1152
	out_sample_rate=pCodecCtx->sample_rate;
	out_channels=av_get_channel_layout_nb_channels(out_channel_layout);

	out_buffer=(uint8_t *)av_malloc(MAX_AUDIO_FRAME_SIZE*out_channels);
	// BUG FIX: av_malloc can return NULL; bail out instead of letting a
	// later memcpy/convert crash on a NULL output buffer.
	if(!out_buffer){
		return;
	}
	  //FIX:Some Codec's Context Information is missing
	int in_channel_layout=av_get_default_channel_layout(pCodecCtx->channels);
	//Swr
	au_convert_ctx = swr_alloc();
	swr_alloc_set_opts(au_convert_ctx,out_channel_layout, out_sample_fmt,         out_sample_rate,
					 in_channel_layout,  pCodecCtx->sample_fmt , pCodecCtx->sample_rate,0, NULL);
	if(swr_init(au_convert_ctx)<0){
		// BUG FIX: the original only NULLed the pointer, leaking the
		// context.  swr_free() releases it and sets the pointer to NULL.
		swr_free(&au_convert_ctx);
	}
	createBufferQueueAudioPlayer(2,out_sample_rate);  
}
Ejemplo n.º 7
0
// MikMod driver init: bring up the OpenSL ES engine and output mix, create
// the buffer-queue player, and allocate the mixing buffers sized from the
// mixer frequency and sample format.  Returns 1 on failure, otherwise the
// result of VC_Init().
// NOTE(review): on every failure path the partially created OpenSL objects
// (engineObject, outputMixObject) and any already-allocated buffers are not
// destroyed here — presumably the matching OSLES_Exit cleans up; confirm.
static BOOL OSLES_Init(void)
{
	short			samplesize;
	int				n;

	// Bytes per sample frame: 1, doubled for stereo and for 16-bit.
	samplesize=1;
	if (md_mode&DMODE_STEREO) samplesize<<=1;
	if (md_mode&DMODE_16BITS) samplesize<<=1;

	SLresult result;

    // create engine
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    if(SL_RESULT_SUCCESS != result)
    	return 1;

    // realize the engine
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    if(SL_RESULT_SUCCESS != result)
    	return 1;

    // get the engine interface, which is needed in order to create other objects
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    if(SL_RESULT_SUCCESS != result)
    	return 1;

    // create output mix, with environmental reverb specified as a non-required interface
    const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean req[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, ids, req);
    if(SL_RESULT_SUCCESS != result)
    	return 1;

    // realize the output mix
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    if(SL_RESULT_SUCCESS != result)
    	return 1;

    /*
    // get the environmental reverb interface
    // this could fail if the environmental reverb effect is not available,
    // either because the feature is not present, excessive CPU load, or
    // the required MODIFY_AUDIO_SETTINGS permission was not requested and granted
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
            &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
    }
*/
    // NOTE(review): the return value of createBufferQueueAudioPlayer() is
    // ignored — a player-creation failure goes undetected here.
    createBufferQueueAudioPlayer();
    // Buffer size in bytes for BUFFERSIZE milliseconds of mixed audio.
    buffersize=md_mixfreq*samplesize*BUFFERSIZE/1000;

	for (n=0;n<NUMBUFFERS;n++) {
		buffer[n]=_mm_malloc(buffersize);
		if (!buffer[n]) {
			_mm_errno=MMERR_OUT_OF_MEMORY;
			return 1;
		}
	}

	// Enable software mixing for music and sound effects.
	md_mode|=DMODE_SOFT_MUSIC|DMODE_SOFT_SNDFX;
	buffersout=nextbuffer=0;

	return VC_Init();
}
Ejemplo n.º 8
0
/**
 * Open the decoder for one stream of the input and wire up the matching
 * output path (OpenSL audio player or native-window video player).
 *
 * @param is           player state holding the format context and outputs
 * @param stream_index index of the stream to open within is->pFormatCtx
 * @return 0 on success, -1 on bad index, allocation failure or codec error
 */
int stream_component_open(VideoState *is, int stream_index) {

  AVFormatContext *pFormatCtx = is->pFormatCtx;
  AVCodecContext *codecCtx = NULL;
  AVCodec *codec = NULL;
  AVDictionary *optionsDict = NULL;

  if(stream_index < 0 || stream_index >= pFormatCtx->nb_streams) {
    return -1;
  }

  // Get a pointer to the codec context for the video stream
  codecCtx = pFormatCtx->streams[stream_index]->codec;

  if(codecCtx->codec_type == AVMEDIA_TYPE_AUDIO) {
	is->audio_callback = audio_callback;

    // Set audio settings from codec info
	AudioPlayer *player = malloc(sizeof(AudioPlayer));
	// BUG FIX: malloc can return NULL; the original passed the pointer on
	// unchecked.
	if(!player) {
	  return -1;
	}
    is->audio_player = player;
    createEngine(&is->audio_player);
    createBufferQueueAudioPlayer(&is->audio_player, is, codecCtx->channels, codecCtx->sample_rate);
    //is->audio_hw_buf_size = 4096;
  } else if (codecCtx->codec_type == AVMEDIA_TYPE_VIDEO) {
	// Set video settings from codec info
	VideoPlayer *player = malloc(sizeof(VideoPlayer));
	// BUG FIX: check the allocation before handing it to the video engine.
	if(!player) {
	  return -1;
	}
	is->video_player = player;
	createVideoEngine(&is->video_player);
	createScreen(&is->video_player, is->native_window, 0, 0);
  }
  codec = avcodec_find_decoder(codecCtx->codec_id);
  if(!codec || (avcodec_open2(codecCtx, codec, &optionsDict) < 0)) {
    fprintf(stderr, "Unsupported codec!\n");
    return -1;
  }

  switch(codecCtx->codec_type) {
  case AVMEDIA_TYPE_AUDIO:
    is->audioStream = stream_index;
    is->audio_st = pFormatCtx->streams[stream_index];
    is->audio_buf_size = 0;
    is->audio_buf_index = 0;

    /* averaging filter for audio sync */
    is->audio_diff_avg_coef = exp(log(0.01 / AUDIO_DIFF_AVG_NB));
    is->audio_diff_avg_count = 0;
    /* Correct audio only if larger error than this */
    is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / codecCtx->sample_rate;

	is->sws_ctx_audio = swr_alloc();
	if (!is->sws_ctx_audio) {
		fprintf(stderr, "Could not allocate resampler context\n");
		return -1;
	}

    memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
    packet_queue_init(&is->audioq);
    break;
  case AVMEDIA_TYPE_VIDEO:
    is->videoStream = stream_index;
    is->video_st = pFormatCtx->streams[stream_index];

    // Frame timing state for the video clock / refresh scheduling.
    is->frame_timer = (double)av_gettime() / 1000000.0;
    is->frame_last_delay = 40e-3;
    is->video_current_pts_time = av_gettime();

    packet_queue_init(&is->videoq);

    createScreen(&is->video_player, NULL, is->video_st->codec->width, is->video_st->codec->height);

    is->video_tid = malloc(sizeof(*(is->video_tid)));
	// BUG FIX: pthread_create would write through a NULL thread handle if
	// this allocation failed.
	if (!is->video_tid) {
		return -1;
	}
	// uncomment for video
    pthread_create(is->video_tid, NULL, (void *) &video_thread, is);
    is->sws_ctx = createScaler(&is->video_player, is->video_st->codec);

    codecCtx->get_buffer2 = our_get_buffer;

    break;
  default:
    break;
  }

  return 0;
}
Ejemplo n.º 9
0
// JNI bridge: forwards the Java-side request straight to the native
// buffer-queue player setup.  The env/clazz arguments are unused.
void naCreateBufferQueueAudioPlayer(JNIEnv* env, jclass clazz) {
	createBufferQueueAudioPlayer();
}
Ejemplo n.º 10
0
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue.  It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
	//struct engine engine;

// Fixed-timestep pacing: target 60 updates/sec, so one tick every
// 1000/60 ≈ 16 ms.
const int FRAMES_PER_SECOND = 60;
const int SKIP_TICKS = 1000 / FRAMES_PER_SECOND;

	// Make sure glue isn't stripped.
	app_dummy();

	// Wire the (file-scope) engine struct into the app-glue callbacks.
	memset(&engine, 0, sizeof(engine));
	state->userData = &engine;
	state->onAppCmd = engine_handle_cmd;
	state->onInputEvent = engine_handle_input;
	engine.app = state;
	
	createSoundEngine();
	createBufferQueueAudioPlayer();

	// Prepare to monitor accelerometer
	/*engine.sensorManager = ASensorManager_getInstance();
	engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
			ASENSOR_TYPE_ACCELEROMETER);
	engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
			state->looper, LOOPER_ID_USER, NULL, NULL);*/

	if (state->savedState != NULL) {
		// We are starting with a previous saved state; restore from it.
	   // engine.state = *(struct saved_state*)state->savedState;
	}

	int32_t next_game_tick = getTickCount();
	int sleep_time = 0;
//http://www.koonsolo.com/news/dewitters-gameloop/
	// loop waiting for stuff to do.
	// NOTE(review): this loop never breaks, so the shutdownAudio() call
	// after it is unreachable — process teardown relies on the OS.
	while (1) {
		// Read all pending events.
		int ident;
		int events;
		struct android_poll_source* source;

		// If not animating, we will block forever waiting for events.
		// If animating, we loop until all events are read, then continue
		// to draw the next frame of animation.
		if ((ident = ALooper_pollAll(0, NULL, &events,
				(void**)&source)) >= 0) {
			// Process this event.
			if (source != NULL) {
				source->process(state, source);
			}
		}

		engine_draw_frame(&engine);
		next_game_tick += SKIP_TICKS;
        sleep_time = next_game_tick - now_ms();
        // NOTE(review): sleep_time is in milliseconds (SKIP_TICKS/now_ms),
        // but POSIX sleep() takes SECONDS — unless sleep() is a project
        // helper, this over-sleeps ~1000x; usleep(sleep_time * 1000) is
        // the likely intent.  Confirm which sleep() is in scope.
        if( sleep_time >= 0 ) {
            sleep( sleep_time );
        }
	}

	shutdownAudio();
}