static void retro_task_threaded_init(void) { running_lock = slock_new(); finished_lock = slock_new(); worker_cond = scond_new(); slock_lock(running_lock); worker_continue = true; slock_unlock(running_lock); worker_thread = sthread_create(threaded_worker, NULL); }
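/* Illustrative sketch, not the actual threaded_worker(): the lock/condition
 * handshake initialized above implies a worker loop of roughly this shape,
 * which sleeps on worker_cond under running_lock and exits once the matching
 * deinit clears worker_continue and signals the condition. Task-queue
 * handling is elided; the declarations mirror the statics used by
 * retro_task_threaded_init() and libretro-common's rthreads API. */
#include <boolean.h>
#include <rthreads/rthreads.h>

static slock_t *running_lock;         /* already present in the real file */
static scond_t *worker_cond;          /* already present in the real file */
static volatile bool worker_continue; /* already present in the real file */

static void threaded_worker_sketch(void *userdata)
{
   (void)userdata;

   for (;;)
   {
      slock_lock(running_lock);

      if (!worker_continue)
      {
         /* Deinit requested: leave the loop so sthread_join() can return. */
         slock_unlock(running_lock);
         break;
      }

      /* Sleep until a task is queued or shutdown is requested. */
      scond_wait(worker_cond, running_lock);
      slock_unlock(running_lock);

      /* ... pop and run queued tasks here ... */
   }
}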
static bool video_thread_init(thread_video_t *thr, const video_info_t *info, const input_driver_t **input, void **input_data) { size_t max_size; thread_packet_t pkt = {CMD_INIT}; thr->lock = slock_new(); thr->alpha_lock = slock_new(); thr->frame.lock = slock_new(); thr->cond_cmd = scond_new(); thr->cond_thread = scond_new(); thr->input = input; thr->input_data = input_data; thr->info = *info; thr->alive = true; thr->focus = true; thr->has_windowed = true; thr->suppress_screensaver = true; max_size = info->input_scale * RARCH_SCALE_BASE; max_size *= max_size; max_size *= info->rgb32 ? sizeof(uint32_t) : sizeof(uint16_t); thr->frame.buffer = (uint8_t*)malloc(max_size); if (!thr->frame.buffer) return false; memset(thr->frame.buffer, 0x80, max_size); thr->last_time = cpu_features_get_time_usec(); thr->thread = sthread_create(video_thread_loop, thr); if (!thr->thread) return false; video_thread_send_and_wait_user_to_thread(thr, &pkt); thr->send_and_wait = video_thread_send_and_wait_user_to_thread; return pkt.data.b; }
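/* Illustrative sketch of the user-side half of the CMD_INIT handshake used
 * above; the real video_thread_send_and_wait_user_to_thread() is not part of
 * this listing. It assumes the wrapper stores the pending packet and a reply
 * marker under thr->lock (cmd_pkt, reply_cmd, CMD_NONE and the packet's
 * 'type' member below are placeholders), wakes the video thread through
 * cond_thread, and blocks on cond_cmd until video_thread_loop() has written
 * its answer (e.g. pkt->data.b for CMD_INIT) back into the packet. */
static void video_thread_send_and_wait_sketch(thread_video_t *thr,
      thread_packet_t *pkt)
{
   slock_lock(thr->lock);

   /* Publish the command for the video thread. */
   thr->cmd_pkt   = *pkt;       /* placeholder field */
   thr->reply_cmd = CMD_NONE;   /* placeholder field/constant */
   scond_signal(thr->cond_thread);

   /* Wait until the video thread reports that this command is done. */
   while (thr->reply_cmd != pkt->type) /* 'type': assumed command id field */
      scond_wait(thr->cond_cmd, thr->lock);

   /* Copy the reply (including any returned data) back to the caller. */
   *pkt = thr->cmd_pkt;

   slock_unlock(thr->lock);
}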
static void *sdl_audio_init(const char *device, unsigned rate, unsigned latency) { (void)device; if (SDL_InitSubSystem(SDL_INIT_AUDIO) < 0) return NULL; sdl_audio_t *sdl = (sdl_audio_t*)calloc(1, sizeof(*sdl)); if (!sdl) return NULL; /* We have to buffer up some data ourselves, so we let SDL carry approx half of the latency. SDL double buffers audio and we do as well. */ int frames = find_num_frames(rate, latency / 4); SDL_AudioSpec spec = {0}; spec.freq = rate; spec.format = AUDIO_S16SYS; spec.channels = 2; spec.samples = frames; /* This is in audio frames, not samples ... :( */ spec.callback = sdl_audio_cb; spec.userdata = sdl; SDL_AudioSpec out; if (SDL_OpenAudio(&spec, &out) < 0) { RARCH_ERR("Failed to open SDL audio: %s\n", SDL_GetError()); free(sdl); return NULL; } g_settings.audio.out_rate = out.freq; sdl->lock = slock_new(); sdl->cond = scond_new(); RARCH_LOG("SDL audio: Requested %d ms latency, got %d ms\n", latency, (int)(out.samples * 4 * 1000 / g_settings.audio.out_rate)); /* Create a buffer twice as big as needed and prefill the buffer. */ size_t bufsize = out.samples * 4 * sizeof(int16_t); void *tmp = calloc(1, bufsize); sdl->buffer = fifo_new(bufsize); if (tmp) { fifo_write(sdl->buffer, tmp, bufsize); free(tmp); } SDL_PauseAudio(0); return sdl; }
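/* Illustrative sketch of the consumer side wired up above. SDL invokes the
 * audio callback on its own thread; the callback drains the fifo under
 * sdl->lock, pads with silence on underrun, and signals sdl->cond so a
 * blocking writer can make progress. This is not the verbatim sdl_audio_cb;
 * fifo_read_avail()/fifo_read() are libretro-common's fifo_buffer API, and
 * the SDL/driver headers are assumed to be included as in the real file. */
static void sdl_audio_cb_sketch(void *data, Uint8 *stream, int len)
{
   sdl_audio_t *sdl = (sdl_audio_t*)data;
   size_t avail, to_read;

   slock_lock(sdl->lock);

   avail   = fifo_read_avail(sdl->buffer);
   to_read = (size_t)len > avail ? avail : (size_t)len;
   fifo_read(sdl->buffer, stream, to_read);

   /* Underrun: fill the remainder with silence instead of stale data. */
   memset(stream + to_read, 0, (size_t)len - to_read);

   scond_signal(sdl->cond);
   slock_unlock(sdl->lock);
}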
static void *drm_gfx_init(const video_info_t *video, const input_driver_t **input, void **input_data) { struct drm_video *_drmvars = (struct drm_video*) calloc(1, sizeof(struct drm_video)); if (!_drmvars) return NULL; /* Setup surface parameters */ _drmvars->menu_active = false; _drmvars->rgb32 = video->rgb32; /* It's very important that we set aspect here because the * call seq when a core is loaded is gfx_init()->set_aspect()->gfx_frame() * and we don't want the main surface to be setup in set_aspect() * before we get to gfx_frame(). */ _drmvars->current_aspect = video_driver_get_aspect_ratio(); /* Initialize the rest of the mutexes and conditions. */ _drmvars->vsync_condition = scond_new(); _drmvars->vsync_cond_mutex = slock_new(); _drmvars->pending_mutex = slock_new(); _drmvars->core_width = 0; _drmvars->core_height = 0; _drmvars->main_surface = NULL; _drmvars->menu_surface = NULL; if (input && input_data) *input = NULL; /* DRM Init */ if (!init_drm()) { RARCH_ERR ("DRM: Failed to initialize DRM\n"); free(_drmvars); return NULL; } else RARCH_LOG ("DRM: Init successful.\n"); _drmvars->kms_width = drm.current_mode->hdisplay; _drmvars->kms_height = drm.current_mode->vdisplay; return _drmvars; }
CDIF_MT::CDIF_MT(CDAccess *cda) : disc_cdaccess(cda), CDReadThread(NULL), SBMutex(NULL), SBCond(NULL) { try { CDIF_Message msg; RTS_Args s; SBMutex = slock_new(); SBCond = scond_new(); UnrecoverableError = false; s.cdif_ptr = this; CDReadThread = sthread_create((void (*)(void*))ReadThreadStart_C, &s); EmuThreadQueue.Read(&msg); } catch(...) { if(CDReadThread) { sthread_join((sthread_t*)CDReadThread); CDReadThread = NULL; } if(SBMutex) { slock_free((slock_t*)SBMutex); SBMutex = NULL; } if(SBCond) { scond_free((scond_t*)SBCond); SBCond = NULL; } if(disc_cdaccess) { delete disc_cdaccess; disc_cdaccess = NULL; } throw; } }
static void *dispmanx_gfx_init(const video_info_t *video, const input_driver_t **input, void **input_data) { int i; struct dispmanx_video *_dispvars = calloc(1, sizeof(struct dispmanx_video)); if (!_dispvars) return NULL; _dispvars->bytes_per_pixel = video->rgb32 ? 4 : 2; _dispvars->screen = 0; _dispvars->vcImagePtr = 0; _dispvars->pageflip_pending = 0; _dispvars->currentPage = NULL; _dispvars->pages = calloc(NUMPAGES, sizeof(struct dispmanx_page)); if (!_dispvars->pages) { free(_dispvars); return NULL; } for (i = 0; i < NUMPAGES; i++) { _dispvars->pages[i].numpage = i; _dispvars->pages[i].used = false; _dispvars->pages[i].dispvars = _dispvars; _dispvars->pages[i].page_used_mutex = slock_new(); } /* Initialize the rest of the mutexes and conditions. */ _dispvars->vsync_condition = scond_new(); _dispvars->pending_mutex = slock_new(); _dispvars->vsync_cond_mutex = slock_new(); bcm_host_init(); _dispvars->display = vc_dispmanx_display_open(_dispvars->screen); if (input && input_data) *input = NULL; return _dispvars; }
static void *rs_init(const char *device, unsigned rate, unsigned latency) { int channels, format; rsd_t *rsd = (rsd_t*)calloc(1, sizeof(rsd_t)); if (!rsd) return NULL; rsound_t *rd; if (rsd_init(&rd) < 0) { free(rsd); return NULL; } rsd->cond_lock = slock_new(); rsd->cond = scond_new(); rsd->buffer = fifo_new(1024 * 4); channels = 2; format = RSD_S16_NE; rsd_set_param(rd, RSD_CHANNELS, &channels); rsd_set_param(rd, RSD_SAMPLERATE, &rate); rsd_set_param(rd, RSD_LATENCY, &latency); if (device) rsd_set_param(rd, RSD_HOST, (void*)device); rsd_set_param(rd, RSD_FORMAT, &format); rsd_set_callback(rd, rsound_audio_cb, err_cb, 256, rsd); if (rsd_start(rd) < 0) { free(rsd); rsd_free(rd); return NULL; } rsd->rd = rd; return rsd; }
/** * rarch_threaded_audio_init: * @out_driver : output driver * @out_data : output audio data * @device : audio device (optional) * @audio_out_rate : output audio rate * @latency : audio latency * @drv : audio driver * * Starts an audio driver in a new thread. * Access to the audio driver is mediated through this wrapper driver. * This driver interfaces with the audio callback and is * only used in that case. * * Returns: true (1) if successful, otherwise false (0). **/ bool rarch_threaded_audio_init(const audio_driver_t **out_driver, void **out_data, const char *device, unsigned audio_out_rate, unsigned latency, const audio_driver_t *drv) { audio_thread_t *thr = (audio_thread_t*)calloc(1, sizeof(*thr)); if (!thr) return false; thr->driver = (const audio_driver_t*)drv; thr->device = device; thr->out_rate = audio_out_rate; thr->latency = latency; if (!(thr->cond = scond_new())) goto error; if (!(thr->lock = slock_new())) goto error; thr->alive = true; thr->stopped = true; if (!(thr->thread = sthread_create(audio_thread_loop, thr))) goto error; /* Wait until thread has initialized (or failed) the driver. */ slock_lock(thr->lock); while (!thr->inited) scond_wait(thr->cond, thr->lock); slock_unlock(thr->lock); if (thr->inited < 0) /* Thread failed. */ goto error; *out_driver = &audio_thread; *out_data = thr; return true; error: *out_driver = NULL; *out_data = NULL; audio_thread_free(thr); return false; }
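/* Usage sketch for the wrapper above: pass in the "real" driver and receive
 * the threaded front-end through *out_driver / *out_data. The ALSA driver
 * symbol and the rate/latency numbers below are illustrative only and are
 * not taken from this listing. */
static bool open_threaded_audio_sketch(void)
{
   const audio_driver_t *driver = NULL;
   void *driver_data            = NULL;

   /* Wrap e.g. the ALSA driver: default device, 48 kHz, 64 ms latency. */
   if (!rarch_threaded_audio_init(&driver, &driver_data,
            "default", 48000, 64, &audio_alsa))
   {
      RARCH_ERR("Failed to start threaded audio driver.\n");
      return false;
   }

   /* From here on, all audio calls go through 'driver' with 'driver_data'
    * as the handle; the wrapped driver itself runs in audio_thread_loop. */
   return true;
}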
static bool init_thread(ffemu_t *handle) { handle->lock = slock_new(); handle->cond_lock = slock_new(); handle->cond = scond_new(); handle->audio_fifo = fifo_new(32000 * sizeof(int16_t) * handle->params.channels * MAX_FRAMES / 60); handle->attr_fifo = fifo_new(sizeof(struct ffemu_video_data) * MAX_FRAMES); handle->video_fifo = fifo_new(handle->params.fb_width * handle->params.fb_height * handle->video.pix_size * MAX_FRAMES); handle->alive = true; handle->can_sleep = true; handle->thread = sthread_create(ffemu_thread, handle); /* Report failure to the caller instead of asserting on allocation failure. */ if (!handle->lock || !handle->cond_lock || !handle->cond || !handle->audio_fifo || !handle->attr_fifo || !handle->video_fifo || !handle->thread) return false; return true; }
static void *dispmanx_gfx_init(const video_info_t *video, const input_driver_t **input, void **input_data) { struct dispmanx_video *_dispvars = calloc(1, sizeof(struct dispmanx_video)); if (!_dispvars) return NULL; bcm_host_init(); _dispvars->display = vc_dispmanx_display_open(0 /* LCD */); /* If the console framebuffer has active overscan settings, * the user must have overscan_scale=1 in config.txt to have * the same size for both fb console and dispmanx. */ graphics_get_display_size(_dispvars->display, &_dispvars->dispmanx_width, &_dispvars->dispmanx_height); /* Setup surface parameters */ _dispvars->vc_image_ptr = 0; _dispvars->pageflip_pending = 0; _dispvars->current_page = NULL; _dispvars->menu_active = false; /* Initialize the rest of the mutexes and conditions. */ _dispvars->vsync_condition = scond_new(); _dispvars->vsync_cond_mutex = slock_new(); _dispvars->pending_mutex = slock_new(); dispmanx_surface_init(_dispvars, video->rgb32 ? 4 : 2, video->rgb32 ? VC_IMAGE_XRGB8888 : VC_IMAGE_RGB565, 0 /* layer */, 255 /* alpha */, 3, /* numpages */ &_dispvars->surfaces[MAIN_SURFACE]); if (input && input_data) *input = NULL; dispmanx_blank_console(_dispvars); return _dispvars; }
static bool create_softfilter_graph(rarch_softfilter_t *filt, enum retro_pixel_format in_pixel_format, unsigned max_width, unsigned max_height, softfilter_simd_mask_t cpu_features, unsigned threads) { unsigned input_fmts, input_fmt, output_fmts, i = 0; struct config_file_userdata userdata; char key[64] = {0}; char name[64] = {0}; (void)i; snprintf(key, sizeof(key), "filter"); if (!config_get_array(filt->conf, key, name, sizeof(name))) { RARCH_ERR("Could not find 'filter' array in config.\n"); return false; } if (filt->num_plugs == 0) { RARCH_ERR("No filter plugs found. Exiting...\n"); return false; } filt->impl = softfilter_find_implementation(filt, name); if (!filt->impl) { RARCH_ERR("Could not find implementation.\n"); return false; } userdata.conf = filt->conf; /* Index-specific configs take priority over ident-specific. */ userdata.prefix[0] = key; userdata.prefix[1] = filt->impl->short_ident; /* Simple assumptions. */ filt->pix_fmt = in_pixel_format; input_fmts = filt->impl->query_input_formats(); switch (in_pixel_format) { case RETRO_PIXEL_FORMAT_XRGB8888: input_fmt = SOFTFILTER_FMT_XRGB8888; break; case RETRO_PIXEL_FORMAT_RGB565: input_fmt = SOFTFILTER_FMT_RGB565; break; default: return false; } if (!(input_fmt & input_fmts)) { RARCH_ERR("Softfilter does not support input format.\n"); return false; } output_fmts = filt->impl->query_output_formats(input_fmt); /* If we have a match of input/output formats, use that. */ if (output_fmts & input_fmt) filt->out_pix_fmt = in_pixel_format; else if (output_fmts & SOFTFILTER_FMT_XRGB8888) filt->out_pix_fmt = RETRO_PIXEL_FORMAT_XRGB8888; else if (output_fmts & SOFTFILTER_FMT_RGB565) filt->out_pix_fmt = RETRO_PIXEL_FORMAT_RGB565; else { RARCH_ERR("Did not find suitable output format for softfilter.\n"); return false; } filt->max_width = max_width; filt->max_height = max_height; filt->impl_data = filt->impl->create( &softfilter_config, input_fmt, input_fmt, max_width, max_height, threads != RARCH_SOFTFILTER_THREADS_AUTO ? threads : retro_get_cpu_cores(), cpu_features, &userdata); if (!filt->impl_data) { RARCH_ERR("Failed to create softfilter state.\n"); return false; } threads = filt->impl->query_num_threads(filt->impl_data); if (!threads) { RARCH_ERR("Invalid number of threads.\n"); return false; } filt->threads = threads; RARCH_LOG("Using %u threads for softfilter.\n", threads); filt->packets = (struct softfilter_work_packet*) calloc(threads, sizeof(*filt->packets)); if (!filt->packets) { RARCH_ERR("Failed to allocate softfilter packets.\n"); return false; } #ifdef HAVE_THREADS filt->thread_data = (struct filter_thread_data*) calloc(threads, sizeof(*filt->thread_data)); if (!filt->thread_data) return false; for (i = 0; i < threads; i++) { filt->thread_data[i].userdata = filt->impl_data; filt->thread_data[i].done = true; filt->thread_data[i].lock = slock_new(); if (!filt->thread_data[i].lock) return false; filt->thread_data[i].cond = scond_new(); if (!filt->thread_data[i].cond) return false; filt->thread_data[i].thread = sthread_create( filter_thread_loop, &filt->thread_data[i]); if (!filt->thread_data[i].thread) return false; } #endif return true; }
rarch_softfilter_t *rarch_softfilter_new(const char *filter_path, unsigned threads, enum retro_pixel_format in_pixel_format, unsigned max_width, unsigned max_height) { unsigned i, cpu_features, output_fmts, input_fmts, input_fmt; softfilter_get_implementation_t cb; i = 0; (void)i; (void)filter_path; rarch_softfilter_t *filt = (rarch_softfilter_t*)calloc(1, sizeof(*filt)); if (!filt) return NULL; cb = NULL; #if defined(HAVE_FILTERS_BUILTIN) cb = (softfilter_get_implementation_t)softfilter_get_implementation_from_idx(g_settings.video.filter_idx); #elif defined(HAVE_DYLIB) filt->lib = dylib_load(filter_path); if (!filt->lib) goto error; cb = (softfilter_get_implementation_t)dylib_proc(filt->lib, "softfilter_get_implementation"); #endif if (!cb) { RARCH_ERR("Couldn't find softfilter symbol.\n"); goto error; } cpu_features = rarch_get_cpu_features(); filt->impl = cb(cpu_features); if (!filt->impl) goto error; RARCH_LOG("Loaded softfilter \"%s\".\n", filt->impl->ident); if (filt->impl->api_version != SOFTFILTER_API_VERSION) { RARCH_ERR("Softfilter ABI mismatch.\n"); goto error; } // Simple assumptions. filt->pix_fmt = in_pixel_format; input_fmts = filt->impl->query_input_formats(); switch (in_pixel_format) { case RETRO_PIXEL_FORMAT_XRGB8888: input_fmt = SOFTFILTER_FMT_XRGB8888; break; case RETRO_PIXEL_FORMAT_RGB565: input_fmt = SOFTFILTER_FMT_RGB565; break; default: goto error; } if (!(input_fmt & input_fmts)) { RARCH_ERR("Softfilter does not support input format.\n"); goto error; } output_fmts = filt->impl->query_output_formats(input_fmt); if (output_fmts & input_fmt) // If we have a match of input/output formats, use that. filt->out_pix_fmt = in_pixel_format; else if (output_fmts & SOFTFILTER_FMT_XRGB8888) filt->out_pix_fmt = RETRO_PIXEL_FORMAT_XRGB8888; else if (output_fmts & SOFTFILTER_FMT_RGB565) filt->out_pix_fmt = RETRO_PIXEL_FORMAT_RGB565; else { RARCH_ERR("Did not find suitable output format for softfilter.\n"); goto error; } filt->max_width = max_width; filt->max_height = max_height; filt->impl_data = filt->impl->create(input_fmt, input_fmt, max_width, max_height, threads != RARCH_SOFTFILTER_THREADS_AUTO ? threads : rarch_get_cpu_cores(), cpu_features); if (!filt->impl_data) { RARCH_ERR("Failed to create softfilter state.\n"); goto error; } threads = filt->impl->query_num_threads(filt->impl_data); if (!threads) { RARCH_ERR("Invalid number of threads.\n"); goto error; } RARCH_LOG("Using %u threads for softfilter.\n", threads); filt->packets = (struct softfilter_work_packet*)calloc(threads, sizeof(*filt->packets)); if (!filt->packets) { RARCH_ERR("Failed to allocate softfilter packets.\n"); goto error; } #ifdef HAVE_THREADS filt->thread_data = (struct filter_thread_data*)calloc(threads, sizeof(*filt->thread_data)); if (!filt->thread_data) goto error; filt->threads = threads; for (i = 0; i < threads; i++) { filt->thread_data[i].userdata = filt->impl_data; filt->thread_data[i].done = true; filt->thread_data[i].lock = slock_new(); if (!filt->thread_data[i].lock) goto error; filt->thread_data[i].cond = scond_new(); if (!filt->thread_data[i].cond) goto error; filt->thread_data[i].thread = sthread_create(filter_thread_loop, &filt->thread_data[i]); if (!filt->thread_data[i].thread) goto error; } #endif return filt; error: rarch_softfilter_free(filt); return NULL; }
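/* Illustrative sketch, not the actual filter_thread_loop(): the per-thread
 * lock/cond/done fields initialized above suggest a worker of this shape.
 * The dispatcher fills a work packet, clears 'done' and signals the
 * condition; the worker runs the packet and flips 'done' back. The 'die'
 * flag and the packet layout (work callback plus per-thread data) are
 * assumptions made for the sake of the example. */
static void filter_thread_loop_sketch(void *data)
{
   struct filter_thread_data *thr = (struct filter_thread_data*)data;

   for (;;)
   {
      slock_lock(thr->lock);

      /* Sleep until there is work (done == false) or shutdown ('die'). */
      while (thr->done && !thr->die)
         scond_wait(thr->cond, thr->lock);

      if (thr->die)
      {
         slock_unlock(thr->lock);
         break;
      }
      slock_unlock(thr->lock);

      /* Process this thread's slice of the frame. */
      thr->packet->work(thr->userdata, thr->packet->thread_data);

      slock_lock(thr->lock);
      thr->done = true;
      scond_signal(thr->cond);
      slock_unlock(thr->lock);
   }
}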
static void *sunxi_gfx_init(const video_info_t *video, const input_driver_t **input, void **input_data) { struct sunxi_video *_dispvars = (struct sunxi_video*) calloc(1, sizeof(struct sunxi_video)); if (!_dispvars) return NULL; _dispvars->src_bytes_per_pixel = video->rgb32 ? 4 : 2; _dispvars->sunxi_disp = sunxi_disp_init("/dev/fb0"); /* Blank text console and disable cursor blinking. */ sunxi_blank_console(_dispvars); _dispvars->pages = (struct sunxi_page*)calloc(NUMPAGES, sizeof (struct sunxi_page)); if (!_dispvars->pages) goto error; _dispvars->dst_pitch = _dispvars->sunxi_disp->xres * _dispvars->sunxi_disp->bits_per_pixel / 8; /* Considering 4 bytes per pixel since we will be in 32bpp on the CB/CB2/CT for hw scalers to work. */ _dispvars->dst_pixels_per_line = _dispvars->dst_pitch / 4; _dispvars->pageflip_pending = false; _dispvars->nextPage = &_dispvars->pages[0]; _dispvars->keep_vsync = true; _dispvars->menu_active = false; _dispvars->bytes_per_pixel = video->rgb32 ? 4 : 2; /* It's very important that we set aspect here because the * call seq when a core is loaded is gfx_init()->set_aspect()->gfx_frame() * and we don't want the main surface to be setup in set_aspect() * before we get to gfx_frame(). */ _dispvars->aspect_ratio = video_driver_get_aspect_ratio(); switch (_dispvars->bytes_per_pixel) { case 2: pixman_blit = pixman_composite_src_0565_8888_asm_neon; break; case 4: pixman_blit = pixman_composite_src_8888_8888_asm_neon; break; default: goto error; } _dispvars->pending_mutex = slock_new(); _dispvars->vsync_condition = scond_new(); if (input && input_data) *input = NULL; /* Launching vsync thread */ _dispvars->vsync_thread = sthread_create(sunxi_vsync_thread_func, _dispvars); return _dispvars; error: if (_dispvars) free(_dispvars); return NULL; }
static void *coreaudio_init(const char *device, unsigned rate, unsigned latency) { size_t fifo_size; UInt32 i_size; AudioStreamBasicDescription real_desc; #ifdef OSX_PPC Component comp; #else AudioComponent comp; #endif #ifndef TARGET_OS_IPHONE AudioChannelLayout layout = {0}; #endif AURenderCallbackStruct cb = {0}; AudioStreamBasicDescription stream_desc = {0}; bool component_unavailable = false; static bool session_initialized = false; coreaudio_t *dev = NULL; #ifdef OSX_PPC ComponentDescription desc = {0}; #else AudioComponentDescription desc = {0}; #endif settings_t *settings = config_get_ptr(); (void)session_initialized; (void)device; dev = (coreaudio_t*)calloc(1, sizeof(*dev)); if (!dev) return NULL; dev->lock = slock_new(); dev->cond = scond_new(); #if TARGET_OS_IPHONE if (!session_initialized) { session_initialized = true; AudioSessionInitialize(0, 0, coreaudio_interrupt_listener, 0); AudioSessionSetActive(true); } #endif /* Create AudioComponent */ desc.componentType = kAudioUnitType_Output; #if TARGET_OS_IPHONE desc.componentSubType = kAudioUnitSubType_RemoteIO; #else desc.componentSubType = kAudioUnitSubType_HALOutput; #endif desc.componentManufacturer = kAudioUnitManufacturer_Apple; #ifdef OSX_PPC comp = FindNextComponent(NULL, &desc); #else comp = AudioComponentFindNext(NULL, &desc); #endif if (comp == NULL) goto error; #ifdef OSX_PPC component_unavailable = (OpenAComponent(comp, &dev->dev) != noErr); #else component_unavailable = (AudioComponentInstanceNew(comp, &dev->dev) != noErr); #endif if (component_unavailable) goto error; #if !TARGET_OS_IPHONE if (device) choose_output_device(dev, device); #endif dev->dev_alive = true; /* Set audio format */ stream_desc.mSampleRate = rate; stream_desc.mBitsPerChannel = sizeof(float) * CHAR_BIT; stream_desc.mChannelsPerFrame = 2; stream_desc.mBytesPerPacket = 2 * sizeof(float); stream_desc.mBytesPerFrame = 2 * sizeof(float); stream_desc.mFramesPerPacket = 1; stream_desc.mFormatID = kAudioFormatLinearPCM; stream_desc.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked | (is_little_endian() ? 0 : kAudioFormatFlagIsBigEndian); if (AudioUnitSetProperty(dev->dev, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &stream_desc, sizeof(stream_desc)) != noErr) goto error; /* Check returned audio format. */ i_size = sizeof(real_desc); if (AudioUnitGetProperty(dev->dev, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &real_desc, &i_size) != noErr) goto error; if (real_desc.mChannelsPerFrame != stream_desc.mChannelsPerFrame) goto error; if (real_desc.mBitsPerChannel != stream_desc.mBitsPerChannel) goto error; if (real_desc.mFormatFlags != stream_desc.mFormatFlags) goto error; if (real_desc.mFormatID != stream_desc.mFormatID) goto error; RARCH_LOG("[CoreAudio]: Using output sample rate of %.1f Hz\n", (float)real_desc.mSampleRate); settings->audio.out_rate = real_desc.mSampleRate; /* Set channel layout (fails on iOS). */ #ifndef TARGET_OS_IPHONE layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo; if (AudioUnitSetProperty(dev->dev, kAudioUnitProperty_AudioChannelLayout, kAudioUnitScope_Input, 0, &layout, sizeof(layout)) != noErr) goto error; #endif /* Set callbacks and finish up. */ cb.inputProc = audio_write_cb; cb.inputProcRefCon = dev; if (AudioUnitSetProperty(dev->dev, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &cb, sizeof(cb)) != noErr) goto error; if (AudioUnitInitialize(dev->dev) != noErr) goto error; fifo_size = (latency * settings->audio.out_rate) / 1000; fifo_size *= 2 * sizeof(float); dev->buffer_size = fifo_size; dev->buffer = fifo_new(fifo_size); if (!dev->buffer) goto error; RARCH_LOG("[CoreAudio]: Using buffer size of %u bytes: (latency = %u ms)\n", (unsigned)fifo_size, latency); if (AudioOutputUnitStart(dev->dev) != noErr) goto error; return dev; error: RARCH_ERR("[CoreAudio]: Failed to initialize driver ...\n"); coreaudio_free(dev); return NULL; }
CDIF_Queue::CDIF_Queue() { ze_mutex = slock_new(); ze_cond = scond_new(); }
static void *alsa_thread_init(const char *device, unsigned rate, unsigned latency) { snd_pcm_uframes_t buffer_size; snd_pcm_format_t format; snd_pcm_hw_params_t *params = NULL; snd_pcm_sw_params_t *sw_params = NULL; const char *alsa_dev = device ? device : "default"; unsigned latency_usec = latency * 1000 / 2; unsigned channels = 2; unsigned periods = 4; alsa_thread_t *alsa = (alsa_thread_t*) calloc(1, sizeof(alsa_thread_t)); if (!alsa) return NULL; TRY_ALSA(snd_pcm_open(&alsa->pcm, alsa_dev, SND_PCM_STREAM_PLAYBACK, 0)); TRY_ALSA(snd_pcm_hw_params_malloc(¶ms)); alsa->has_float = alsathread_find_float_format(alsa->pcm, params); format = alsa->has_float ? SND_PCM_FORMAT_FLOAT : SND_PCM_FORMAT_S16; TRY_ALSA(snd_pcm_hw_params_any(alsa->pcm, params)); TRY_ALSA(snd_pcm_hw_params_set_access( alsa->pcm, params, SND_PCM_ACCESS_RW_INTERLEAVED)); TRY_ALSA(snd_pcm_hw_params_set_format(alsa->pcm, params, format)); TRY_ALSA(snd_pcm_hw_params_set_channels(alsa->pcm, params, channels)); TRY_ALSA(snd_pcm_hw_params_set_rate(alsa->pcm, params, rate, 0)); TRY_ALSA(snd_pcm_hw_params_set_buffer_time_near( alsa->pcm, params, &latency_usec, NULL)); TRY_ALSA(snd_pcm_hw_params_set_periods_near( alsa->pcm, params, &periods, NULL)); TRY_ALSA(snd_pcm_hw_params(alsa->pcm, params)); /* Shouldn't have to bother with this, * but some drivers are apparently broken. */ if (snd_pcm_hw_params_get_period_size(params, &alsa->period_frames, NULL)) snd_pcm_hw_params_get_period_size_min( params, &alsa->period_frames, NULL); RARCH_LOG("ALSA: Period size: %d frames\n", (int)alsa->period_frames); if (snd_pcm_hw_params_get_buffer_size(params, &buffer_size)) snd_pcm_hw_params_get_buffer_size_max(params, &buffer_size); RARCH_LOG("ALSA: Buffer size: %d frames\n", (int)buffer_size); alsa->buffer_size = snd_pcm_frames_to_bytes(alsa->pcm, buffer_size); alsa->period_size = snd_pcm_frames_to_bytes(alsa->pcm, alsa->period_frames); TRY_ALSA(snd_pcm_sw_params_malloc(&sw_params)); TRY_ALSA(snd_pcm_sw_params_current(alsa->pcm, sw_params)); TRY_ALSA(snd_pcm_sw_params_set_start_threshold( alsa->pcm, sw_params, buffer_size / 2)); TRY_ALSA(snd_pcm_sw_params(alsa->pcm, sw_params)); snd_pcm_hw_params_free(params); snd_pcm_sw_params_free(sw_params); alsa->fifo_lock = slock_new(); alsa->cond_lock = slock_new(); alsa->cond = scond_new(); alsa->buffer = fifo_new(alsa->buffer_size); if (!alsa->fifo_lock || !alsa->cond_lock || !alsa->cond || !alsa->buffer) goto error; alsa->worker_thread = sthread_create(alsa_worker_thread, alsa); if (!alsa->worker_thread) { RARCH_ERR("error initializing worker thread"); goto error; } return alsa; error: RARCH_ERR("ALSA: Failed to initialize...\n"); if (params) snd_pcm_hw_params_free(params); if (sw_params) snd_pcm_sw_params_free(sw_params); alsa_thread_free(alsa); return NULL; }
static void *sl_init(const char *device, unsigned rate, unsigned latency) { unsigned i; SLInterfaceID id; SLboolean req; SLresult res; sl_t *sl; SLDataFormat_PCM fmt_pcm = {0}; SLDataSource audio_src = {0}; SLDataSink audio_sink = {0}; SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {0}; SLDataLocator_OutputMix loc_outmix = {0}; settings_t *settings = config_get_ptr(); (void)device; id = SL_IID_ANDROIDSIMPLEBUFFERQUEUE; req = SL_BOOLEAN_TRUE; res = 0; sl = (sl_t*)calloc(1, sizeof(sl_t)); if (!sl) goto error; RARCH_LOG("[SLES]: Requested audio latency: %u ms.\n", latency); GOTO_IF_FAIL(slCreateEngine(&sl->engine_object, 0, NULL, 0, NULL, NULL)); GOTO_IF_FAIL(SLObjectItf_Realize(sl->engine_object, SL_BOOLEAN_FALSE)); GOTO_IF_FAIL(SLObjectItf_GetInterface(sl->engine_object, SL_IID_ENGINE, &sl->engine)); GOTO_IF_FAIL(SLEngineItf_CreateOutputMix(sl->engine, &sl->output_mix, 0, NULL, NULL)); GOTO_IF_FAIL(SLObjectItf_Realize(sl->output_mix, SL_BOOLEAN_FALSE)); if (settings->audio.block_frames) sl->buf_size = settings->audio.block_frames * 4; else sl->buf_size = next_pow2(32 * latency); sl->buf_count = (latency * 4 * rate + 500) / 1000; sl->buf_count = (sl->buf_count + sl->buf_size / 2) / sl->buf_size; sl->buffer = (uint8_t**)calloc(sizeof(uint8_t*), sl->buf_count); if (!sl->buffer) goto error; sl->buffer_chunk = (uint8_t*)calloc(sl->buf_count, sl->buf_size); if (!sl->buffer_chunk) goto error; for (i = 0; i < sl->buf_count; i++) sl->buffer[i] = sl->buffer_chunk + i * sl->buf_size; RARCH_LOG("[SLES]: Setting audio latency: Block size = %u, Blocks = %u, Total = %u ...\n", sl->buf_size, sl->buf_count, sl->buf_size * sl->buf_count); fmt_pcm.formatType = SL_DATAFORMAT_PCM; fmt_pcm.numChannels = 2; fmt_pcm.samplesPerSec = rate * 1000; /* Sample rate is in milli-Hz. */ fmt_pcm.bitsPerSample = 16; fmt_pcm.containerSize = 16; fmt_pcm.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT; fmt_pcm.endianness = SL_BYTEORDER_LITTLEENDIAN; /* Android only. */ audio_src.pLocator = &loc_bufq; audio_src.pFormat = &fmt_pcm; loc_bufq.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE; loc_bufq.numBuffers = sl->buf_count; loc_outmix.locatorType = SL_DATALOCATOR_OUTPUTMIX; loc_outmix.outputMix = sl->output_mix; audio_sink.pLocator = &loc_outmix; GOTO_IF_FAIL(SLEngineItf_CreateAudioPlayer(sl->engine, &sl->buffer_queue_object, &audio_src, &audio_sink, 1, &id, &req)); GOTO_IF_FAIL(SLObjectItf_Realize(sl->buffer_queue_object, SL_BOOLEAN_FALSE)); GOTO_IF_FAIL(SLObjectItf_GetInterface(sl->buffer_queue_object, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &sl->buffer_queue)); sl->cond = scond_new(); sl->lock = slock_new(); (*sl->buffer_queue)->RegisterCallback(sl->buffer_queue, opensl_callback, sl); /* Enqueue a bit to get stuff rolling. */ sl->buffered_blocks = sl->buf_count; sl->buffer_index = 0; for (i = 0; i < sl->buf_count; i++) (*sl->buffer_queue)->Enqueue(sl->buffer_queue, sl->buffer[i], sl->buf_size); GOTO_IF_FAIL(SLObjectItf_GetInterface(sl->buffer_queue_object, SL_IID_PLAY, &sl->player)); GOTO_IF_FAIL(SLPlayItf_SetPlayState(sl->player, SL_PLAYSTATE_PLAYING)); return sl; error: RARCH_ERR("Couldn't initialize OpenSL ES driver, error code: [%d].\n", (int)res); sl_free(sl); return NULL; }
static void *ja_init(const char *device, unsigned rate, unsigned latency) { int i; const char **jports = NULL; char *dest_ports[2]; size_t bufsize = 0; int parsed = 0; settings_t *settings = config_get_ptr(); jack_t *jd = (jack_t*)calloc(1, sizeof(jack_t)); if (!jd) return NULL; jd->cond = scond_new(); jd->cond_lock = slock_new(); jd->client = jack_client_open("RetroArch", JackNullOption, NULL); if (jd->client == NULL) goto error; settings->audio.out_rate = jack_get_sample_rate(jd->client); jack_set_process_callback(jd->client, process_cb, jd); jack_on_shutdown(jd->client, shutdown_cb, jd); jd->ports[0] = jack_port_register(jd->client, "left", JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0); jd->ports[1] = jack_port_register(jd->client, "right", JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0); if (jd->ports[0] == NULL || jd->ports[1] == NULL) { RARCH_ERR("Failed to register ports.\n"); goto error; } jports = jack_get_ports(jd->client, NULL, NULL, JackPortIsPhysical | JackPortIsInput); if (jports == NULL) { RARCH_ERR("Failed to get ports.\n"); goto error; } bufsize = find_buffersize(jd, latency); jd->buffer_size = bufsize; RARCH_LOG("JACK: Internal buffer size: %d frames.\n", (int)(bufsize / sizeof(jack_default_audio_sample_t))); for (i = 0; i < 2; i++) { jd->buffer[i] = jack_ringbuffer_create(bufsize); if (jd->buffer[i] == NULL) { RARCH_ERR("Failed to create buffers.\n"); goto error; } } parsed = parse_ports(dest_ports, jports); if (jack_activate(jd->client) < 0) { RARCH_ERR("Failed to activate Jack...\n"); goto error; } for (i = 0; i < 2; i++) { if (jack_connect(jd->client, jack_port_name(jd->ports[i]), dest_ports[i])) { RARCH_ERR("Failed to connect to Jack port.\n"); goto error; } } for (i = 0; i < parsed; i++) free(dest_ports[i]); jack_free(jports); return jd; error: if (jports != NULL) jack_free(jports); return NULL; }
void ANativeActivity_onCreate(ANativeActivity* activity, void* savedState, size_t savedStateSize) { int msgpipe[2]; struct android_app* android_app; (void)savedState; (void)savedStateSize; RARCH_LOG("Creating Native Activity: %p\n", activity); activity->callbacks->onDestroy = onDestroy; activity->callbacks->onStart = onStart; activity->callbacks->onResume = onResume; activity->callbacks->onSaveInstanceState = NULL; activity->callbacks->onPause = onPause; activity->callbacks->onStop = onStop; activity->callbacks->onConfigurationChanged = onConfigurationChanged; activity->callbacks->onLowMemory = NULL; activity->callbacks->onWindowFocusChanged = onWindowFocusChanged; activity->callbacks->onNativeWindowCreated = onNativeWindowCreated; activity->callbacks->onNativeWindowDestroyed = onNativeWindowDestroyed; activity->callbacks->onInputQueueCreated = onInputQueueCreated; activity->callbacks->onInputQueueDestroyed = onInputQueueDestroyed; /* These flags are set only for the native activity and are reset when it ends. */ ANativeActivity_setWindowFlags(activity, AWINDOW_FLAG_KEEP_SCREEN_ON | AWINDOW_FLAG_FULLSCREEN, 0); if (pthread_key_create(&thread_key, jni_thread_destruct)) RARCH_ERR("Error initializing pthread_key\n"); android_app = (struct android_app*)calloc(1, sizeof(*android_app)); if (!android_app) { RARCH_ERR("Failed to initialize android_app\n"); return; } memset(android_app, 0, sizeof(struct android_app)); android_app->activity = activity; android_app->mutex = (slock_t*)slock_new(); android_app->cond = (scond_t*)scond_new(); if (pipe(msgpipe)) { RARCH_ERR("could not create pipe: %s.\n", strerror(errno)); activity->instance = NULL; slock_free(android_app->mutex); scond_free(android_app->cond); free(android_app); return; } android_app->msgread = msgpipe[0]; android_app->msgwrite = msgpipe[1]; android_app->thread = (sthread_t*)sthread_create(android_app_entry, android_app); /* Wait for the thread to start. */ slock_lock(android_app->mutex); while (!android_app->running) scond_wait(android_app->cond, android_app->mutex); slock_unlock(android_app->mutex); activity->instance = android_app; }
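/* Illustrative sketch of the other half of the startup handshake above:
 * android_app_entry(), running on the freshly created thread, marks itself
 * as running and signals the condition so the wait loop in
 * ANativeActivity_onCreate() can return. Only the handshake is shown; the
 * JNI attach, the ALooper setup for msgread and the frontend main loop of
 * the real entry point are omitted. */
static void android_app_entry_sketch(void *data)
{
   struct android_app *android_app = (struct android_app*)data;

   /* ... attach the thread to the JVM, create the ALooper for msgread ... */

   slock_lock(android_app->mutex);
   android_app->running = 1;
   scond_broadcast(android_app->cond);
   slock_unlock(android_app->mutex);

   /* ... run the frontend main loop, then tear down and exit ... */
}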
static void frontend_gx_init(void *data) { (void)data; #ifdef HW_RVL IOS_ReloadIOS(IOS_GetVersion()); L2Enhance(); #ifndef IS_SALAMANDER gx_init_mem2(); #endif #endif #ifdef USBGECKO DEBUG_Init(GDBSTUB_DEVICE_USB, 1); _break(); #endif #if defined(DEBUG) && defined(IS_SALAMANDER) VIInit(); GXRModeObj *rmode = VIDEO_GetPreferredMode(NULL); void *xfb = MEM_K0_TO_K1(SYS_AllocateFramebuffer(rmode)); console_init(xfb, 20, 20, rmode->fbWidth, rmode->xfbHeight, rmode->fbWidth * VI_DISPLAY_PIX_SZ); VIConfigure(rmode); VISetNextFramebuffer(xfb); VISetBlack(FALSE); VIFlush(); VIWaitForRetrace(); VIWaitForRetrace(); #endif #ifndef DEBUG __exception_setreload(8); #endif fatInitDefault(); #ifdef HAVE_LOGGER devoptab_list[STD_OUT] = &dotab_stdout; devoptab_list[STD_ERR] = &dotab_stdout; dotab_stdout.write_r = gx_logger_net; #elif defined(HAVE_FILE_LOGGER) && !defined(IS_SALAMANDER) devoptab_list[STD_OUT] = &dotab_stdout; devoptab_list[STD_ERR] = &dotab_stdout; dotab_stdout.write_r = gx_logger_file; #endif #if defined(HW_RVL) && !defined(IS_SALAMANDER) gx_devices[GX_DEVICE_SD].interface = &__io_wiisd; gx_devices[GX_DEVICE_SD].name = "sd"; gx_devices[GX_DEVICE_SD].mounted = fatMountSimple( gx_devices[GX_DEVICE_SD].name, gx_devices[GX_DEVICE_SD].interface); gx_devices[GX_DEVICE_USB].interface = &__io_usbstorage; gx_devices[GX_DEVICE_USB].name = "usb"; gx_devices[GX_DEVICE_USB].mounted = fatMountSimple( gx_devices[GX_DEVICE_USB].name, gx_devices[GX_DEVICE_USB].interface); gx_device_cond_mutex = slock_new(); gx_device_cond = scond_new(); gx_device_mutex = slock_new(); gx_device_thread = sthread_create(gx_devthread, NULL); #endif }
static void *gfx_ctx_vc_init(video_frame_info_t *video_info, void *video_driver) { VC_DISPMANX_ALPHA_T alpha; EGLint n, major, minor; DISPMANX_ELEMENT_HANDLE_T dispman_element; DISPMANX_DISPLAY_HANDLE_T dispman_display; DISPMANX_UPDATE_HANDLE_T dispman_update; DISPMANX_MODEINFO_T dispman_modeinfo; VC_RECT_T dst_rect; VC_RECT_T src_rect; #ifdef HAVE_EGL static const EGLint attribute_list[] = { EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, EGL_ALPHA_SIZE, 8, EGL_DEPTH_SIZE, 16, EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE }; static const EGLint context_attributes[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE }; #endif settings_t *settings = config_get_ptr(); vc_ctx_data_t *vc = NULL; if (g_egl_inited) { RARCH_ERR("[VC/EGL]: Attempted to re-initialize driver.\n"); return NULL; } vc = (vc_ctx_data_t*)calloc(1, sizeof(*vc)); if (!vc) return NULL; /* If we set this env variable, Broadcom's EGL implementation will block * on vsync with a double buffer when we call eglSwapBuffers. Less input lag! * Has to be done before any EGL call. * NOTE this is commented out because it should be the right way to do it, but * currently it doesn't work, so we are using a vsync callback based solution.*/ /* if (video_info->max_swapchain_images <= 2) setenv("V3D_DOUBLE_BUFFER", "1", 1); else setenv("V3D_DOUBLE_BUFFER", "0", 1); */ bcm_host_init(); #ifdef HAVE_EGL if (!egl_init_context(&vc->egl, EGL_NONE, EGL_DEFAULT_DISPLAY, &major, &minor, &n, attribute_list)) { egl_report_error(); goto error; } if (!egl_create_context(&vc->egl, (vc_api == GFX_CTX_OPENGL_ES_API) ? context_attributes : NULL)) { egl_report_error(); goto error; } #endif /* Create an EGL window surface. */ if (graphics_get_display_size(0 /* LCD */, &vc->fb_width, &vc->fb_height) < 0) goto error; dst_rect.x = 0; dst_rect.y = 0; dst_rect.width = vc->fb_width; dst_rect.height = vc->fb_height; src_rect.x = 0; src_rect.y = 0; /* Use dispmanx upscaling if fullscreen_x * and fullscreen_y are set. */ if ((settings->uints.video_fullscreen_x != 0) && (settings->uints.video_fullscreen_y != 0)) { /* Keep input and output aspect ratio equal. * There are other aspect ratio settings which can be used to stretch video output. */ /* Calculate source and destination aspect ratios. */ float srcAspect = (float)settings->uints.video_fullscreen_x / (float)settings->uints.video_fullscreen_y; float dstAspect = (float)vc->fb_width / (float)vc->fb_height; /* If source and destination aspect ratios are not equal correct source width. */ if (srcAspect != dstAspect) src_rect.width = (unsigned)(settings->uints.video_fullscreen_y * dstAspect) << 16; else src_rect.width = settings->uints.video_fullscreen_x << 16; src_rect.height = settings->uints.video_fullscreen_y << 16; } else { src_rect.width = vc->fb_width << 16; src_rect.height = vc->fb_height << 16; } dispman_display = vc_dispmanx_display_open(0 /* LCD */); vc->dispman_display = dispman_display; vc_dispmanx_display_get_info(dispman_display, &dispman_modeinfo); dispman_update = vc_dispmanx_update_start(0); alpha.flags = DISPMANX_FLAGS_ALPHA_FIXED_ALL_PIXELS; alpha.opacity = 255; alpha.mask = 0; dispman_element = vc_dispmanx_element_add(dispman_update, dispman_display, 0 /*layer*/, &dst_rect, 0 /*src*/, &src_rect, DISPMANX_PROTECTION_NONE, &alpha, 0 /*clamp*/, DISPMANX_NO_ROTATE); vc->native_window.element = dispman_element; /* Use dispmanx upscaling if fullscreen_x and fullscreen_y are set. */ if (settings->uints.video_fullscreen_x != 0 && settings->uints.video_fullscreen_y != 0) { /* Keep input and output aspect ratio equal. * There are other aspect ratio settings which * can be used to stretch video output. */ /* Calculate source and destination aspect ratios. */ float srcAspect = (float)settings->uints.video_fullscreen_x / (float)settings->uints.video_fullscreen_y; float dstAspect = (float)vc->fb_width / (float)vc->fb_height; /* If source and destination aspect ratios are not equal correct source width. */ if (srcAspect != dstAspect) vc->native_window.width = (unsigned)(settings->uints.video_fullscreen_y * dstAspect); else vc->native_window.width = settings->uints.video_fullscreen_x; vc->native_window.height = settings->uints.video_fullscreen_y; } else { vc->native_window.width = vc->fb_width; vc->native_window.height = vc->fb_height; } vc_dispmanx_update_submit_sync(dispman_update); #ifdef HAVE_EGL if (!egl_create_surface(&vc->egl, &vc->native_window)) goto error; #endif /* For vsync after eglSwapBuffers when max_swapchain < 3 */ vc->vsync_condition = scond_new(); vc->vsync_condition_mutex = slock_new(); vc->vsync_callback_set = false; if (video_info->max_swapchain_images <= 2) { /* Start sending vsync callbacks so we can wait for vsync after eglSwapBuffers */ vc_dispmanx_vsync_callback(vc->dispman_display, dispmanx_vsync_callback, (void*)vc); vc->vsync_callback_set = true; } return vc; error: gfx_ctx_vc_destroy(video_driver); return NULL; }