static int control(struct dec_audio *da, int cmd, void *arg)
{
    struct ad_mpg123_context *con = da->priv;

    switch (cmd) {
    case ADCTRL_RESET:
        mp_audio_set_null_data(&da->decoded);
        mpg123_close(con->handle);
        if (mpg123_open_feed(con->handle) != MPG123_OK) {
            MP_ERR(da, "mpg123 failed to reopen stream: %s\n",
                   mpg123_strerror(con->handle));
            return CONTROL_FALSE;
        }
        return CONTROL_TRUE;
    }
    return CONTROL_UNKNOWN;
}
static int hotplug_init(struct ao *ao)
{
    MP_DBG(ao, "Hotplug init\n");
    struct wasapi_state *state = ao->priv;
    state->log = ao->log;

    CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
    HRESULT hr = CoCreateInstance(&CLSID_MMDeviceEnumerator, NULL, CLSCTX_ALL,
                                  &IID_IMMDeviceEnumerator,
                                  (void **)&state->pEnumerator);
    EXIT_ON_ERROR(hr);
    hr = wasapi_change_init(ao, true);
    EXIT_ON_ERROR(hr);

    return 0;
exit_label:
    MP_ERR(state, "Error setting up audio hotplug: %s\n", mp_HRESULT_to_str(hr));
    hotplug_uninit(ao);
    return -1;
}
static int recreate_audio_filters(struct MPContext *mpctx)
{
    struct ao_chain *ao_c = mpctx->ao_chain;
    assert(ao_c);

    if (!mp_output_chain_update_filters(ao_c->filter, mpctx->opts->af_settings))
        goto fail;

    update_speed_filters(mpctx);

    mp_notify(mpctx, MPV_EVENT_AUDIO_RECONFIG, NULL);

    return 0;

fail:
    MP_ERR(mpctx, "Audio filter initialization failed!\n");
    return -1;
}
static void hotplug_uninit(struct ao *ao)
{
    OSStatus err = noErr;
    for (int i = 0; i < MP_ARRAY_SIZE(hotplug_properties); i++) {
        AudioObjectPropertyAddress addr = {
            hotplug_properties[i],
            kAudioObjectPropertyScopeGlobal,
            kAudioObjectPropertyElementMaster
        };
        err = AudioObjectRemovePropertyListener(
            kAudioObjectSystemObject, &addr, hotplug_cb, (void *)ao);
        if (err != noErr) {
            char *c1 = fourcc_repr(hotplug_properties[i]);
            char *c2 = fourcc_repr(err);
            MP_ERR(ao, "failed to remove device listener %s (%s)\n", c1, c2);
        }
    }
}
bool ca_init_chmap(struct ao *ao, AudioDeviceID device)
{
    struct mp_chmap_sel chmap_sel = {0};
    ca_retrieve_layouts(ao, &chmap_sel, device);

    if (!chmap_sel.num_chmaps)
        mp_chmap_sel_add_map(&chmap_sel, &(struct mp_chmap)MP_CHMAP_INIT_STEREO);

    mp_chmap_sel_add_map(&chmap_sel, &(struct mp_chmap)MP_CHMAP_INIT_MONO);

    if (!ao_chmap_sel_adjust(ao, &chmap_sel, &ao->channels)) {
        MP_ERR(ao, "could not select a suitable channel map among the "
                   "hardware supported ones. Make sure to configure your "
                   "output device correctly in 'Audio MIDI Setup.app'\n");
        return false;
    }
    return true;
}
static void update_framebuffer_from_bo(const struct MPGLContext *ctx,
                                       struct gbm_bo *bo)
{
    struct priv *p = ctx->priv;
    p->fb.bo = bo;
    p->fb.fd = p->kms->fd;
    p->fb.width = gbm_bo_get_width(bo);
    p->fb.height = gbm_bo_get_height(bo);
    int stride = gbm_bo_get_stride(bo);
    int handle = gbm_bo_get_handle(bo).u32;

    int ret = drmModeAddFB(p->kms->fd, p->fb.width, p->fb.height,
                           24, 32, stride, handle, &p->fb.id);
    if (ret) {
        MP_ERR(ctx->vo, "Failed to create framebuffer: %s\n",
               mp_strerror(errno));
    }
    gbm_bo_set_user_data(bo, &p->fb, framebuffer_destroy_callback);
}
static void get_and_update_icc_profile(struct gl_priv *p, int *events)
{
    if (gl_video_icc_auto_enabled(p->renderer)) {
        MP_VERBOSE(p, "Querying ICC profile...\n");
        bstr icc = bstr0(NULL);
        int r = mpgl_control(p->glctx, events, VOCTRL_GET_ICC_PROFILE, &icc);

        if (r != VO_NOTAVAIL) {
            if (r == VO_FALSE) {
                MP_WARN(p, "Could not retrieve an ICC profile.\n");
            } else if (r == VO_NOTIMPL) {
                MP_ERR(p, "icc-profile-auto not implemented on this platform.\n");
            }

            gl_video_set_icc_profile(p->renderer, icc);
        }
    }
}
// Iterate entries. The first call establishes the first entry. Returns false
// if no entry found, otherwise returns true and sets mpa->entry/entry_filename.
bool mp_archive_next_entry(struct mp_archive *mpa)
{
    mpa->entry = NULL;
    talloc_free(mpa->entry_filename);
    mpa->entry_filename = NULL;

    if (!mpa->arch)
        return false;

    locale_t oldlocale = uselocale(mpa->locale);
    bool success = false;

    while (!mp_cancel_test(mpa->primary_src->cancel)) {
        struct archive_entry *entry;
        int r = archive_read_next_header(mpa->arch, &entry);
        if (r == ARCHIVE_EOF)
            break;
        if (r < ARCHIVE_OK)
            MP_ERR(mpa, "%s\n", archive_error_string(mpa->arch));
        if (r < ARCHIVE_WARN) {
            MP_FATAL(mpa, "could not read archive entry\n");
            mp_archive_check_fatal(mpa, r);
            break;
        }
        if (archive_entry_filetype(entry) != AE_IFREG)
            continue;
        // Some archives may have no filenames, or libarchive won't return some.
        const char *fn = archive_entry_pathname(entry);
        char buf[64];
        if (!fn || bstr_validate_utf8(bstr0(fn)) < 0) {
            snprintf(buf, sizeof(buf), "mpv_unknown#%d", mpa->entry_num);
            fn = buf;
        }
        mpa->entry = entry;
        mpa->entry_filename = talloc_strdup(mpa, fn);
        mpa->entry_num += 1;
        success = true;
        break;
    }

    uselocale(oldlocale);
    return success;
}
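/* Editor's note: a minimal usage sketch (not from the original source) showing
 * how the iterator above is typically driven. It assumes `mpa` was created by
 * the archive-opening helper elsewhere in this file; print_entries() is an
 * illustrative name. */
static void print_entries(struct mp_archive *mpa)
{
    // Each successful call advances to the next regular-file entry and sets
    // mpa->entry / mpa->entry_filename; false means EOF, cancellation, or error.
    while (mp_archive_next_entry(mpa))
        MP_VERBOSE(mpa, "entry: %s\n", mpa->entry_filename);
}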
static HRESULT get_device_delay(struct wasapi_state *state, double *delay_us)
{
    UINT64 sample_count = atomic_load(&state->sample_count);
    UINT64 position, qpc_position;
    HRESULT hr;

    hr = IAudioClock_GetPosition(state->pAudioClock, &position, &qpc_position);
    // GetPosition succeeded, but the result may be
    // inaccurate due to the length of the call
    // http://msdn.microsoft.com/en-us/library/windows/desktop/dd370889%28v=vs.85%29.aspx
    if (hr == S_FALSE) {
        MP_VERBOSE(state, "Possibly inaccurate device position.\n");
        hr = S_OK;
    }
    EXIT_ON_ERROR(hr);

    // convert position to number of samples, careful to avoid overflow
    UINT64 sample_position = uint64_scale(position,
                                          state->format.Format.nSamplesPerSec,
                                          state->clock_frequency);
    INT64 diff = sample_count - sample_position;
    *delay_us = diff * 1e6 / state->format.Format.nSamplesPerSec;

    // Correct for any delay in IAudioClock_GetPosition above.
    // This should normally be very small (<1 us), but just in case. . .
    LARGE_INTEGER qpc;
    QueryPerformanceCounter(&qpc);
    INT64 qpc_diff = av_rescale(qpc.QuadPart, 10000000,
                                state->qpc_frequency.QuadPart) - qpc_position;
    // ignore the above calculation if it yields more than 10 seconds (due to
    // possible overflow inside IAudioClock_GetPosition)
    if (qpc_diff < 10 * 10000000) {
        *delay_us -= qpc_diff / 10.0; // convert to us
    } else {
        MP_VERBOSE(state, "Insane qpc delay correction of %g seconds. "
                   "Ignoring it.\n", qpc_diff / 10000000.0);
    }

    MP_TRACE(state, "Device delay: %g us\n", *delay_us);

    return S_OK;
exit_label:
    MP_ERR(state, "Error getting device delay: %s\n", mp_HRESULT_to_str(hr));
    return hr;
}
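/* Editor's note: uint64_scale() is not shown in this excerpt. The usual way to
 * compute position * rate / clock_frequency without overflowing 64 bits is to
 * split the division first; the sketch below illustrates that technique and is
 * not necessarily the exact helper used here. */
static UINT64 uint64_scale(UINT64 x, UINT64 num, UINT64 den)
{
    // Exact as long as den fits in 32 bits:
    // x*num/den == (x/den)*num + (x%den)*(num/den) + (x%den)*(num%den)/den
    return (x / den) * num
         + (x % den) * (num / den)
         + (x % den) * (num % den) / den;
}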
// Return true if this was a readable directory.
static bool scan_dir(struct pl_parser *p, char *path,
                     struct stat *dir_stack, int num_dir_stack,
                     char ***files, int *num_files)
{
    if (strlen(path) >= 8192 || num_dir_stack == MAX_DIR_STACK)
        return false; // things like mount bind loops

    DIR *dp = opendir(path);
    if (!dp) {
        MP_ERR(p, "Could not read directory.\n");
        return false;
    }

    struct dirent *ep;
    while ((ep = readdir(dp))) {
        if (ep->d_name[0] == '.')
            continue;

        if (mp_cancel_test(p->s->cancel))
            break;

        char *file = mp_path_join(p, path, ep->d_name);

        struct stat st;
        if (stat(file, &st) == 0 && S_ISDIR(st.st_mode)) {
            for (int n = 0; n < num_dir_stack; n++) {
                if (same_st(&dir_stack[n], &st)) {
                    MP_VERBOSE(p, "Skip recursive entry: %s\n", file);
                    goto skip;
                }
            }

            dir_stack[num_dir_stack] = st;
            scan_dir(p, file, dir_stack, num_dir_stack + 1, files, num_files);
        } else {
            MP_TARRAY_APPEND(p, *files, *num_files, file);
        }

    skip: ;
    }

    closedir(dp);
    return true;
}
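/* Editor's note: same_st() is not part of this excerpt. Detecting that a
 * directory was already visited (symlink or bind-mount loops) is conventionally
 * done by comparing device and inode numbers; an illustrative definition: */
static bool same_st(struct stat *st1, struct stat *st2)
{
    return st1->st_dev == st2->st_dev && st1->st_ino == st2->st_ino;
}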
static void thread_feed(struct ao *ao)
{
    struct wasapi_state *state = ao->priv;
    HRESULT hr;

    UINT32 frame_count = state->bufferFrameCount;
    if (state->share_mode == AUDCLNT_SHAREMODE_SHARED) {
        UINT32 padding = 0;
        hr = IAudioClient_GetCurrentPadding(state->pAudioClient, &padding);
        EXIT_ON_ERROR(hr);
        frame_count -= padding;
        MP_TRACE(ao, "Frame to fill: %"PRIu32". Padding: %"PRIu32"\n",
                 frame_count, padding);
    }
    double delay;
    hr = get_device_delay(state, &delay);
    EXIT_ON_ERROR(hr);

    BYTE *pData;
    hr = IAudioRenderClient_GetBuffer(state->pRenderClient,
                                      frame_count, &pData);
    EXIT_ON_ERROR(hr);

    BYTE *data[1] = {pData};

    ao_read_data(ao, (void **)data, frame_count,
                 (int64_t)(mp_time_us() + delay * 1e6 + frame_count * 1e6 /
                           state->format.Format.nSamplesPerSec));

    hr = IAudioRenderClient_ReleaseBuffer(state->pRenderClient,
                                          frame_count, 0);
    EXIT_ON_ERROR(hr);

    atomic_fetch_add(&state->sample_count, frame_count);

    return;
exit_label:
    MP_ERR(state, "Error feeding audio: %s\n", mp_HRESULT_to_str(hr));
    MP_VERBOSE(ao, "Requesting ao reload\n");
    ao_request_reload(ao);
    return;
}
bool video_init_best_codec(struct dec_video *d_video, char *video_decoders)
{
    assert(!d_video->vd_driver);
    video_reset_decoding(d_video);
    d_video->has_broken_packet_pts = -10; // needs 10 packets to reach decision

    struct mp_decoder_entry *decoder = NULL;
    struct mp_decoder_list *list =
        mp_select_video_decoders(d_video->header->codec, video_decoders);

    mp_print_decoders(d_video->log, MSGL_V, "Codec list:", list);

    for (int n = 0; n < list->num_entries; n++) {
        struct mp_decoder_entry *sel = &list->entries[n];
        const struct vd_functions *driver = find_driver(sel->family);
        if (!driver)
            continue;
        MP_VERBOSE(d_video, "Opening video decoder %s:%s\n",
                   sel->family, sel->decoder);
        d_video->vd_driver = driver;
        if (init_video_codec(d_video, sel->decoder)) {
            decoder = sel;
            break;
        }
        d_video->vd_driver = NULL;
        MP_WARN(d_video, "Video decoder init failed for %s:%s\n",
                sel->family, sel->decoder);
    }

    if (d_video->vd_driver) {
        d_video->decoder_desc =
            talloc_asprintf(d_video, "%s [%s:%s]", decoder->desc,
                            decoder->family, decoder->decoder);
        MP_INFO(d_video, "Selected video codec: %s\n", d_video->decoder_desc);
    } else {
        MP_ERR(d_video, "Failed to initialize a video decoder for codec '%s'.\n",
               d_video->header->codec ? d_video->header->codec : "<unknown>");
    }

    talloc_free(list);
    return !!d_video->vd_driver;
}
static int filter_ext(struct vf_instance *vf, struct mp_image *mpi)
{
    VdpStatus vdp_st;
    struct vf_priv_s *p = vf->priv;
    struct mp_vdpau_ctx *ctx = p->ctx;
    struct vdp_functions *vdp = &ctx->vdp;

    if (!mpi) {
        return 0;
    }

    // Pass-through anything that's not been decoded by VDPAU
    if (mpi->imgfmt != IMGFMT_VDPAU) {
        vf_add_output_frame(vf, mpi);
        return 0;
    }

    if (mp_vdpau_mixed_frame_get(mpi)) {
        MP_ERR(vf, "Can't apply vdpaurb filter after vdpaupp filter.\n");
        mp_image_unrefp(&mpi);
        return -1;
    }

    struct mp_image *out = vf_alloc_out_image(vf);
    if (!out) {
        mp_image_unrefp(&mpi);
        return -1;
    }
    mp_image_copy_attributes(out, mpi);

    VdpVideoSurface surface = (uintptr_t)mpi->planes[3];
    assert(surface > 0);

    vdp_st = vdp->video_surface_get_bits_y_cb_cr(surface,
                                                 VDP_YCBCR_FORMAT_NV12,
                                                 (void * const *)out->planes,
                                                 out->stride);
    CHECK_VDP_WARNING(vf, "Error when calling vdp_video_surface_get_bits_y_cb_cr");

    vf_add_output_frame(vf, out);
    mp_image_unrefp(&mpi);
    return 0;
}
int audio_init_best_codec(struct dec_audio *d_audio, char *audio_decoders)
{
    assert(!d_audio->ad_driver);
    audio_reset_decoding(d_audio);

    struct mp_decoder_entry *decoder = NULL;
    struct mp_decoder_list *list =
        audio_select_decoders(d_audio->header->codec, audio_decoders);

    mp_print_decoders(d_audio->log, MSGL_V, "Codec list:", list);

    for (int n = 0; n < list->num_entries; n++) {
        struct mp_decoder_entry *sel = &list->entries[n];
        const struct ad_functions *driver = find_driver(sel->family);
        if (!driver)
            continue;
        MP_VERBOSE(d_audio, "Opening audio decoder %s:%s\n",
                   sel->family, sel->decoder);
        d_audio->ad_driver = driver;
        if (init_audio_codec(d_audio, sel->decoder)) {
            decoder = sel;
            break;
        }
        MP_WARN(d_audio, "Audio decoder init failed for %s:%s\n",
                sel->family, sel->decoder);
    }

    if (d_audio->ad_driver) {
        d_audio->decoder_desc =
            talloc_asprintf(d_audio, "%s [%s:%s]", decoder->desc,
                            decoder->family, decoder->decoder);
        MP_VERBOSE(d_audio, "Selected audio codec: %s\n", d_audio->decoder_desc);
        MP_VERBOSE(d_audio, "AUDIO: %d Hz, %d ch, %s\n",
                   d_audio->decoded.rate, d_audio->decoded.channels.num,
                   af_fmt_to_str(d_audio->decoded.format));
    } else {
        MP_ERR(d_audio, "Failed to initialize an audio decoder for codec '%s'.\n",
               d_audio->header->codec ? d_audio->header->codec : "<unknown>");
    }

    talloc_free(list);
    return !!d_audio->ad_driver;
}
// Reconfigure the filter chain according to the decoder output.
// probe_only: don't force a fallback to software decoding if hardware decoding
//             is in use and the filter chain could not be configured
static void filter_reconfig(struct MPContext *mpctx, bool probe_only)
{
    struct dec_video *d_video = mpctx->d_video;

    struct mp_image_params params = d_video->decoder_output;

    set_allowed_vo_formats(d_video->vfilter, mpctx->video_out);

    if (video_reconfig_filters(d_video, &params) < 0) {
        // Most video filters don't work with hardware decoding, so this
        // might be the reason why filter reconfig failed.
        if (!probe_only &&
            video_vd_control(d_video, VDCTRL_FORCE_HWDEC_FALLBACK, NULL)
                == CONTROL_OK)
        {
            // Fallback active; decoder will return software format next
            // time. Don't abort video decoding.
            d_video->vfilter->initialized = 0;
            mp_image_unrefp(&d_video->waiting_decoded_mpi);
            d_video->decoder_output = (struct mp_image_params){0};
            MP_VERBOSE(mpctx, "hwdec fallback due to filters.\n");
        }
        return;
    }

    if (d_video->vfilter->initialized < 1)
        return;

    if (params.rotate && (params.rotate % 90 == 0)) {
        if (!(mpctx->video_out->driver->caps & VO_CAP_ROTATE90)) {
            // Try to insert a rotation filter.
            char deg[10];
            snprintf(deg, sizeof(deg), "%d", params.rotate);
            char *args[] = {"angle", deg, NULL, NULL};
            if (try_filter(mpctx, params, "rotate", "autorotate", args) >= 0) {
                params.rotate = 0;
            } else {
                MP_ERR(mpctx, "Can't insert rotation filter.\n");
            }
        }
    }
}
int ai_alsa_init(audio_in_t *ai)
{
    int err;

    err = snd_pcm_open(&ai->alsa.handle, ai->alsa.device,
                       SND_PCM_STREAM_CAPTURE, 0);
    if (err < 0) {
        MP_ERR(ai, "Error opening audio: %s\n", snd_strerror(err));
        return -1;
    }

    err = snd_output_stdio_attach(&ai->alsa.log, stderr, 0);
    if (err < 0) {
        return -1;
    }

    err = ai_alsa_setup(ai);

    return err;
}
int dvb_step_channel(stream_t *stream, int dir)
{
    int new_current;
    dvb_channels_list *list;
    dvb_priv_t *priv = stream->priv;
    dvb_state_t *state = priv->state;

    MP_VERBOSE(stream, "DVB_STEP_CHANNEL dir %d\n", dir);

    list = state->list;
    if (list == NULL) {
        MP_ERR(stream, "dvb_step_channel: NULL list_ptr, quit\n");
        return 0;
    }

    new_current = (list->NUM_CHANNELS + list->current +
                   (dir >= 0 ? 1 : -1)) % list->NUM_CHANNELS;

    return dvb_set_channel(stream, state->card, new_current);
}
static void write_packet(struct ao *ao, AVPacket *packet)
{
    // TODO: Can we unify this with the equivalent video code path?
    struct priv *ac = ao->priv;

    packet->stream_index = ac->stream->index;
    if (packet->pts != AV_NOPTS_VALUE) {
        packet->pts = av_rescale_q(packet->pts,
                                   ac->codec->time_base,
                                   ac->stream->time_base);
    } else {
        // Do we need this at all? Better be safe than sorry...
        MP_WARN(ao, "encoder lost pts, why?\n");
        if (ac->savepts != MP_NOPTS_VALUE) {
            packet->pts = av_rescale_q(ac->savepts,
                                       ac->codec->time_base,
                                       ac->stream->time_base);
        }
    }
    if (packet->dts != AV_NOPTS_VALUE) {
        packet->dts = av_rescale_q(packet->dts,
                                   ac->codec->time_base,
                                   ac->stream->time_base);
    }
    if (packet->duration > 0) {
        packet->duration = av_rescale_q(packet->duration,
                                        ac->codec->time_base,
                                        ac->stream->time_base);
    }

    ac->savepts = AV_NOPTS_VALUE;

    if (encode_lavc_write_frame(ao->encode_lavc_ctx, ac->stream, packet) < 0) {
        MP_ERR(ao, "error writing at %d %d/%d\n",
               (int)packet->pts,
               ac->stream->time_base.num,
               ac->stream->time_base.den);
        return;
    }
}
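/* Editor's note: the av_rescale_q() calls above convert timestamps from the
 * encoder's time base to the muxer stream's time base, i.e. value * bq / cq
 * with 64-bit-safe rounding. A worked example with arbitrary sample values: */
#include <libavutil/mathematics.h>

static void rescale_example(void)
{
    // 1.5 s in a 1/48000 encoder time base is 72000 ticks; in a 1/90000
    // (MPEG-TS style) stream time base the same instant is 135000 ticks.
    int64_t pts = av_rescale_q(72000, (AVRational){1, 48000},
                               (AVRational){1, 90000});
    // pts == 135000
    (void)pts;
}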
/**
 * @brief Copy constructor for stationlist
 *
 * @see parse_setup_stationlist
 */
static int copycreate_stationlist (struct pvr_t *pvr,
                                   stationlist_t *stationlist, int num)
{
    int i;

    if (chantab < 0 || !stationlist)
        return -1;

    num = FFMAX (num, chanlists[chantab].count);

    free (stationlist->list);
    stationlist->list = NULL;

    stationlist->total = 0;
    stationlist->enabled = 0;
    stationlist->used = 0;
    stationlist->list = calloc (num, sizeof (station_elem_t));

    if (!stationlist->list) {
        MP_ERR(pvr, "%s No memory allocated for station list, giving up\n",
               LOG_LEVEL_V4L2);
        return -1;
    }

    /* transport the channel list data to our extended struct */
    stationlist->total = num;
    BUFSTRCPY(stationlist->name, chanlists[chantab].name);

    for (i = 0; i < chanlists[chantab].count; i++) {
        stationlist->list[i].station[0] = '\0'; /* no station name yet */
        BUFSTRCPY(stationlist->list[i].name, chanlists[chantab].list[i].name);
        stationlist->list[i].freq = chanlists[chantab].list[i].freq;
        stationlist->list[i].enabled = 1; /* default enabled */
        stationlist->enabled++;
        stationlist->used++;
    }

    return 0;
}
static bool create_dc(struct MPGLContext *ctx, int flags)
{
    struct w32_context *w32_ctx = ctx->priv;
    HWND win = vo_w32_hwnd(ctx->vo);

    if (w32_ctx->hdc)
        return true;

    HDC hdc = GetDC(win);
    if (!hdc)
        return false;

    PIXELFORMATDESCRIPTOR pfd;
    memset(&pfd, 0, sizeof pfd);
    pfd.nSize = sizeof pfd;
    pfd.nVersion = 1;
    pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
    pfd.iPixelType = PFD_TYPE_RGBA;
    pfd.cColorBits = 24;
    pfd.iLayerType = PFD_MAIN_PLANE;
    int pf = ChoosePixelFormat(hdc, &pfd);

    if (!pf) {
        MP_ERR(ctx->vo, "unable to select a valid pixel format!\n");
        ReleaseDC(win, hdc);
        return false;
    }

    SetPixelFormat(hdc, pf, &pfd);

    int pfmt = GetPixelFormat(hdc);
    if (DescribePixelFormat(hdc, pfmt, sizeof(PIXELFORMATDESCRIPTOR), &pfd)) {
        ctx->depth_r = pfd.cRedBits;
        ctx->depth_g = pfd.cGreenBits;
        ctx->depth_b = pfd.cBlueBits;
    }

    w32_ctx->hdc = hdc;
    return true;
}
int dvb_set_ts_filt(dvb_priv_t *priv, int fd, uint16_t pid,
                    dmx_pes_type_t pestype)
{
    int i;
    struct dmx_pes_filter_params pesFilterParams;

    pesFilterParams.pid = pid;
    pesFilterParams.input = DMX_IN_FRONTEND;
    pesFilterParams.output = DMX_OUT_TS_TAP;
    pesFilterParams.pes_type = pestype;
    pesFilterParams.flags = DMX_IMMEDIATE_START;

    errno = 0;
    if ((i = ioctl(fd, DMX_SET_PES_FILTER, &pesFilterParams)) < 0) {
        MP_ERR(priv, "ERROR IN SETTING DMX_FILTER %i for fd %d: ERRNO: %d\n",
               pid, fd, errno);
        return 0;
    }

    MP_VERBOSE(priv, "SET PES FILTER ON PID %d to fd %d, RESULT: %d, ERRNO: %d\n",
               pid, fd, i, errno);
    return 1;
}
/* libmpg123 has a new format ready; query and store it, and return the return
 * value of mpg123_getformat(). */
static int set_format(struct dec_audio *da)
{
    struct ad_mpg123_context *con = da->priv;
    int ret;
    long rate;
    int channels;
    int encoding;

    ret = mpg123_getformat(con->handle, &rate, &channels, &encoding);
    if (ret == MPG123_OK) {
        mp_audio_set_num_channels(&da->decoded, channels);
        da->decoded.rate = rate;
        int af = mpg123_format_to_af(encoding);
        if (!af) {
            /* This means we got a funny custom build of libmpg123 that only
               supports an unknown format. */
            MP_ERR(da, "Bad encoding from mpg123: %i.\n", encoding);
            return MPG123_ERR;
        }
        mp_audio_set_format(&da->decoded, af);
        con->sample_size = channels * af_fmt2bps(af);
    }
    return ret;
}
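/* Editor's note: mpg123_format_to_af() is not shown in this excerpt. It maps
 * libmpg123 encoding flags to mpv sample formats and returns 0 for anything
 * unsupported, which triggers the error path above. A plausible sketch; the
 * exact set of handled encodings is an assumption: */
static int mpg123_format_to_af(int mpg123_encoding)
{
    switch (mpg123_encoding) {
    case MPG123_ENC_SIGNED_16: return AF_FORMAT_S16;
    case MPG123_ENC_FLOAT_32:  return AF_FORMAT_FLOAT;
    }
    return 0; // unsupported encoding
}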
static int lavfi_reconfig(struct vf_instance *vf,
                          struct mp_image_params *in,
                          struct mp_image_params *out)
{
    struct vf_priv_s *p = vf_lw_old_priv(vf);
    if (p->angle == 4) { // "auto"
        int r = in->rotate;
        if (r < 0 || r >= 360) {
            MP_ERR(vf, "Can't apply rotation of %d degrees.\n", r);
            return -1;
        }

        if (r % 90) {
            double a = r / 180.0 * M_PI;
            vf_lw_update_graph(vf, NULL, "rotate=%f:ow=rotw(%f):oh=roth(%f)",
                               a, a, a);
        } else {
            vf_lw_update_graph(vf, NULL, "%s", rot[(r / 90) % 360]);
        }
        out->rotate = 0;
    }
    return 0;
}
static int parse_dir(struct pl_parser *p)
{
    if (p->real_stream->type != STREAMTYPE_DIR)
        return -1;
    if (p->probing)
        return 0;

    char *path = mp_file_get_path(p, bstr0(p->real_stream->url));
    if (strlen(path) >= 8192)
        return -1; // things like mount bind loops

    DIR *dp = opendir(path);
    if (!dp) {
        MP_ERR(p, "Could not read directory.\n");
        return -1;
    }

    char **files = NULL;
    int num_files = 0;

    struct dirent *ep;
    while ((ep = readdir(dp))) {
        if (strcmp(ep->d_name, ".") == 0 || strcmp(ep->d_name, "..") == 0)
            continue;
        MP_TARRAY_APPEND(p, files, num_files, talloc_strdup(p, ep->d_name));
    }

    if (files)
        qsort(files, num_files, sizeof(files[0]), cmp_filename);

    for (int n = 0; n < num_files; n++)
        playlist_add_file(p->pl, mp_path_join(p, path, files[n]));

    closedir(dp);

    p->add_base = false;

    return num_files > 0 ? 0 : -1;
}
static void *client_thread(void *p)
{
    pthread_detach(pthread_self());

    int rc;

    struct client_arg *arg = p;
    bstr client_msg = { talloc_strdup(NULL, ""), 0 };

    mpthread_set_name(arg->client_name);

    int pipe_fd = mpv_get_wakeup_pipe(arg->client);
    if (pipe_fd < 0) {
        MP_ERR(arg, "Could not get wakeup pipe\n");
        goto done;
    }

    MP_VERBOSE(arg, "Client connected\n");

    struct pollfd fds[2] = {
        {.events = POLLIN, .fd = pipe_fd},
        {.events = POLLIN, .fd = arg->client_fd},
static void add_pts_to_sort(struct dec_video *d_video, double pts)
{
    if (pts != MP_NOPTS_VALUE) {
        int delay = -1;
        video_vd_control(d_video, VDCTRL_QUERY_UNSEEN_FRAMES, &delay);
        if (delay >= 0 && delay < d_video->num_buffered_pts)
            d_video->num_buffered_pts = delay;
        if (d_video->num_buffered_pts ==
            sizeof(d_video->buffered_pts) / sizeof(double))
            MP_ERR(d_video, "Too many buffered pts\n");
        else {
            int i, j;
            for (i = 0; i < d_video->num_buffered_pts; i++)
                if (d_video->buffered_pts[i] < pts)
                    break;
            for (j = d_video->num_buffered_pts; j > i; j--)
                d_video->buffered_pts[j] = d_video->buffered_pts[j - 1];
            d_video->buffered_pts[i] = pts;
            d_video->num_buffered_pts++;
        }
    }
}
static int af_open(struct af_instance *af)
{
    struct af_resample *s = af->priv;

    af->control = control;
    af->uninit  = uninit;
    af->filter  = filter;

    if (s->opts.cutoff <= 0.0)
        s->opts.cutoff = af_resample_default_cutoff(s->opts.filter_size);

    s->avrctx = avresample_alloc_context();
    s->avrctx_out = avresample_alloc_context();

    if (s->avrctx && s->avrctx_out) {
        return AF_OK;
    } else {
        MP_ERR(af, "Cannot initialize libavresample context.\n");
        uninit(af);
        return AF_ERROR;
    }
}
static CGLError test_gl_version(struct vo *vo, CGLContextObj *ctx,
                                CGLPixelFormatObj *pix,
                                CGLOpenGLProfile version)
{
    CGLPixelFormatAttribute attrs[] = {
        kCGLPFAOpenGLProfile,
        (CGLPixelFormatAttribute)version,
        kCGLPFADoubleBuffer,
        kCGLPFAAccelerated,
        #if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_8
        // leave this as the last entry of the array to not break the fallback
        // code
        kCGLPFASupportsAutomaticGraphicsSwitching,
        #endif
        0
    };

    GLint npix;
    CGLError err;
    err = CGLChoosePixelFormat(attrs, pix, &npix);
    if (err == kCGLBadAttribute) {
        // kCGLPFASupportsAutomaticGraphicsSwitching is probably not supported
        // by the current hardware. Falling back to not using it.
        attrs[MP_ARRAY_SIZE(attrs) - 2] = 0;
        err = CGLChoosePixelFormat(attrs, pix, &npix);
    }

    if (err != kCGLNoError) {
        MP_ERR(vo, "error creating CGL pixel format: %s (%d)\n",
               CGLErrorString(err), err);
        goto error_out;
    }

    err = CGLCreateContext(*pix, 0, ctx);

error_out:
    return err;
}
HRESULT wasapi_change_init(struct ao *ao, bool is_hotplug)
{
    struct wasapi_state *state = ao->priv;
    struct change_notify *change = &state->change;
    HRESULT hr = CoCreateInstance(&CLSID_MMDeviceEnumerator, NULL, CLSCTX_ALL,
                                  &IID_IMMDeviceEnumerator,
                                  (void **)&change->pEnumerator);
    EXIT_ON_ERROR(hr);

    // COM voodoo to emulate c++ class
    change->client.lpVtbl = &sIMMNotificationClientVtbl;

    // register the change notification client
    hr = IMMDeviceEnumerator_RegisterEndpointNotificationCallback(
        change->pEnumerator, (IMMNotificationClient *)change);
    EXIT_ON_ERROR(hr);

    // so the callbacks can access the ao
    change->ao = ao;

    // whether or not this is the hotplug instance
    change->is_hotplug = is_hotplug;

    if (is_hotplug) {
        MP_DBG(ao, "Monitoring for hotplug events\n");
    } else {
        // Get the device string to compare with the pwstrDeviceId
        change->monitored = state->deviceID;
        MP_VERBOSE(ao, "Monitoring changes in device %S\n", change->monitored);
    }

    return hr;
exit_label:
    MP_ERR(state, "Error setting up device change monitoring: %s\n",
           mp_HRESULT_to_str(hr));
    wasapi_change_uninit(ao);
    return hr;
}
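/* Editor's note: the "COM voodoo" above is the usual way to implement a COM
 * interface in plain C. The notification object embeds the interface (whose
 * only member is the vtable pointer) as its first field, and each callback
 * casts the interface pointer back to the containing struct. The layout and
 * callback below are illustrative only; the real struct change_notify and its
 * vtable are defined elsewhere in the file. */
struct change_notify {
    IMMNotificationClient client; // must be first so the cast below is valid
    IMMDeviceEnumerator *pEnumerator;
    bool is_hotplug;
    struct ao *ao;
    LPWSTR monitored;             // ID of the device this instance watches
};

static HRESULT STDMETHODCALLTYPE sIMMNotificationClient_OnDeviceAdded(
    IMMNotificationClient *This, LPCWSTR pwstrDeviceId)
{
    // Valid because `client` is the first member of struct change_notify.
    struct change_notify *change = (struct change_notify *)This;
    MP_VERBOSE(change->ao, "OnDeviceAdded: %S\n", pwstrDeviceId);
    return S_OK;
}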
static void display_handle_error(void *data, struct wl_display *display,
                                 void *object_id, uint32_t code,
                                 const char *message)
{
    struct vo_wayland_state *wl = data;
    const char *error_type_msg = "";

    switch (code) {
    case WL_DISPLAY_ERROR_INVALID_OBJECT:
        error_type_msg = "Invalid object";
        break;
    case WL_DISPLAY_ERROR_INVALID_METHOD:
        error_type_msg = "Invalid method";
        break;
    case WL_DISPLAY_ERROR_NO_MEMORY:
        error_type_msg = "No memory";
        break;
    }

    MP_ERR(wl, "%s: %s\n", error_type_msg, message);
}