コード例 #1
0
ファイル: ps3_vdec.c プロジェクト: ridams/showtime
/**
 * Cell VDEC decode-event callback.
 *
 * Keep in mind, this callback fires on different threads.  The decoder
 * context pointer travels through the 32-bit 'arg' word, hence the
 * intptr_t round-trip cast.  Always returns 0.
 */
static uint32_t
decoder_callback(uint32_t handle, uint32_t msg_type, int32_t err_code,
		 uint32_t arg)
{
  vdec_decoder_t *vdd = (vdec_decoder_t *)(intptr_t)arg;

  if(msg_type == VDEC_CALLBACK_AUDONE) {
    /* An access unit has been consumed -- wake up the submitting thread */
    hts_mutex_lock(&vdd->mtx);
    if(!vdd->submitted_au)
      TRACE(TRACE_ERROR, "VDEC", "AUDONE but no buffers pending");
    vdd->submitted_au = 0;
    hts_cond_signal(&vdd->audone);
    hts_mutex_unlock(&vdd->mtx);

  } else if(msg_type == VDEC_CALLBACK_PICOUT) {
    /* A decoded picture is ready for extraction */
    picture_out(vdd);

  } else if(msg_type == VDEC_CALLBACK_SEQDONE) {
    /* End-of-sequence processed -- wake anyone waiting in seqdone */
    hts_mutex_lock(&vdd->mtx);
    vdd->sequence_done = 1;
    hts_cond_signal(&vdd->seqdone);
    hts_mutex_unlock(&vdd->mtx);

  } else if(msg_type == VDEC_CALLBACK_ERROR) {
    TRACE(TRACE_ERROR, "VDEC", "ERROR %x", err_code);
  }

  return 0;
}
コード例 #2
0
ファイル: glw_video_opengl.c プロジェクト: dreamcat4/showtime
/**
 * (Re)initialize one video surface: create one pixel-unpack PBO per
 * plane, map each buffer for CPU writing, and park the surface on the
 * avail queue for the decoder to claim.
 *
 * NOTE(review): gv_avail_queue and its condvar are touched without any
 * visible lock here -- presumably the caller holds gv_surface_mutex;
 * verify against call sites.
 */
static void
surface_init(glw_video_t *gv, glw_video_surface_t *gvs)
{
  int i;

  // Recreate PBOs from scratch if this surface had any from a prior config
  if(gvs->gvs_pbo[0])
    glDeleteBuffers(gv->gv_planes, gvs->gvs_pbo);

  glGenBuffers(gv->gv_planes, gvs->gvs_pbo);

  // Texture objects are reused across reinits; only create them once
  if(!gvs->gvs_textures[0])
    glGenTextures(gv->gv_planes, gvs->gvs_textures);

  gvs->gvs_uploaded = 0;
  for(i = 0; i < gv->gv_planes; i++) {

    int linesize = LINESIZE(gvs->gvs_width[i], gv->gv_tex_bytes_per_pixel);

    gvs->gvs_size[i] = linesize * gvs->gvs_height[i];
    assert(gvs->gvs_size[i] > 0);

    // Allocate backing store and map it so the decoder can write pixels
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER, gvs->gvs_pbo[i]);
    glBufferData(GL_PIXEL_UNPACK_BUFFER,gvs->gvs_size[i], NULL, GL_STREAM_DRAW);
    gvs->gvs_data[i] = glMapBuffer(GL_PIXEL_UNPACK_BUFFER, GL_WRITE_ONLY);
    assert(gvs->gvs_data[i] != NULL);
  }
  glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
}
コード例 #3
0
ファイル: omx.c プロジェクト: Ezio-PS/movian
/**
 * OpenMAX IL EventHandler callback for a component.
 *
 * Runs on the OMX component's own thread.  Translates IL events into
 * state flags + condvar wakeups that the main code waits on.
 * Always returns 0 (OMX_ErrorNone).
 */
static OMX_ERRORTYPE
oc_event_handler(OMX_HANDLETYPE component, OMX_PTR opaque, OMX_EVENTTYPE event,
                 OMX_U32 data1, OMX_U32 data2, OMX_PTR eventdata)
{
  omx_component_t *oc = opaque;

#if 1
  omxdbg("%s: event  0x%x 0x%x 0x%x %p\n",
         oc->oc_name, event, (int)data1, (int)data2, eventdata);
#endif

  switch(event) {
  case OMX_EventCmdComplete:
  complete:
    // Mark the outstanding command as finished and wake all waiters
    hts_mutex_lock(oc->oc_mtx);
    oc->oc_cmd_done = 1;
    hts_cond_broadcast(&oc->oc_event_cond);
    hts_mutex_unlock(oc->oc_mtx);
    break;

  case OMX_EventError:

    switch(data1) {
    case OMX_ErrorPortUnpopulated:
      // Expected during teardown / port reconfig; ignore
      break;

    case OMX_ErrorSameState:
      // Requesting the current state counts as command completion
      goto complete;

    case OMX_ErrorStreamCorrupt:
      TRACE(TRACE_INFO, "OMX", "%s: Corrupt stream", oc->oc_name);
      hts_mutex_lock(oc->oc_mtx);
      oc->oc_stream_corrupt = 1;
      hts_cond_signal(oc->oc_avail_cond);
      hts_mutex_unlock(oc->oc_mtx);
      break;

      
    default:
      TRACE(TRACE_ERROR, "OMX", "%s: ERROR 0x%x\n", oc->oc_name, (int)data1);
      break;
    }
    break;

  case OMX_EventPortSettingsChanged:
    // e.g. output resolution became known; forward to the owner's hook
    if(oc->oc_port_settings_changed_cb != NULL)
      oc->oc_port_settings_changed_cb(oc);
    break;

  case OMX_EventMark:
    // Buffer mark propagated through the component; forward to hook
    if(oc->oc_event_mark_cb != NULL)
      oc->oc_event_mark_cb(oc, eventdata);
    break;
    
  default:
    break;
  }
  return 0;
}
コード例 #4
0
ファイル: glw_video_rpi.c プロジェクト: Overx/showtime
/**
 * OMX buffer-mark hook: a marked buffer has surfaced at the renderer.
 *
 * Tracks the PTS of consecutive marks so a frame duration can be
 * estimated when the demuxer did not provide one, then publishes the
 * buffer's metadata as the current reorder entry and wakes the video
 * queue.
 */
static void
buffer_mark(omx_component_t *oc, void *ptr)
{
  if(ptr == NULL)
    return;

  rpi_video_display_t *disp = oc->oc_opaque;
  glw_video_t *video       = disp->rvd_gv;
  media_pipe_t *pipe       = video->gv_mp;
  video_decoder_t *dec     = video->gv_vd;
  media_buf_meta_t *meta   = ptr;

  /* Shift the PTS history: previous mark becomes "last" */
  disp->rvd_last_pts = disp->rvd_pts;
  disp->rvd_pts      = meta->mbm_pts;

  if(meta->mbm_duration == 0) {
    /* No duration from upstream -- derive one from consecutive PTS */
    if(disp->rvd_last_pts != PTS_UNSET && disp->rvd_pts != PTS_UNSET)
      disp->rvd_estimated_duration = disp->rvd_pts - disp->rvd_last_pts;
    meta->mbm_duration = disp->rvd_estimated_duration;
  }

  hts_mutex_lock(&pipe->mp_mutex);
  dec->vd_reorder_current = meta;
  hts_cond_signal(&pipe->mp_video.mq_avail);
  hts_mutex_unlock(&pipe->mp_mutex);
}
コード例 #5
0
ファイル: es_timer.c プロジェクト: Ralbarker/showtime
/**
 * ECMAScript helper backing setTimeout()/setInterval().
 *
 * Duktape stack: [ callback(0), delay-ms(1) ].  'repeat' is 0 for a
 * one-shot timer, 1 for an interval.  Registers the callback, computes
 * the first expiry, inserts the timer in the sorted global list and
 * ensures the worker thread is running.  Pushes the resource object and
 * returns 1 (one duktape return value).
 */
static int
set_timer(duk_context *duk, int repeat)
{
  es_context_t *ec = es_get(duk);

  es_timer_t *et = es_resource_create(ec, &es_resource_timer, 1);
  int val = duk_require_int(duk, 1);

  // Pin the JS callback (stack index 0) so GC cannot collect it
  es_root_register(duk, 0, et);

  // One-shot: interval stays 0; interval timer: re-arm every 'val' ms
  et->et_interval = val * repeat;

  int64_t now = arch_get_ts();
  et->et_expire = now + val * 1000LL;   // ms -> µs timestamp

  hts_mutex_lock(&timer_mutex);

  if(thread_running == 0) {
    thread_running = 1;
    hts_thread_create_detached("estimer", timer_thread, NULL,
                               THREAD_PRIO_MODEL);
  } else {
    // Wake the worker so it re-evaluates its next wakeup deadline
    hts_cond_signal(&timer_cond);
  }

  LIST_INSERT_SORTED(&timers, et, et_link, estimercmp, es_timer_t);

  hts_mutex_unlock(&timer_mutex);

  es_resource_push(duk, &et->super);
  return 1;
}
コード例 #6
0
ファイル: glw_video_opengl.c プロジェクト: carlinx/showtime
/**
 * Return a displayed surface to the avail queue for reuse.
 *
 * If the surface's pixels were uploaded to textures, re-map its PBOs so
 * the decoder can write the next frame into them.  Signals any thread
 * waiting for a free surface.
 */
static void
gv_surface_pixmap_release(glw_video_t *gv, glw_video_surface_t *gvs,
			  const glw_video_config_t *gvc,
			  struct glw_video_surface_queue *fromqueue)
{
  int i;

  hts_mutex_lock(&gv->gv_surface_mutex);
  TAILQ_REMOVE(fromqueue, gvs, gvs_link);

  if(gvs->gvs_uploaded) {
    gvs->gvs_uploaded = 0;

    // NOTE(review): hardcoded 3 planes here, unlike the gv_planes loop
    // used elsewhere -- presumably this config is always planar YUV
    for(i = 0; i < 3; i++) {
      glBindBuffer(GL_PIXEL_UNPACK_BUFFER, gvs->gvs_pbo[i]);

      // Setting the buffer to NULL tells the GPU it can assign
      // us another piece of memory as backing store.
#ifdef PBO_RELEASE_BEFORE_MAP
      glBufferData(GL_PIXEL_UNPACK_BUFFER,
		   gvc->gvc_width[i] * gvc->gvc_height[i],
		   NULL, GL_STREAM_DRAW);
#endif

      gvs->gvs_pbo_ptr[i] = glMapBuffer(GL_PIXEL_UNPACK_BUFFER, GL_WRITE_ONLY);
      gvs->gvs_data[i] = gvs->gvs_pbo_ptr[i];
    }
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
  }

  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
  hts_mutex_unlock(&gv->gv_surface_mutex);
}
コード例 #7
0
ファイル: glw_video_vdpau.c プロジェクト: Cy-4AH/showtime
/**
 * Initialize a video surface backed by a VDPAU output surface.
 *
 * Creates a BGRA VDPAU surface, registers it with GL via
 * NV_vdpau_interop so it can be sampled as a 2D texture, and parks the
 * surface on the avail queue.
 *
 * Returns 0 on success, -1 if the VDPAU surface could not be created.
 */
static int
surface_init(glw_video_t *gv, glw_video_surface_t *gvs)
{
  VdpStatus r;
  glw_root_t *gr = gv->w.glw_root;
  glw_backend_root_t *gbr = &gr->gr_be;
  vdpau_dev_t *vd = gbr->gbr_vdpau_dev;


  r = vd->vdp_output_surface_create(vd->vd_dev, VDP_RGBA_FORMAT_B8G8R8A8,
                                    gvs->gvs_width[0],
                                    gvs->gvs_height[0],
				    &gvs->gvs_vdpau_surface);

  if(r != VDP_STATUS_OK)
    return -1;

  glGenTextures(1, gvs->gvs_texture.textures);

  // VDPAU surface handles are integers; the interop API takes them
  // disguised as pointers, hence the uintptr_t cast
  gvs->gvs_gl_surface =
    gbr->gbr_glVDPAURegisterOutputSurfaceNV((void *)(uintptr_t)
                                            gvs->gvs_vdpau_surface,
                                            GL_TEXTURE_2D, 1,
                                            gvs->gvs_texture.textures);

  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
  return 0;
}
コード例 #8
0
ファイル: ps3_webpopup.c プロジェクト: Ralbarker/showtime
/**
 * System callback for the PS3 web browser utility.
 *
 * Tracks browser visibility, drives the unload -> shutdown sequence and
 * finally releases the memory container, waking whoever is blocked on
 * web_cond waiting for the browser session to finish.
 */
static void
web_sys_callback(int type, void *userdata)
{
  TRACE(TRACE_DEBUG, "WEB", "Got callback 0x%x", type);

  if(type == WEBBROWSER_GRABBED) {
    browser_visible = 1;

  } else if(type == WEBBROWSER_RELEASED) {
    browser_visible = 0;

  } else if(type == WEBBROWSER_UNLOADING_FINISHED) {
    /* Unload done -- continue tearing the browser down */
    webBrowserShutdown();

  } else if(type == WEBBROWSER_SHUTDOWN_FINISHED) {
    int r = lv2MemContinerDestroy(memcontainer);
    if(r)
      TRACE(TRACE_ERROR, "WEB", "Unable to release container: 0x%x", r);

    /* Session over: clear the open flag and wake the waiter */
    hts_mutex_lock(&web_mutex);
    hts_cond_signal(&web_cond);
    browser_open = 0;
    hts_mutex_unlock(&web_mutex);
  }
}
コード例 #9
0
ファイル: glw_video_opengl.c プロジェクト: dreamcat4/showtime
/**
 * Recycle a surface back onto the avail queue.
 *
 * If its pixel data had been uploaded, re-map every plane's PBO so the
 * decoder can fill it again, then signal waiters.
 *
 * NOTE(review): no lock is taken here, unlike sibling variants that
 * grab gv_surface_mutex -- presumably the caller already holds it;
 * verify against call sites.
 */
static void
gv_surface_pixmap_release(glw_video_t *gv, glw_video_surface_t *gvs,
			  struct glw_video_surface_queue *fromqueue)
{
  int i;

  // Must not recycle a surface that is currently being blended on screen
  assert(gvs != gv->gv_sa);
  assert(gvs != gv->gv_sb);

  TAILQ_REMOVE(fromqueue, gvs, gvs_link);

  if(gvs->gvs_uploaded) {
    gvs->gvs_uploaded = 0;

    for(i = 0; i < gv->gv_planes; i++) {
      glBindBuffer(GL_PIXEL_UNPACK_BUFFER, gvs->gvs_pbo[i]);

      // Setting the buffer to NULL tells the GPU it can assign
      // us another piece of memory as backing store.
#ifdef PBO_RELEASE_BEFORE_MAP
      glBufferData(GL_PIXEL_UNPACK_BUFFER, gvs->gvs_size[i],
		   NULL, GL_STREAM_DRAW);
#endif

      gvs->gvs_data[i] = glMapBuffer(GL_PIXEL_UNPACK_BUFFER, GL_WRITE_ONLY);
      assert(gvs->gvs_data[i] != NULL);
    }
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
  }

  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
}
コード例 #10
0
ファイル: omx.c プロジェクト: kshostak/showtime
/**
 * Enqueue an operation for the OMX clock worker and wake it.
 *
 * NOTE(review): malloc() result is used unchecked -- project-wide
 * policy appears to be abort-on-OOM elsewhere; confirm.
 * NOTE(review): the queue and condvar are touched without a visible
 * lock -- presumably the caller holds the clock's mutex; verify.
 */
static void
omx_clk_do(omx_clk_t *clk, int op)
{
  omx_clk_cmd_t *cmd = malloc(sizeof(omx_clk_cmd_t));
  cmd->cmd = op;
  TAILQ_INSERT_TAIL(&clk->q, cmd, link);
  hts_cond_signal(&clk->cond);
}
コード例 #11
0
ファイル: glw_video_rsx.c プロジェクト: dreamcat4/showtime
/**
 * RSX variant: move a surface from 'fromqueue' back to the avail queue
 * and wake any thread waiting for a free surface.  No GPU resources
 * need re-arming here, so this is a pure queue transfer.
 */
static void
gv_surface_pixmap_release(glw_video_t *gv, glw_video_surface_t *gvs,
			  struct glw_video_surface_queue *fromqueue)
{
  TAILQ_REMOVE(fromqueue, gvs, gvs_link);
  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
}
コード例 #12
0
ファイル: android_audio.c プロジェクト: StingerFG/movian
/**
 * OpenSL ES buffer-queue callback: one output buffer finished playing.
 * Bumps the free-buffer count and wakes the audio decode thread.
 */
static void
buffer_callback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
  decoder_t *dec = context;
  media_pipe_t *pipe = dec->ad.ad_mp;

  hts_mutex_lock(&pipe->mp_mutex);
  dec->d_avail_buffers++;
  hts_cond_signal(&pipe->mp_audio.mq_avail);
  hts_mutex_unlock(&pipe->mp_mutex);
}
コード例 #13
0
ファイル: rpi_pixmap.c プロジェクト: Bibamaru/showtime
/**
 * OMX port-settings-changed hook for the pixmap decoder: raise the
 * change flag and wake the thread waiting on rpd_cond.
 */
static void 
decoder_port_settings_changed(omx_component_t *oc)
{
  rpi_pixmap_decoder_t *dec = oc->oc_opaque;

  hts_mutex_lock(&dec->rpd_mtx);
  dec->rpd_change = 1;
  hts_cond_signal(&dec->rpd_cond);
  hts_mutex_unlock(&dec->rpd_mtx);
}
コード例 #14
0
ファイル: glw_video_rsx.c プロジェクト: carlinx/showtime
/**
 * RSX variant (locked): under the surface mutex, move a surface from
 * 'fromqueue' to the avail queue and signal waiters.  'gvc' is unused
 * here but kept for signature parity with the other backends.
 */
static void
gv_surface_pixmap_release(glw_video_t *gv, glw_video_surface_t *gvs,
			  const glw_video_config_t *gvc,
			  struct glw_video_surface_queue *fromqueue)
{
  hts_mutex_lock(&gv->gv_surface_mutex);
  TAILQ_REMOVE(fromqueue, gvs, gvs_link);
  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
  hts_mutex_unlock(&gv->gv_surface_mutex);
}
コード例 #15
0
ファイル: omx.c プロジェクト: Ezio-PS/movian
/**
 * OpenMAX IL FillBufferDone callback: the component produced an output
 * buffer.  Publish it as oc_filled and wake the consumer.
 * Always returns 0 (OMX_ErrorNone).
 */
static OMX_ERRORTYPE
oc_fill_buffer_done(OMX_HANDLETYPE hComponent,
                     OMX_PTR opaque,
                     OMX_BUFFERHEADERTYPE* buf)
{
  omx_component_t *comp = opaque;

  hts_mutex_lock(comp->oc_mtx);
  comp->oc_filled = buf;
  hts_cond_signal(comp->oc_avail_cond);
  hts_mutex_unlock(comp->oc_mtx);
  return 0;
}
コード例 #16
0
ファイル: omx.c プロジェクト: kshostak/showtime
/**
 * OpenMAX IL EmptyBufferDone callback: the component consumed an input
 * buffer.  Push it onto the singly-linked free list (chained through
 * pAppPrivate), account for it, and wake any thread waiting for a free
 * buffer.  Always returns 0 (OMX_ErrorNone).
 */
static OMX_ERRORTYPE
oc_empty_buffer_done(OMX_HANDLETYPE hComponent,
                     OMX_PTR opaque,
                     OMX_BUFFERHEADERTYPE* buf)
{
  omx_component_t *comp = opaque;

  hts_mutex_lock(comp->oc_avail_mtx);

  /* Prepend the buffer to the free list */
  buf->pAppPrivate = comp->oc_avail;
  comp->oc_avail = buf;
  comp->oc_inflight_buffers--;

  hts_cond_signal(comp->oc_avail_cond);
  hts_mutex_unlock(comp->oc_avail_mtx);
  return 0;
}
コード例 #17
0
/**
 * Software-management worker thread.
 *
 * Finishes plugin initialization, then publishes the "plugins loaded"
 * state (waking anyone blocked on state_cond) before running the
 * slower upgrade checks so startup is not delayed by them.
 * Always returns NULL.
 */
static void *
swthread(void *aux)
{
  plugins_init2();
  
  hts_mutex_lock(&gconf.state_mutex);
  gconf.state_plugins_loaded = 1;
  hts_cond_signal(&gconf.state_cond);
  hts_mutex_unlock(&gconf.state_mutex);

  // These may hit the network; deliberately done after the signal above
  plugins_upgrade_check();

  upgrade_init();
  return NULL;
}
コード例 #18
0
ファイル: glw_text_bitmap.c プロジェクト: Allba/showtime
/**
 * (Re)start the text rasterization pipeline for a text widget.
 *
 * Widgets limited to a single line ("direct") skip the dimensioning
 * pass and are flagged for rendering immediately; multi-line widgets
 * are queued for dimensioning and the worker thread is signalled.
 * If the widget is already in flight, just note that another realize
 * is wanted once the current pass finishes.
 */
static void
gtb_realize(glw_text_bitmap_t *gtb)
{
  glw_root_t *gr = gtb->w.glw_root;
  const int direct = gtb->gtb_maxlines > 1;

  if(gtb->gtb_state != GTB_IDLE && gtb->gtb_state != GTB_VALID) {
    /* Busy in the pipeline -- defer until the current pass completes */
    gtb->gtb_deferred_realize = 1;
    return;
  }

  if(!direct) {
    TAILQ_INSERT_TAIL(&gr->gr_gtb_dim_queue, gtb, gtb_workq_link);
    gtb->gtb_state = GTB_QUEUED_FOR_DIMENSIONING;
    hts_cond_signal(&gr->gr_gtb_work_cond);
    return;
  }

  gtb->gtb_state = GTB_NEED_RENDER;
}
コード例 #19
0
ファイル: linux_webpopup.c プロジェクト: Bibamaru/showtime
/**
 * Drain the pending-open and pending-close webpopup lists.
 *
 * Must run on the GTK main thread (it creates and destroys GTK
 * widgets).  For each pending popup: build a toplevel window with an
 * embedded WebKit view, hook navigation/error/close signals and load
 * the URL.  For each pending close: wake the waiter and destroy the
 * window.
 *
 * NOTE(review): both lists are popped without a visible lock --
 * presumably producers synchronize elsewhere; verify.
 */
void
linux_webpopup_check(void)
{
  webpopup_t *wp;
  while((wp = LIST_FIRST(&pending_open)) != NULL) {
    LIST_REMOVE(wp, wp_link);
    wp->wp_win = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_window_set_title(GTK_WINDOW(wp->wp_win), wp->wp_title);
    wp->wp_webview = webkit_web_view_new();


#if 0
    wp->wp_scrolled_win = gtk_scrolled_window_new(NULL, NULL);
    gtk_container_add(GTK_CONTAINER(wp->wp_scrolled_win), wp->wp_webview);
    gtk_container_add(GTK_CONTAINER(wp->wp_win), wp->wp_scrolled_win);
#else
    // Let the page drive the window size instead of scrolling
    WebKitWebSettings *s =
      webkit_web_view_get_settings(WEBKIT_WEB_VIEW(wp->wp_webview));
    g_object_set(G_OBJECT(s), "auto-resize-window", 1, NULL);
    gtk_container_add(GTK_CONTAINER(wp->wp_win), wp->wp_webview);
#endif
    // Intercept navigation so the popup can complete OAuth-style flows
    g_signal_connect(G_OBJECT(wp->wp_webview),
		     "navigation-policy-decision-requested",
		     G_CALLBACK(navigation_policy_decision_requested), wp);

    g_signal_connect(G_OBJECT(wp->wp_webview),
		     "load-error",
		     G_CALLBACK(load_error), wp);

    g_signal_connect(G_OBJECT(wp->wp_win), "delete_event",
		     G_CALLBACK(closed_window), wp);

    webkit_web_view_load_uri(WEBKIT_WEB_VIEW(wp->wp_webview), wp->wp_url);

    //gtk_window_set_default_size(GTK_WINDOW(wp->wp_win), 1200, 800);

    gtk_widget_show_all(wp->wp_win);
  }

  while((wp = LIST_FIRST(&pending_close)) != NULL) {
    LIST_REMOVE(wp, wp_link);
    // Wake the thread blocked in webpopup wait before tearing down
    hts_cond_signal(&wp->wp_cond);
    gtk_widget_destroy(wp->wp_win);
  }
}
コード例 #20
0
ファイル: android_glw.c プロジェクト: Bibamaru/showtime
/**
 * JNI entry point: shut down a GLW rendering root.
 *
 * Reaps twice (second pass unloads textures freed by the first),
 * flushes remaining resources, clears the running flag and wakes the
 * render loop waiting on agr_runcond.
 *
 * NOTE(review): 'id' is a 32-bit jint cast to a pointer -- this
 * truncates on 64-bit ABIs.  The JNI signature would need jlong to fix
 * it; flagging only, since the Java side defines the interface.
 */
JNIEXPORT void JNICALL
Java_com_showtimemediacenter_showtime_STCore_glwFini(JNIEnv *env,
                                                     jobject obj,
                                                     jint id)
{
  android_glw_root_t *agr = (android_glw_root_t *)id;
  glw_root_t *gr = &agr->gr;

  TRACE(TRACE_DEBUG, "GLW", "GLW %p finishing", agr);

  glw_lock(gr);
  // Calling twice will unload all textures, etc
  glw_reap(gr);
  glw_reap(gr);
  glw_flush(gr);
  agr->agr_running = 0;
  hts_cond_signal(&agr->agr_runcond);
  glw_unlock(gr);
}
コード例 #21
0
ファイル: glw_text_bitmap.c プロジェクト: StingerFG/movian
/**
 * Kick off (or defer) a re-rasterization of a text widget.
 *
 * Single-line widgets ("direct") go straight to GTB_NEED_RENDER and
 * request a refresh; multi-line widgets are queued for the
 * dimensioning pass and the worker thread is woken.  If the widget is
 * already somewhere in the pipeline, just remember that a realize was
 * requested.
 */
static void
gtb_realize(glw_text_bitmap_t *gtb)
{
  glw_root_t *gr = gtb->w.glw_root;
  const int direct = gtb->gtb_maxlines > 1;

  if(gtb->gtb_state != GTB_IDLE && gtb->gtb_state != GTB_VALID) {
    /* Pipeline busy -- redo the realize when the current pass is done */
    gtb->gtb_deferred_realize = 1;
    return;
  }

  if(!direct) {
    TAILQ_INSERT_TAIL(&gr->gr_gtb_dim_queue, gtb, gtb_workq_link);
    gtb->gtb_state = GTB_QUEUED_FOR_DIMENSIONING;
    hts_cond_signal(&gr->gr_gtb_work_cond);
    return;
  }

  gtb->gtb_state = GTB_NEED_RENDER;
  glw_need_refresh(gr, 0);
}
コード例 #22
0
ファイル: glw_video_vdpau.c プロジェクト: Cy-4AH/showtime
/**
 * VDPAU variant: recycle a surface back to the avail queue.
 *
 * Unmaps the NV_vdpau_interop surface if it was mapped for rendering,
 * then re-queues it and signals waiters.
 *
 * NOTE(review): no lock taken here -- presumably the caller holds the
 * surface mutex, as in the other backends; verify.
 */
static void
gv_surface_pixmap_release(glw_video_t *gv, glw_video_surface_t *gvs,
			  struct glw_video_surface_queue *fromqueue)
{
  glw_root_t *gr = gv->w.glw_root;
  glw_backend_root_t *gbr = &gr->gr_be;

  // Must never recycle a surface currently used for on-screen blending
  assert(gvs != gv->gv_sa);
  assert(gvs != gv->gv_sb);

  TAILQ_REMOVE(fromqueue, gvs, gvs_link);

  if(gvs->gvs_mapped) {
    gvs->gvs_mapped = 0;
    gbr->gbr_glVDPAUUnmapSurfacesNV(1, &gvs->gvs_gl_surface);
  }

  TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
  hts_cond_signal(&gv->gv_avail_queue_cond);
}
コード例 #23
0
ファイル: glw_video_vdpau.c プロジェクト: Cy-4AH/showtime
/**
 * Per-frame hook for the VDPAU video engine.
 *
 * Eases the color-matrix toward its target, handles the "VDPAU device
 * re-created" case by invalidating all surfaces and asking the decoder
 * to reinitialize, re-initializes any parked surfaces, and finally
 * delegates frame selection/blending to glw_video_newframe_blend().
 *
 * Returns the presentation timestamp chosen by the blender, or
 * AV_NOPTS_VALUE when a VDPAU reinitialization was triggered.
 */
static int64_t
gvv_newframe(glw_video_t *gv, video_decoder_t *vd0, int flags)
{
  video_decoder_t *vd = gv->gv_vd;
  media_pipe_t *mp = gv->gv_mp;

  // Low-pass filter toward the target color matrix (3/4 old, 1/4 new)
  gv->gv_cmatrix_cur[0] = (gv->gv_cmatrix_cur[0] * 3.0f +
			   gv->gv_cmatrix_tgt[0]) / 4.0f;

  if(flags & GLW_REINITIALIZE_VDPAU) {

    // Device was lost: every surface handle is now stale
    int i;
    for(i = 0; i < GLW_VIDEO_MAX_SURFACES; i++)
      gv->gv_surfaces[i].gvs_vdpau_surface = VDP_INVALID_HANDLE;

    gv->gv_engine = NULL;

    // Tell the decoder thread to rebuild its codec context
    mp_send_cmd(mp, &mp->mp_video, MB_CTRL_REINITIALIZE);

    drain(gv, &gv->gv_displaying_queue);
    drain(gv, &gv->gv_decoded_queue);
    hts_cond_signal(&gv->gv_avail_queue_cond);

    return AV_NOPTS_VALUE;
  }

  glw_video_surface_t *gvs;

  // Surfaces parked during reconfiguration get fresh VDPAU backing now
  while((gvs = TAILQ_FIRST(&gv->gv_parked_queue)) != NULL) {
    TAILQ_REMOVE(&gv->gv_parked_queue, gvs, gvs_link);
    surface_init(gv, gvs);
  }

  glw_need_refresh(gv->w.glw_root, 0);
  return glw_video_newframe_blend(gv, vd, flags, &gv_surface_pixmap_release, 1);
}
コード例 #24
0
ファイル: video_decoder.c プロジェクト: mla2/showtime
/**
 * Video decoder thread
 */
/**
 * Video decoder thread
 *
 * Main loop: pop media buffers from the video queue under mp_mutex,
 * release backpressure, then dispatch on buffer type with the mutex
 * dropped (decode work must not block the pipe).  Exits on
 * MB_CTRL_EXIT.  Always returns NULL.
 */
static void *
vd_thread(void *aux)
{
  video_decoder_t *vd = aux;
  media_pipe_t *mp = vd->vd_mp;
  media_queue_t *mq = &mp->mp_video;
  media_buf_t *mb;
  media_codec_t *mc;
  int run = 1;
  int reqsize = -1;   // pending output-size request, -1 = none
  int reinit = 0;     // codec reinit requested before next video buffer
  int size;
  // NOTE(review): avcodec_alloc_frame() is the pre-2.x libav/ffmpeg API
  vd->vd_frame = avcodec_alloc_frame();

  hts_mutex_lock(&mp->mp_mutex);

  while(run) {

    // Nothing queued: sleep until a producer signals mq_avail
    if((mb = TAILQ_FIRST(&mq->mq_q)) == NULL) {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // Paused: hold normal video frames, but still process ctrl/skip
    if(mb->mb_data_type == MB_VIDEO && vd->vd_hold && 
       vd->vd_skip == 0 && mb->mb_skip == 0) {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // Dequeue and update accounting, then release backpressure
    TAILQ_REMOVE(&mq->mq_q, mb, mb_link);
    mq->mq_packets_current--;
    mp->mp_buffer_current -= mb->mb_size;
    mq_update_stats(mp, mq);

    hts_cond_signal(&mp->mp_backpressure);
    hts_mutex_unlock(&mp->mp_mutex);

    mc = mb->mb_cw;

    switch(mb->mb_data_type) {
    case MB_CTRL_EXIT:
      run = 0;
      break;

    case MB_CTRL_PAUSE:
      vd->vd_hold = 1;
      break;

    case MB_CTRL_PLAY:
      vd->vd_hold = 0;
      break;

    case MB_FLUSH:
      // Seek/flush: reset timing state and drop queued overlays
      vd_init_timings(vd);
      vd->vd_do_flush = 1;
      vd->vd_interlaced = 0;
      video_overlay_flush(vd, 1);
      break;

    case MB_VIDEO:
      if(reinit) {
	reinit = 0;
	if(mc->reinit != NULL)
	  mc->reinit(mc);
      }

      if(mb->mb_skip == 2)
	vd->vd_skip = 1;

      size = mb->mb_size;

      // Codec-specific decode if available, else the generic path
      if(mc->decode)
	mc->decode(mc, vd, mq, mb, reqsize);
      else
	vd_decode_video(vd, mq, mb);

      update_vbitrate(mp, mq, size, vd);
      reqsize = -1;   // a size request only applies to one frame
      break;

    case MB_REQ_OUTPUT_SIZE:
      reqsize = mb->mb_data32;
      break;

    case MB_REINITIALIZE:
      reinit = 1;
      break;

#ifdef CONFIG_DVD
    case MB_DVD_HILITE:
    case MB_DVD_RESET_SPU:
    case MB_DVD_CLUT:
    case MB_DVD_PCI:
    case MB_DVD_SPU:
      dvdspu_decoder_dispatch(vd, mb, mp);
      break;
#endif

    case MB_SUBTITLE:
      // Only decode embedded subs when no external file overrides them
      if(vd->vd_ext_subtitles == NULL && mb->mb_stream == mq->mq_stream2)
	video_overlay_decode(vd, mb);
      break;

    case MB_END:
      break;

    case MB_BLACKOUT:
      vd->vd_frame_deliver(FRAME_BUFFER_TYPE_BLACKOUT, NULL, NULL,
			   vd->vd_opaque);
      break;

    case MB_FLUSH_SUBTITLES:
      video_overlay_flush(vd, 1);
      break;

    case MB_EXT_SUBTITLE:
      if(vd->vd_ext_subtitles != NULL)
         subtitles_destroy(vd->vd_ext_subtitles);

      // Steal subtitle from the media_buf
      vd->vd_ext_subtitles = mb->mb_data;
      mb->mb_data = NULL; 
      video_overlay_flush(vd, 1);
      break;

    default:
      abort();
    }

    hts_mutex_lock(&mp->mp_mutex);
    media_buf_free_locked(mp, mb);
  }

  hts_mutex_unlock(&mp->mp_mutex);

  if(vd->vd_ext_subtitles != NULL)
    subtitles_destroy(vd->vd_ext_subtitles);

  /* Free ffmpeg frame */
  av_free(vd->vd_frame);
  return NULL;
}
コード例 #25
0
ファイル: glw_text_bitmap.c プロジェクト: StingerFG/movian
/**
 * Layout pass for a text widget.
 *
 * Lazily initializes the text / cursor / background quad renderers,
 * uploads a freshly rasterized pixmap if one is pending, decides
 * whether a size change forces a re-render, positions the text quad
 * according to the widget alignment, positions the edit cursor, and
 * finally queues the widget for rendering when needed.
 *
 * Fix: the guard for initializing the background renderer previously
 * tested gtb_cursor_renderer (copy-paste from the cursor branch above),
 * so the background quad could be rendered uninitialized whenever the
 * cursor renderer already existed.  It now tests
 * gtb_background_renderer.
 */
static void
glw_text_bitmap_layout(glw_t *w, const glw_rctx_t *rc)
{
  glw_text_bitmap_t *gtb = (void *)w;
  glw_root_t *gr = w->glw_root;

  gr->gr_can_externalize = 0;

  // Initialize renderers

  if(unlikely(!glw_renderer_initialized(&gtb->gtb_text_renderer)))
    glw_renderer_init_quad(&gtb->gtb_text_renderer);

  if(w->glw_class == &glw_text &&
     unlikely(!glw_renderer_initialized(&gtb->gtb_cursor_renderer)))
    glw_renderer_init_quad(&gtb->gtb_cursor_renderer);

  // Background quad covers the whole widget rectangle in clip space
  if(gtb->gtb_background_alpha > GLW_ALPHA_EPSILON &&
     unlikely(!glw_renderer_initialized(&gtb->gtb_background_renderer))) {
    glw_renderer_init_quad(&gtb->gtb_background_renderer);
    glw_renderer_vtx_pos(&gtb->gtb_background_renderer, 0, -1, -1, 0);
    glw_renderer_vtx_pos(&gtb->gtb_background_renderer, 1,  1, -1, 0);
    glw_renderer_vtx_pos(&gtb->gtb_background_renderer, 2,  1,  1, 0);
    glw_renderer_vtx_pos(&gtb->gtb_background_renderer, 3, -1,  1, 0);
  }

  // Upload texture

  image_component_t *ic = image_find_component(gtb->gtb_image, IMAGE_PIXMAP);
  if(ic != NULL) {
    glw_tex_upload(gr, &gtb->gtb_texture, ic->pm, 0);
    gtb->gtb_margin = ic->pm->pm_margin;
    image_clear_component(ic);
    gtb->gtb_need_layout = 1;
  }

  const int tex_width  = glw_tex_width(&gtb->gtb_texture);
  const int tex_height = glw_tex_height(&gtb->gtb_texture);

  ic = image_find_component(gtb->gtb_image, IMAGE_TEXT_INFO);
  image_component_text_info_t *ti = ic ? &ic->text_info : NULL;

  // Check if we need to repaint

  if(gtb->gtb_saved_width  != rc->rc_width ||
     gtb->gtb_saved_height != rc->rc_height) {

    if(ti != NULL && gtb->gtb_state == GTB_VALID) {

      // Wrapped text reflows with any width change
      if(ti->ti_flags & IMAGE_TEXT_WRAPPED)
        gtb->gtb_state = GTB_NEED_RENDER;

      // Growing wider may un-truncate previously cut text
      if(rc->rc_width > gtb->gtb_saved_width &&
         ti->ti_flags & IMAGE_TEXT_TRUNCATED)
        gtb->gtb_state = GTB_NEED_RENDER;

      if(gtb->gtb_flags & GTB_ELLIPSIZE) {

        if(ti->ti_flags & IMAGE_TEXT_TRUNCATED) {
          gtb->gtb_state = GTB_NEED_RENDER;
        } else {

          // Shrinking below the rendered extent requires re-ellipsizing
          if(rc->rc_width - gtb->gtb_padding[2] - gtb->gtb_padding[0] <
             tex_width - gtb->gtb_margin * 2)
            gtb->gtb_state = GTB_NEED_RENDER;

          if(rc->rc_height - gtb->gtb_padding[1] - gtb->gtb_padding[3] <
             tex_height - gtb->gtb_margin * 2)
            gtb->gtb_state = GTB_NEED_RENDER;
        }
      }
    }

    gtb->gtb_saved_width  = rc->rc_width;
    gtb->gtb_saved_height = rc->rc_height;
    gtb->gtb_update_cursor = 1;
    gtb->gtb_need_layout = 1;

    if(gtb->w.glw_flags2 & GLW2_DEBUG)
      printf("  textbitmap: Parent widget gives us :%d x %d ti=%p\n",
             rc->rc_width, rc->rc_height, ti);

  }

  if(ti != NULL && gtb->gtb_need_layout) {

    // Padding is shrunk by the rasterizer's margin, which is baked into
    // the texture around the glyphs
    const int margin = gtb->gtb_margin;

    int left   =                 gtb->gtb_padding[0] - margin;
    int top    = rc->rc_height - gtb->gtb_padding[1] + margin;
    int right  = rc->rc_width  - gtb->gtb_padding[2] + margin;
    int bottom =                 gtb->gtb_padding[3] - margin;

    int text_width  = tex_width;
    int text_height = tex_height;

    float x1, y1, x2, y2;

    if(gtb->w.glw_flags2 & GLW2_DEBUG)
      printf("  textbitmap: text_width:%d left:%d right:%d margin:%d\n",
             text_width, left, right, margin);

    // Vertical
    if(text_height > top - bottom) {
      // Oversized, must cut
      text_height = top - bottom;
    } else {
      switch(w->glw_alignment) {
      case LAYOUT_ALIGN_CENTER:
      case LAYOUT_ALIGN_LEFT:
      case LAYOUT_ALIGN_RIGHT:
        bottom = (bottom + top - text_height) / 2;
        top = bottom + text_height;
        break;

      case LAYOUT_ALIGN_TOP_LEFT:
      case LAYOUT_ALIGN_TOP_RIGHT:
      case LAYOUT_ALIGN_TOP:
      case LAYOUT_ALIGN_JUSTIFIED:
        bottom = top - tex_height;
        break;

      case LAYOUT_ALIGN_BOTTOM:
      case LAYOUT_ALIGN_BOTTOM_LEFT:
      case LAYOUT_ALIGN_BOTTOM_RIGHT:
        top = bottom + tex_height;
        break;
      }
    }

    // Pixel coordinates -> clip-space [-1, 1]
    y1 = -1.0f + 2.0f * bottom / (float)rc->rc_height;
    y2 = -1.0f + 2.0f * top    / (float)rc->rc_height;



    // Horizontal
    if(text_width > right - left || ti->ti_flags & IMAGE_TEXT_TRUNCATED) {

      // Oversized, must cut

      text_width = right - left;

      if(gtb->w.glw_flags2 & GLW2_DEBUG)
        printf("  textbitmap: Oversized, must cut. Width is now %d\n",
               text_width);
    } else {

      glw_renderer_vtx_col_reset(&gtb->gtb_text_renderer);

      switch(w->glw_alignment) {
      case LAYOUT_ALIGN_JUSTIFIED:
      case LAYOUT_ALIGN_CENTER:
      case LAYOUT_ALIGN_BOTTOM:
      case LAYOUT_ALIGN_TOP:
        left = (left + right - text_width) / 2;
        right = left + text_width;
        break;

      case LAYOUT_ALIGN_LEFT:
      case LAYOUT_ALIGN_TOP_LEFT:
      case LAYOUT_ALIGN_BOTTOM_LEFT:
        right = left + tex_width;
        break;

      case LAYOUT_ALIGN_RIGHT:
      case LAYOUT_ALIGN_TOP_RIGHT:
      case LAYOUT_ALIGN_BOTTOM_RIGHT:
        left = right - tex_width;
        break;
      }
    }


    x1 = -1.0f + 2.0f * left   / (float)rc->rc_width;
    x2 = -1.0f + 2.0f * right  / (float)rc->rc_width;


    // Texture coordinates: crop to the visible fraction of the pixmap
    const float s = text_width  / (float)tex_width;
    const float t = text_height / (float)tex_height;

    if(gtb->w.glw_flags2 & GLW2_DEBUG)
      printf("  s=%f t=%f\n", s, t);

    glw_renderer_vtx_pos(&gtb->gtb_text_renderer, 0, x1, y1, 0.0);
    glw_renderer_vtx_st (&gtb->gtb_text_renderer, 0, 0, t);

    glw_renderer_vtx_pos(&gtb->gtb_text_renderer, 1, x2, y1, 0.0);
    glw_renderer_vtx_st (&gtb->gtb_text_renderer, 1, s, t);

    glw_renderer_vtx_pos(&gtb->gtb_text_renderer, 2, x2, y2, 0.0);
    glw_renderer_vtx_st (&gtb->gtb_text_renderer, 2, s, 0);

    glw_renderer_vtx_pos(&gtb->gtb_text_renderer, 3, x1, y2, 0.0);
    glw_renderer_vtx_st (&gtb->gtb_text_renderer, 3, 0, 0);
  }

  // Reposition the edit cursor (1px-wide quad at the caret position)
  if(w->glw_class == &glw_text && gtb->gtb_update_cursor &&
     gtb->gtb_state == GTB_VALID) {

    int i = gtb->gtb_edit_ptr;
    int left;
    float x1, y1, x2, y2;

    if(ti != NULL && ti->ti_charpos != NULL) {

      if(i < ti->ti_charposlen) {
        left  = ti->ti_charpos[i*2  ];
      } else {
        // Past the last glyph: use the right edge of the final char
        left  = ti->ti_charpos[2 * ti->ti_charposlen - 1];
      }

    } else {

      left = 0;
    }

    left  += gtb->gtb_padding[0];

    x1 = -1.0f + 2.0f * (left - 1)  / (float)rc->rc_width;
    x2 = -1.0f + 2.0f * (left    )  / (float)rc->rc_width;
    y1 = -1.0f + 2.0f * gtb->gtb_padding[3] / (float)rc->rc_height;
    y2 =  1.0f - 2.0f * gtb->gtb_padding[1] / (float)rc->rc_height;

    glw_renderer_vtx_pos(&gtb->gtb_cursor_renderer, 0, x1, y1, 0.0);
    glw_renderer_vtx_pos(&gtb->gtb_cursor_renderer, 1, x2, y1, 0.0);
    glw_renderer_vtx_pos(&gtb->gtb_cursor_renderer, 2, x2, y2, 0.0);
    glw_renderer_vtx_pos(&gtb->gtb_cursor_renderer, 3, x1, y2, 0.0);

    if(w->glw_flags2 & GLW2_DEBUG) {
      printf("Cursor updated %f %f %f %f  rect:%d,%d\n",
             x1, y1, x2, y2, rc->rc_width, rc->rc_height);
    }

    gtb->gtb_update_cursor = 0;
  }

  gtb->gtb_paint_cursor =
    gtb->gtb_flags & GTB_PERMANENT_CURSOR ||
    (w->glw_class == &glw_text && glw_is_focused(w));

  // A blinking cursor means we must keep refreshing while visible
  if(gtb->gtb_paint_cursor && rc->rc_alpha > GLW_ALPHA_EPSILON)
    glw_need_refresh(gr, 0);

  gtb->gtb_need_layout = 0;

  // A realize arrived while the pipeline was busy -- run it now
  if(gtb->gtb_state == GTB_VALID && gtb->gtb_deferred_realize) {
    gtb->gtb_deferred_realize = 0;
    gtb_realize(gtb);
  }

  if(gtb->gtb_state != GTB_NEED_RENDER)
    return;

  // Hand the widget to the rasterizer thread
  TAILQ_INSERT_TAIL(&gr->gr_gtb_render_queue, gtb, gtb_workq_link);
  gtb->gtb_state = GTB_QUEUED_FOR_RENDERING;

  hts_cond_signal(&gr->gr_gtb_work_cond);
}
コード例 #26
0
ファイル: audio.c プロジェクト: dreamcat4/showtime
/**
 * Null audio output thread: consumes the audio queue without producing
 * sound, pacing consumption in real time from packet PTS so the rest
 * of the pipeline behaves as if audio were playing.  Exits on
 * MB_CTRL_EXIT.  Always returns NULL.
 */
static void *
dummy_audio_thread(void *aux)
{
  audio_decoder_t *ad = aux;
  media_pipe_t *mp = ad->ad_mp;
  media_queue_t *mq = &mp->mp_audio;
  media_buf_t *mb;
  int hold = 0;               // paused?
  int run = 1;
  int64_t rt = 0;             // wallclock at the moment 'base' was set
  int64_t base = AV_NOPTS_VALUE;  // PTS anchoring the real-time pacing


  hts_mutex_lock(&mp->mp_mutex);

  while(run) {

    // Sleep until a producer queues something
    if((mb = TAILQ_FIRST(&mq->mq_q)) == NULL) {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // While paused, hold audio data but still process control messages
    if(mb->mb_data_type == MB_AUDIO && hold && mb->mb_skip == 0) {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // Dequeue, update accounting, release backpressure, drop the lock
    TAILQ_REMOVE(&mq->mq_q, mb, mb_link);
    mq->mq_packets_current--;
    mp->mp_buffer_current -= mb->mb_size;
    mq_update_stats(mp, mq);
    hts_cond_signal(&mp->mp_backpressure);
    hts_mutex_unlock(&mp->mp_mutex);

    switch(mb->mb_data_type) {
    case MB_CTRL_EXIT:
      run = 0;
      break;

    case MB_CTRL_PAUSE:
      hold = 1;
      break;

    case MB_CTRL_PLAY:
      hold = 0;
      base = AV_NOPTS_VALUE;   // re-anchor pacing after resume
      break;

    case MB_FLUSH:
      base = AV_NOPTS_VALUE;   // re-anchor pacing after seek
      break;

    case MB_AUDIO:
      if(mb->mb_skip || mb->mb_stream != mq->mq_stream) 
	break;
      if(mb->mb_pts != AV_NOPTS_VALUE) {
        audio_set_clock(mp, mb->mb_pts, 0, mb->mb_epoch);

        if(base == AV_NOPTS_VALUE) {
          base = mb->mb_pts;
          rt = showtime_get_ts();
        } else {
          // Sleep so consumption matches the stream's PTS timeline
          int64_t d = mb->mb_pts - base;
          if(d > 0) {
            int sleeptime = rt + d - showtime_get_ts();
	    if(sleeptime > 0)
	      usleep(sleeptime);
          }
        }
      }
      break;

    default:
      abort();
    }
    hts_mutex_lock(&mp->mp_mutex);
    media_buf_free_locked(mp, mb);
  }
  hts_mutex_unlock(&mp->mp_mutex);
  return NULL;
}
コード例 #27
0
ファイル: audio.c プロジェクト: dreamcat4/showtime
/**
 * Audio decode/output thread.
 *
 * Loop structure: while enough resampled audio is buffered, deliver
 * tiles to the audio class (locked or unlocked variant); otherwise pop
 * the next control or data buffer and dispatch on its type.  Control
 * messages preempt data.  A failed delivery sets 'blocked' until an
 * MB_CTRL_UNBLOCK arrives.  Exits on MB_CTRL_EXIT; returns NULL.
 */
void *
audio_decode_thread(void *aux)
{
  audio_decoder_t *ad = aux;
  const audio_class_t *ac = ad->ad_ac;
  int run = 1;
  media_pipe_t *mp = ad->ad_mp;
  media_queue_t *mq = &mp->mp_audio;
  media_buf_t *mb;
  int blocked = 0;   // output refused last tile; wait for UNBLOCK

  if(ac->ac_init != NULL)
    ac->ac_init(ad);

  ad->ad_discontinuity = 1;

  hts_mutex_lock(&mp->mp_mutex);

  while(run) {

    int avail;
    
    // How many output-ready samples (or SPDIF frame bytes) are buffered
    if(ad->ad_spdif_muxer != NULL) {
      avail = ad->ad_spdif_frame_size;
    } else {
      avail = ad->ad_avr != NULL ? avresample_available(ad->ad_avr) : 0;
    }
    media_buf_t *data = TAILQ_FIRST(&mq->mq_q_data);
    media_buf_t *ctrl = TAILQ_FIRST(&mq->mq_q_ctrl);

    // Deliver a tile when possible -- but control messages preempt
    if(avail >= ad->ad_tile_size && blocked == 0 && !ad->ad_paused && !ctrl) {
      assert(avail != 0);

      int samples = MIN(ad->ad_tile_size, avail);
      int r;

      if(ac->ac_deliver_locked != NULL) {
        r = ac->ac_deliver_locked(ad, samples, ad->ad_pts, ad->ad_epoch);
        if(r) {
          hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
          continue;
        }
      } else {
        // Unlocked delivery may sleep in the audio driver
        hts_mutex_unlock(&mp->mp_mutex);
        r = ac->ac_deliver_unlocked(ad, samples, ad->ad_pts, ad->ad_epoch);
        hts_mutex_lock(&mp->mp_mutex);
      }

      if(r) {
	blocked = 1;
      } else {
	ad->ad_pts = AV_NOPTS_VALUE;   // PTS applies to the first tile only
      }
      continue;
    }

    // Pick the next buffer: ctrl first, then data (only if we need more)
    if(ctrl != NULL) {
      TAILQ_REMOVE(&mq->mq_q_ctrl, ctrl, mb_link);
      mb = ctrl;
    } else if(data != NULL && avail < ad->ad_tile_size) {
      TAILQ_REMOVE(&mq->mq_q_data, data, mb_link);
      mb = data;
    } else {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    mq->mq_packets_current--;
    mp->mp_buffer_current -= mb->mb_size;
    mq_update_stats(mp, mq);
    hts_cond_signal(&mp->mp_backpressure);

    if(mb->mb_data_type == MB_CTRL_UNBLOCK) {
      assert(blocked);
      blocked = 0;
    } else if(ad->ad_mode == AUDIO_MODE_CODED && 
	      ac->ac_deliver_coded_locked != NULL &&
	      mb->mb_data_type == MB_AUDIO) {

      // Passthrough (coded) audio is handed over without resampling
      ac->ac_deliver_coded_locked(ad, mb->mb_data, mb->mb_size,
				  mb->mb_pts, mb->mb_epoch);

    } else {

      // Everything below may block; drop the pipe mutex first
      hts_mutex_unlock(&mp->mp_mutex);

      switch(mb->mb_data_type) {
      case MB_AUDIO:
	audio_process_audio(ad, mb);
	break;

      case MB_SET_PROP_STRING:
        prop_set_string(mb->mb_prop, (void *)mb->mb_data);
	break;

      case MB_CTRL_SET_VOLUME_MULTIPLIER:
        ad->ad_vol_scale = mb->mb_float;
	if(ac->ac_set_volume != NULL)
	  ac->ac_set_volume(ad, ad->ad_vol_scale);
        break;

      case MB_CTRL_PAUSE:
	ad->ad_paused = 1;
	if(ac->ac_pause)
	  ac->ac_pause(ad);
	break;

      case MB_CTRL_PLAY:
	ad->ad_paused = 0;
	if(ac->ac_play)
	  ac->ac_play(ad);
	break;

      case MB_CTRL_FLUSH:
        // Reset some error reporting filters
        ad->ad_channel_layout_fail = 0;
        ad->ad_sample_rate_fail = 0;

	if(ac->ac_flush)
	  ac->ac_flush(ad);
	ad->ad_pts = AV_NOPTS_VALUE;

	if(mp->mp_seek_audio_done != NULL)
	  mp->mp_seek_audio_done(mp);
	ad->ad_discontinuity = 1;

	// Drain any samples still sitting in the resampler
	if(ad->ad_avr != NULL) {
	  avresample_read(ad->ad_avr, NULL, avresample_available(ad->ad_avr));
	  assert(avresample_available(ad->ad_avr) == 0);
	}
	break;

      case MB_CTRL_EXIT:
	run = 0;
	break;

      default:
	abort();
      }

      hts_mutex_lock(&mp->mp_mutex);
    }
    media_buf_free_locked(mp, mb);
  }

  hts_mutex_unlock(&mp->mp_mutex);

  if(ac->ac_fini != NULL)
    ac->ac_fini(ad);
  return NULL;
}
コード例 #28
0
ファイル: audio_decoder.c プロジェクト: Rautz/showtime
/**
 * Audio decoder thread (audio-fifo based output path).
 *
 * Pops media buffers from the pipe's audio queue and dispatches on
 * buffer type: control messages (pause/play/flush/exit) and audio
 * payloads.  While no audio stream is selected (mq_stream == -1) the
 * thread consumes audio packets in real time by sleeping according to
 * their PTS spacing, so downstream clocks keep advancing even though
 * nothing is decoded ("silent playback").
 *
 * @param aux  the audio_decoder_t to run (passed as void * by the
 *             thread-create API)
 * @return     always NULL; the thread exits on MB_CTRL_EXIT
 */
static void *
ad_thread(void *aux)
{
  audio_decoder_t *ad = aux;
  media_pipe_t *mp = ad->ad_mp;
  media_queue_t *mq = &mp->mp_audio;
  media_buf_t *mb;
  int hold = 0;   // Non-zero while playback is paused (MB_CTRL_PAUSE seen)
  int run = 1;
  // PTS and wallclock captured at the start of a silent stretch; used
  // to pace packet consumption in real time when no stream is active
  int64_t silence_start_pts = AV_NOPTS_VALUE;
  uint64_t silence_start_realtime = 0;

  hts_mutex_lock(&mp->mp_mutex);

  while(run) {

    // Nothing queued: sleep until a producer signals availability
    if((mb = TAILQ_FIRST(&mq->mq_q)) == NULL) {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // While paused, leave audio data in the queue (control messages
    // and skip-marked buffers are still processed)
    if(mb->mb_data_type == MB_AUDIO && hold && mb->mb_skip == 0) {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // Dequeue under the lock, update accounting, then wake any
    // producer blocked on backpressure before dropping the lock
    TAILQ_REMOVE(&mq->mq_q, mb, mb_link);
    mq->mq_packets_current--;
    mp->mp_buffer_current -= mb->mb_size;
    mq_update_stats(mp, mq);
    hts_cond_signal(&mp->mp_backpressure);
    hts_mutex_unlock(&mp->mp_mutex);

    switch(mb->mb_data_type) {
    case MB_CTRL_EXIT:
      run = 0;
      break;

    case MB_CTRL_PAUSE:
      /* Copy back any pending audio in the output fifo */
      audio_fifo_purge(thefifo, ad, &ad->ad_hold_queue);
      hold = 1;
      break;

    case MB_CTRL_PLAY:
      hold = 0;
      break;

    case MB_FLUSH:
      ad->ad_do_flush = 1;
      /* Flush any pending audio in the output fifo */
      audio_fifo_purge(thefifo, ad, NULL);
      audio_decoder_flush(ad);
      break;

    case MB_AUDIO:
      // Buffers flagged for skipping (e.g. during seek) are discarded
      if(mb->mb_skip != 0)
	break;

      if(mq->mq_stream == -1) {
	// No audio stream selected: emulate real-time consumption so
	// the rest of the pipeline is paced correctly
	if(mb->mb_pts == AV_NOPTS_VALUE)
	  break;

	if(silence_start_pts == AV_NOPTS_VALUE) {
	  silence_start_pts = mb->mb_pts;
	  silence_start_realtime = showtime_get_ts();
	} else {
	  // Sleep until wallclock has advanced as far as the PTS has
	  int64_t d = mb->mb_pts - silence_start_pts;
	  if(d > 0) {
	    int64_t sleeptime = silence_start_realtime + d - showtime_get_ts();
	    if(sleeptime > 0)
	      usleep(sleeptime);
	  }
	}
	break;
      }

      // Drop packets belonging to a non-selected audio stream
      if(mb->mb_stream != mq->mq_stream) 
	break;
      ad_decode_buf(ad, mp, mq, mb);
      silence_start_pts = AV_NOPTS_VALUE;  // Real audio resumed
      break;

    case MB_END:
      mp_set_current_time(mp, AV_NOPTS_VALUE);
      break;

    default:
      abort();  // Unknown buffer type is a programming error
    }
    // Re-acquire the pipe lock to free the buffer (and for the next
    // loop iteration, which expects the lock to be held)
    hts_mutex_lock(&mp->mp_mutex);
    media_buf_free_locked(mp, mb);
  }
  hts_mutex_unlock(&mp->mp_mutex);
  // Discard anything still sitting in the output fifo before exit
  audio_fifo_purge(thefifo, ad, NULL);
  return NULL;
}
コード例 #29
0
ファイル: audio.c プロジェクト: copernic-us/showtime-1
/**
 * Audio decode/delivery thread (audio-class driver based path).
 *
 * Main loop alternates between two duties:
 *
 *  1. If enough resampled samples are buffered (>= ad_tile_size) and
 *     output is neither blocked nor paused, deliver one tile to the
 *     audio class driver — either with the pipe mutex held
 *     (ac_deliver_locked) or dropped around the call
 *     (ac_deliver_unlocked).  A non-zero return from the driver means
 *     "can't accept right now"; the locked path then waits on the
 *     queue condvar, the unlocked path sets `blocked` until an
 *     MB_CTRL_UNBLOCK message arrives.
 *
 *  2. Otherwise pop a buffer — control queue first, then the data
 *     queue (data only while fewer than 16384 samples are buffered) —
 *     and dispatch on its type.
 *
 * @param aux  the audio_decoder_t to run (void * per thread API)
 * @return     always NULL; the thread exits on MB_CTRL_EXIT
 */
void *
audio_decode_thread(void *aux)
{
  audio_decoder_t *ad = aux;
  const audio_class_t *ac = ad->ad_ac;
  int run = 1;
  media_pipe_t *mp = ad->ad_mp;
  media_queue_t *mq = &mp->mp_audio;
  media_buf_t *mb;
  int blocked = 0;  // Output driver refused samples; wait for MB_CTRL_UNBLOCK

  if(ac->ac_init != NULL)
    ac->ac_init(ad);

  hts_mutex_lock(&mp->mp_mutex);

  while(run) {

    // Samples available in the resampler (0 if no resampler yet)
    int avail = ad->ad_avr != NULL ? avresample_available(ad->ad_avr) : 0;

    media_buf_t *data = TAILQ_FIRST(&mq->mq_q_data);
    media_buf_t *ctrl = TAILQ_FIRST(&mq->mq_q_ctrl);

    // Duty 1: deliver a tile of samples to the output driver
    if(avail >= ad->ad_tile_size && blocked == 0 && !ad->ad_paused) {
      assert(avail != 0);
      assert(ad->ad_avr != NULL);

      int samples = MIN(ad->ad_tile_size, avail);
      int r;

      if(ac->ac_deliver_locked != NULL) {
        // Driver wants the pipe mutex held during delivery
        r = ac->ac_deliver_locked(ad, samples, ad->ad_pts, ad->ad_epoch);
        if(r) {
          // Driver not ready; sleep until something changes
          hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
          continue;
        }
        // PTS is consumed by the first delivery after it was set
        ad->ad_pts = AV_NOPTS_VALUE;
        continue;
      } else {
        // Driver must be called without the pipe mutex
        hts_mutex_unlock(&mp->mp_mutex);
        r = ac->ac_deliver_unlocked(ad, samples, ad->ad_pts, ad->ad_epoch);
        hts_mutex_lock(&mp->mp_mutex);
      }

      if(r) {
	// Driver full: stall delivery until MB_CTRL_UNBLOCK arrives
	blocked = 1;
	continue;
      } else {
	ad->ad_pts = AV_NOPTS_VALUE;
      }
      continue;
    }

    // Duty 2: pop a message — control has priority over data; data is
    // only consumed while the resampler holds fewer than 16384 samples
    if(ctrl != NULL) {
      TAILQ_REMOVE(&mq->mq_q_ctrl, ctrl, mb_link);
      mb = ctrl;
    } else if(data != NULL && avail < 16384) {
      TAILQ_REMOVE(&mq->mq_q_data, data, mb_link);
      mb = data;
    } else {
      hts_cond_wait(&mq->mq_avail, &mp->mp_mutex);
      continue;
    }

    // Update queue accounting and wake any producer waiting for space
    mq->mq_packets_current--;
    mp->mp_buffer_current -= mb->mb_size;
    mq_update_stats(mp, mq);
    hts_cond_signal(&mp->mp_backpressure);

    if(mb->mb_data_type == MB_CTRL_UNBLOCK) {
      // Output driver signalled it can accept samples again
      assert(blocked);
      blocked = 0;
    } else {

      // Remaining message types are handled without the pipe mutex
      hts_mutex_unlock(&mp->mp_mutex);

      switch(mb->mb_data_type) {
      case MB_AUDIO:
	audio_process_audio(ad, mb);
	break;

      case MB_CTRL_PAUSE:
	ad->ad_paused = 1;
	if(ac->ac_pause)
	  ac->ac_pause(ad);
	break;

      case MB_CTRL_PLAY:
	ad->ad_paused = 0;
	if(ac->ac_play)
	  ac->ac_play(ad);
	break;

      case MB_CTRL_FLUSH:
	if(ac->ac_flush)
	  ac->ac_flush(ad);
	ad->ad_pts = AV_NOPTS_VALUE;
	break;

      case MB_CTRL_EXIT:
	run = 0;
	break;

      default:
	abort();  // Unknown buffer type is a programming error
      }

      hts_mutex_lock(&mp->mp_mutex);
    }
    // Lock is held again here; free the consumed buffer
    media_buf_free_locked(mp, mb);
  }

  hts_mutex_unlock(&mp->mp_mutex);

  if(ac->ac_fini != NULL)
    ac->ac_fini(ad);
  return NULL;
}
コード例 #30
0
ファイル: glw_video_vdpau.c プロジェクト: Allba/showtime
/**
 * Advance the VDPAU presentation pipeline by one GLW frame.
 *
 * Three phases:
 *  - On GLW_REINITIALIZE_VDPAU, invalidate all cached VDPAU handles,
 *    ask the video decoder to reinitialize, and drain every queue.
 *  - Recycle surfaces the presentation queue has finished displaying
 *    back to the available queue.
 *  - Submit decoded surfaces whose presentation time has arrived
 *    (relative to the audio clock) to the VDPAU presentation queue.
 *
 * @return the PTS of the last frame considered, or AV_NOPTS_VALUE
 */
static int64_t
vdpau_newframe(glw_video_t *gv, video_decoder_t *vd0, int flags)
{
  glw_root_t *gr = gv->w.glw_root;
  vdpau_dev_t *dev = gr->gr_be.gbr_vdpau_dev;
  media_pipe_t *mp = gv->gv_mp;
  glw_video_surface_t *gvs;
  VdpStatus status;
  int64_t rpts = AV_NOPTS_VALUE;

  if(flags & GLW_REINITIALIZE_VDPAU) {
    // Device was lost: forget all surface/queue handles, force a new
    // configuration and tell the decoder to start over
    for(int i = 0; i < GLW_VIDEO_MAX_SURFACES; i++)
      gv->gv_surfaces[i].gvs_vdpau_surface = VDP_INVALID_HANDLE;

    gv->gv_vdpau_pq  = VDP_INVALID_HANDLE;
    gv->gv_vdpau_pqt = VDP_INVALID_HANDLE;

    gv->gv_cfg_cur.gvc_valid = 0;

    mp_send_cmd_head(mp, &mp->mp_video, MB_REINITIALIZE);

    drain(gv, &gv->gv_displaying_queue);
    drain(gv, &gv->gv_decoded_queue);
    hts_cond_signal(&gv->gv_avail_queue_cond);

    return AV_NOPTS_VALUE;
  }

  /* Recycle surfaces that the presentation queue is done with so the
   * decoder can reuse them */
  for(;;) {
    gvs = TAILQ_FIRST(&gv->gv_displaying_queue);
    if(gvs == NULL)
      break;

    VdpPresentationQueueStatus qstat;
    VdpTime when;

    gv->gv_vdpau_running = 1;

    status = dev->vdp_presentation_queue_query_surface_status(
        gv->gv_vdpau_pq, gvs->gvs_vdpau_surface, &qstat, &when);

    // A surface still being scanned out stops the sweep; queue order
    // implies later surfaces are not idle either
    if(status == VDP_STATUS_OK && qstat != VDP_PRESENTATION_QUEUE_STATUS_IDLE)
      break;

    TAILQ_REMOVE(&gv->gv_displaying_queue, gvs, gvs_link);
    TAILQ_INSERT_TAIL(&gv->gv_avail_queue, gvs, gvs_link);
    hts_cond_signal(&gv->gv_avail_queue_cond);
  }

  /* Hand decoded surfaces whose deadline has arrived to VDPAU */
  for(;;) {
    gvs = TAILQ_FIRST(&gv->gv_decoded_queue);
    if(gvs == NULL)
      break;

    const int64_t lead = gr->gr_frameduration * 2;
    int64_t aclock;

    rpts = gvs->gvs_pts;

    // Snapshot the audio clock (extrapolated to this frame's start)
    hts_mutex_lock(&mp->mp_clock_mutex);
    aclock = mp->mp_audio_clock + gr->gr_frame_start -
      mp->mp_audio_clock_realtime + mp->mp_avdelta;
    hts_mutex_unlock(&mp->mp_clock_mutex);

    int64_t diff = gvs->gvs_pts - aclock;

    // Missing or wildly off (>5s) PTS: fall back to the running
    // next-PTS estimate
    if(gvs->gvs_pts == AV_NOPTS_VALUE || diff < -5000000LL || diff > 5000000LL)
      rpts = gv->gv_nextpts;

    // More than two frame durations early — not due yet, stop here
    if(rpts != AV_NOPTS_VALUE && (rpts - lead) >= aclock)
      break;

    // Submit for display "now" (timestamp 0); result is not checked
    status = dev->vdp_presentation_queue_display(gv->gv_vdpau_pq,
                                                 gvs->gvs_vdpau_surface,
                                                 0, 0, 0);

    if(rpts != AV_NOPTS_VALUE)
      gv->gv_nextpts = rpts + gvs->gvs_duration;

    gv->gv_fwidth  = gvs->gvs_width;
    gv->gv_fheight = gvs->gvs_height;

    TAILQ_REMOVE(&gv->gv_decoded_queue, gvs, gvs_link);
    TAILQ_INSERT_TAIL(&gv->gv_displaying_queue, gvs, gvs_link);
  }
  return rpts;
}