Example #1
int bg_avdec_start(void * priv)
  {
  int i;
  const gavl_video_format_t * format;
  avdec_priv * avdec = priv;
  
  if(!bgav_start(avdec->dec))
    {
    return 0;
    }
  for(i = 0; i < avdec->current_track->num_video_streams; i++)
    {
    gavl_video_format_copy(&(avdec->current_track->video_streams[i].format),
                           bgav_get_video_format(avdec->dec, i));

    gavl_metadata_copy(&avdec->current_track->video_streams[i].m,
                       bgav_get_video_metadata(avdec->dec, i));
    
    avdec->current_track->video_streams[i].duration =
      bgav_video_duration(avdec->dec, i);
    
    }
  for(i = 0; i < avdec->current_track->num_audio_streams; i++)
    {
    gavl_audio_format_copy(&(avdec->current_track->audio_streams[i].format),
                           bgav_get_audio_format(avdec->dec, i));

    gavl_metadata_copy(&avdec->current_track->audio_streams[i].m,
                       bgav_get_audio_metadata(avdec->dec, i));
    
    avdec->current_track->audio_streams[i].duration =
      bgav_audio_duration(avdec->dec, i);
    }

  for(i = 0; i < avdec->current_track->num_text_streams; i++)
    {
    gavl_metadata_copy(&avdec->current_track->text_streams[i].m,
                       bgav_get_text_metadata(avdec->dec, i));
    
    avdec->current_track->text_streams[i].duration =
      bgav_text_duration(avdec->dec, i);

    avdec->current_track->text_streams[i].timescale = 
      bgav_get_text_timescale(avdec->dec, i);
    }

  for(i = 0; i < avdec->current_track->num_overlay_streams; i++)
    {
    gavl_metadata_copy(&avdec->current_track->overlay_streams[i].m,
                       bgav_get_overlay_metadata(avdec->dec, i));
    
    avdec->current_track->overlay_streams[i].duration =
      bgav_overlay_duration(avdec->dec, i);
    
    format = bgav_get_overlay_format(avdec->dec, i);
    gavl_video_format_copy(&avdec->current_track->overlay_streams[i].format,
                           format);
    }
  return 1;
  }
Example #2
static gavl_video_convert_context_t *
add_context(gavl_video_converter_t * cnv,
            const gavl_video_format_t * input_format,
            const gavl_video_format_t * output_format)
  {
  gavl_video_convert_context_t * ctx;
  ctx = calloc(1, sizeof(*ctx));
  ctx->options = &cnv->options;
  gavl_video_format_copy(&ctx->input_format,
                         input_format);
  
  gavl_video_format_copy(&ctx->output_format,
                         output_format);
  
  if(cnv->last_context)
    {
    cnv->last_context->next = ctx;
    cnv->last_context = cnv->last_context->next;
    }
  else
    {
    cnv->first_context = ctx;
    cnv->last_context = ctx;
    }
  cnv->num_contexts++;
  return ctx;
  }
Example #3
static gavl_video_source_t *
connect_cropscale(void * priv, gavl_video_source_t * src,
                   const gavl_video_options_t * opt)
  {
  cropscale_priv_t * vp = priv;
  const gavl_video_format_t * format;

  format = gavl_video_source_get_src_format(src);
  
  gavl_video_format_copy(&vp->in_format, format);
  gavl_video_format_copy(&vp->out_format, format);
  set_framesize(vp);
  set_out_format(vp);
  
  if(vp->deinterlace != DEINTERLACE_NEVER)
    vp->out_format.interlace_mode = GAVL_INTERLACE_NONE;

  vp->in_src = src;
  gavl_video_source_set_dst(vp->in_src, 0, &vp->in_format);

  if(opt)
    gavl_video_options_copy(vp->global_opt, opt);
  
  vp->need_reinit = 1;
  vp->need_restart = 0;

  vp->out_src =
    gavl_video_source_create(read_func,
                             vp, 0,
                             &vp->out_format);
  return vp->out_src;
  }
Example #4
int gavl_deinterlacer_init_scale(gavl_video_deinterlacer_t * d)
{
    gavl_video_options_t * scaler_opt;
    gavl_video_format_t in_format;
    gavl_video_format_t out_format;

    if(!d->scaler)
        d->scaler = gavl_video_scaler_create();
    scaler_opt = gavl_video_scaler_get_options(d->scaler);
    gavl_video_options_copy(scaler_opt, &d->opt);

    gavl_video_format_copy(&in_format, &d->format);
    gavl_video_format_copy(&out_format, &d->format);

    if(in_format.interlace_mode == GAVL_INTERLACE_NONE)
        in_format.interlace_mode = GAVL_INTERLACE_TOP_FIRST;
    out_format.interlace_mode = GAVL_INTERLACE_NONE;

    gavl_video_scaler_init(d->scaler,
                           &in_format,
                           &out_format);

    d->func = deinterlace_scale;
    return 1;
}
Example #5
int gavl_video_converter_init(gavl_video_converter_t * cnv,
                              const gavl_video_format_t * input_format,
                              const gavl_video_format_t * output_format)
  {
  gavl_video_format_copy(&cnv->input_format, input_format);
  gavl_video_format_copy(&cnv->output_format, output_format);
  return gavl_video_converter_reinit(cnv);
  }
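
A minimal usage sketch (not taken from the quoted sources) of the init call above, following the same create/init/convert/destroy sequence that the png and icon examples further down use; error handling is simplified.

static gavl_video_frame_t *
convert_frame_sketch(const gavl_video_format_t * in_format,
                     const gavl_video_format_t * out_format,
                     const gavl_video_frame_t * in_frame)
  {
  gavl_video_converter_t * cnv = gavl_video_converter_create();
  gavl_video_frame_t * out_frame = gavl_video_frame_create(out_format);

  /* gavl_video_converter_init() returns the number of conversion
     contexts; 0 means the formats are already identical (a negative
     return would indicate an error and is not handled in this sketch) */
  if(gavl_video_converter_init(cnv, in_format, out_format) > 0)
    gavl_video_convert(cnv, in_frame, out_frame);
  else
    gavl_video_frame_copy(out_format, out_frame, in_frame);

  gavl_video_converter_destroy(cnv);
  return out_frame;
  }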
Example #6
File: gavf.c Project: kidaa/gmerlin
static void get_overlay_format(const gavl_video_format_t * src,
                               gavl_video_format_t * dst,
                               const gavl_rectangle_i_t * src_rect)
{
    gavl_video_format_copy(dst, src);
    dst->image_width  = src_rect->w + src_rect->x;
    dst->image_height = src_rect->h + src_rect->y;
    gavl_video_format_set_frame_size(dst, 0, 0);
}
Example #7
static void
bg_gavf_get_video_format(void * data, int stream,
                         gavl_video_format_t*ret)
  {
  bg_gavf_t * priv;
  priv = data;

  gavl_video_format_copy(ret, &priv->video_streams[stream].format);
  }
Example #8
gavl_video_sink_t *
bg_ov_add_overlay_stream(bg_ov_t * ov, gavl_video_format_t * format)
  {
  ovl_stream_t * str;

  ov->ovl_str = realloc(ov->ovl_str,
                        (ov->num_ovl_str+1)*
                        sizeof(*ov->ovl_str));
  str = ov->ovl_str + ov->num_ovl_str;
  memset(str, 0, sizeof(*str));
  
  ov->num_ovl_str++;
 
  if(!format->image_width || !format->image_height)
    {
    format->image_width = ov->format.image_width;
    format->image_height = ov->format.image_height;
    format->pixel_width = ov->format.pixel_width;
    format->pixel_height = ov->format.pixel_height;
    gavl_video_format_set_frame_size(format, 0, 0);

    }
 
  if(ov->plugin->add_overlay_stream)
    {
    /* Try hardware overlay */
    LOCK(ov);
    str->sink_int = ov->plugin->add_overlay_stream(ov->priv, format);
    UNLOCK(ov);
    
    if(str->sink_int)
      {
      bg_log(BG_LOG_INFO, LOG_DOMAIN,
             "Using hardeware overlay for stream %d",
             ov->num_ovl_str-1);
      return str->sink_int;
      }
    }
  /* Software overlay */
  ov->flags |= FLAG_EMULATE_OVL;
  
  bg_log(BG_LOG_INFO, LOG_DOMAIN,
         "Using software overlay for stream %d",
         ov->num_ovl_str - 1);
  
  str->ctx = gavl_overlay_blend_context_create();
  
  gavl_overlay_blend_context_init(str->ctx,
                                  &ov->format, format);
  str->sink_ext = gavl_video_sink_create(NULL, put_overlay, str, format);
  
  gavl_video_format_copy(&str->format, format);
  
  return str->sink_ext;
  }
Example #9
static void set_format(void * priv, const gavl_video_format_t * format)
  {
  swapfields_priv_t * vp;
  vp = priv;
  
  vp->framerate_mult = 1;
  vp->noop = 0;
    
  gavl_video_format_copy(&vp->format, format);
  gavl_get_field_format(format, &vp->field_format[0], 0);
  gavl_get_field_format(format, &vp->field_format[1], 1);
  
  if(vp->format.interlace_mode == GAVL_INTERLACE_TOP_FIRST)
    {
    vp->format.interlace_mode = GAVL_INTERLACE_BOTTOM_FIRST;

    /* Top first -> bottom first: Delay bottom field */
    vp->delay_field = 1;
    }
  else if(vp->format.interlace_mode == GAVL_INTERLACE_BOTTOM_FIRST)
    {
    vp->format.interlace_mode = GAVL_INTERLACE_TOP_FIRST;

    /* Bottom first -> top first: Delay top field */
    vp->delay_field = 0;
    }
  else
    {
    bg_log(BG_LOG_WARNING, LOG_DOMAIN,
           "Unsupported interlace mode, need top-first or bottom-first");
    vp->noop = 1;
    }

  if(!vp->noop)
    {
    if(vp->format.frame_duration % 2)
      {
      vp->framerate_mult = 2;
      vp->format.timescale *= 2;
      vp->format.frame_duration *= 2;
      }
    }
  if(vp->fields[0])
    {
    gavl_video_frame_destroy(vp->fields[0]);
    vp->fields[0] = NULL;
    }
  if(vp->fields[1])
    {
    gavl_video_frame_destroy(vp->fields[1]);
    vp->fields[1] = NULL;
    }
  vp->init = 1;
  }
Example #10
static void
set_input_format_cropscale(void * priv,
                           gavl_video_format_t * format, int port)
  {
  cropscale_priv_t * vp;
  vp = priv;

  if(!port)
    {
    gavl_video_format_copy(&vp->in_format, format);
    gavl_video_format_copy(&vp->out_format, format);
    set_framesize(vp);
    set_out_format(vp);
    
    if(vp->deinterlace != DEINTERLACE_NEVER)
      vp->out_format.interlace_mode = GAVL_INTERLACE_NONE;
    
    vp->need_reinit = 1;
    vp->need_restart = 0;
    }
  }
Example #11
int gavl_video_deinterlacer_init(gavl_video_deinterlacer_t * d,
                                 const gavl_video_format_t * src_format)
  {
  
  gavl_video_format_copy(&d->format, src_format);
  gavl_video_format_copy(&d->half_height_format, src_format);

  if((d->format.interlace_mode == GAVL_INTERLACE_MIXED) ||
     (d->format.interlace_mode == GAVL_INTERLACE_MIXED_TOP) ||
     (d->format.interlace_mode == GAVL_INTERLACE_MIXED_BOTTOM))
    d->mixed = 1;
  else
    d->mixed = 0;
  
  d->half_height_format.image_height /= 2;
  d->half_height_format.frame_height /= 2;

  d->num_planes = gavl_pixelformat_num_planes(d->format.pixelformat);
  gavl_pixelformat_chroma_sub(d->format.pixelformat, &d->sub_h, &d->sub_v);
  
  switch(d->opt.deinterlace_mode)
    {
    case GAVL_DEINTERLACE_NONE:
      break;
    case GAVL_DEINTERLACE_COPY:
      gavl_deinterlacer_init_copy(d);
      break;
    case GAVL_DEINTERLACE_SCALE:
      gavl_deinterlacer_init_scale(d);
      break;
    case GAVL_DEINTERLACE_BLEND:
      if(!gavl_deinterlacer_init_blend(d))
        return 0;
      break;
    }
  return 1;
  }
Example #12
bg_ogg_stream_t *
bg_ogg_encoder_add_video_stream(void * data,
                                const gavl_metadata_t * m,
                                const gavl_video_format_t * format)
  {
  bg_ogg_stream_t * s;
  bg_ogg_encoder_t * e = data;
  s = append_stream(e, &e->video_streams, &e->num_video_streams, m);

  gavl_video_format_copy(&s->vfmt,
                         format);
  gavl_metadata_copy(&s->m_stream, m);
  gavl_metadata_delete_compression_fields(&s->m_stream);
  return s;
  }
Example #13
int bg_video_filter_chain_init(bg_video_filter_chain_t * ch,
                               const gavl_video_format_t * in_format,
                               gavl_video_format_t * out_format)
  {
  ch->need_restart = 0;
  

  if(ch->in_src)
    gavl_video_source_destroy(ch->in_src);

  ch->in_src = gavl_video_source_create(read_func_in, ch, 0, in_format);

  bg_video_filter_chain_connect(ch, ch->in_src);
  gavl_video_format_copy(out_format, gavl_video_source_get_src_format(ch->out_src));
  return ch->num_filters;
  }
Example #14
static video_stream_t * append_video_stream(bg_gavf_t * f, const gavl_metadata_t * m,
                                            const gavl_video_format_t * format)
  {
  video_stream_t * ret;

  f->video_streams =
    realloc(f->video_streams,
            (f->num_video_streams+1)*sizeof(*f->video_streams));

  ret = f->video_streams + f->num_video_streams;
  f->num_video_streams++;
  memset(ret, 0, sizeof(*ret));
  gavl_video_format_copy(&ret->format, format);
  if(m)
    gavl_metadata_copy(&ret->com.m, m);
  return ret;
  }
Example #15
int gavl_get_color_channel_format(const gavl_video_format_t * frame_format,
                                  gavl_video_format_t * channel_format,
                                  gavl_color_channel_t ch)
{
    channel_data_t d;

    gavl_video_format_copy(channel_format, frame_format);

    if(!get_channel_properties(frame_format->pixelformat,
                               &channel_format->pixelformat,
                               ch, &d))
        return 0;

    channel_format->image_width /= d.sub_h;
    channel_format->frame_width /= d.sub_h;

    channel_format->image_height /= d.sub_v;
    channel_format->frame_height /= d.sub_v;
    return 1;
}
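
A hedged usage sketch for the function above: derive the per-channel format, then allocate a frame matching it. The channel constant GAVL_CCH_Y is assumed here purely as an example value of gavl_color_channel_t.

static gavl_video_frame_t *
create_channel_frame(const gavl_video_format_t * frame_format)
{
    gavl_video_format_t channel_format;

    /* GAVL_CCH_Y (luminance) is only an example channel choice */
    if(!gavl_get_color_channel_format(frame_format, &channel_format,
                                      GAVL_CCH_Y))
        return NULL;

    return gavl_video_frame_create(&channel_format);
}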
Example #16
static int write_header_tga(void * priv, const char * filename,
                            gavl_video_format_t * format,
                            const gavl_metadata_t * m)
  {
  tga_t * tga = priv;

  if(gavl_pixelformat_has_alpha(format->pixelformat))
    format->pixelformat = GAVL_RGBA_32;
  else
    format->pixelformat = GAVL_BGR_24;

  gavl_video_format_copy(&tga->format, format);

  tga->filename = bg_filename_ensure_extension(filename, "tga");
  
  if(!bg_iw_cb_create_output_file(tga->cb, tga->filename))
    return 0;
  
  return 1;
  }
Example #17
static int read_header_gavl(void * priv, const char * filename,
                            gavl_video_format_t * format)
  {
  FILE * f;
  gavl_t * gavl = priv;

  f = fopen(filename, "r");
  if(!f)
    return 0;
  gavl->io = gavf_io_create_file(f, 0, 1, 1);

  if(!gavl_image_read_header(gavl->io,
                             &gavl->m,
                             format))
    return 0;
  
  gavl_video_format_copy(&gavl->format, format);
  return 1;
  
  }
Example #18
int bg_avdec_set_track(void * priv, int track)
  {
  int i;
  avdec_priv * avdec = priv;
  
  if(!bgav_select_track(avdec->dec, track))
    return 0;
  avdec->current_track = &(avdec->track_info[track]);
  
  /* Get formats (need them for compressed output) */
  for(i = 0; i < avdec->current_track->num_audio_streams; i++)
    gavl_audio_format_copy(&(avdec->current_track->audio_streams[i].format),
                           bgav_get_audio_format(avdec->dec, i));

  for(i = 0; i < avdec->current_track->num_video_streams; i++)
    gavl_video_format_copy(&(avdec->current_track->video_streams[i].format),
                           bgav_get_video_format(avdec->dec, i));

  
  return 1;
  }
Example #19
int bg_ov_open(bg_ov_t * ov, gavl_video_format_t * format, int keep_aspect)
  {
  int ret;

  LOCK(ov);
  ret = ov->plugin->open(ov->priv, format, keep_aspect);
  if(ret)
    ov->sink_int = ov->plugin->get_sink(ov->priv);
  UNLOCK(ov);

  if(!ret)
    return ret;
  
  gavl_video_format_copy(&ov->format, format);
  ov->flags = FLAG_OPEN;
  
  ov->sink_ext = gavl_video_sink_create(get_frame_func,
                                        put_frame_func, ov, format);
  
  return ret;
  }
Example #20
static gavl_video_source_t *
connect_shift(void * priv, gavl_video_source_t * src,
              const gavl_video_options_t * opt)
  {
  shift_priv_t * vp;
  int width_mult;

  vp = priv;

  if(vp->out_src)
    gavl_video_source_destroy(vp->out_src);
  
  vp->in_src = src;
  gavl_video_format_copy(&vp->format,
                         gavl_video_source_get_src_format(vp->in_src));

  width_mult = 3;
  vp->format.pixelformat =
    gavl_pixelformat_get_best(vp->format.pixelformat,
                              pixelformats,
                              NULL);
  //  gavl_video_format_copy(&vp->format, format);
  
  if(gavl_pixelformat_is_gray(vp->format.pixelformat))
    width_mult = 1;
  if(gavl_pixelformat_has_alpha(vp->format.pixelformat))
    width_mult++;
  vp->samples_per_line = vp->format.image_width * width_mult;
  
  gavl_video_source_set_dst(vp->in_src, 0, &vp->format);

  vp->out_src =
    gavl_video_source_create_source(read_func,
                                    vp, 0,
                                    vp->in_src);
  return vp->out_src;
  }
Example #21
int gavl_video_scaler_init(gavl_video_scaler_t * scaler,
                           const gavl_video_format_t * src_format,
                           const gavl_video_format_t * dst_format)
  {
  gavl_rectangle_f_t src_rect;
  gavl_rectangle_i_t  dst_rect;
  gavl_video_options_t opt;

  int field, plane;
 
  int sub_h_out = 1, sub_v_out = 1;
  
  /* Copy options because we want to change them */

  gavl_video_options_copy(&opt, &scaler->opt);

  /* TODO: If the image is smaller than the number of filter taps,
     reduce scaling algorithm */
  
  /* Copy formats */
  
  gavl_video_format_copy(&scaler->src_format, src_format);
  gavl_video_format_copy(&scaler->dst_format, dst_format);
  
  /* Check if we have rectangles */

  if(!opt.have_rectangles)
    {
    gavl_rectangle_f_set_all(&src_rect, &scaler->src_format);
    gavl_rectangle_i_set_all(&dst_rect, &scaler->dst_format);
    gavl_video_options_set_rectangles(&opt, &src_rect, &dst_rect);
    }
  
  /* Check how many fields we must handle */

  if((opt.deinterlace_mode == GAVL_DEINTERLACE_SCALE) &&
     (opt.conversion_flags & GAVL_FORCE_DEINTERLACE))
    {
    /* Deinterlacing mode */
    scaler->src_fields = 2;
    scaler->dst_fields = 1;

    /* Fake formats for scale context */
    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_NONE)
      scaler->src_format.interlace_mode = GAVL_INTERLACE_TOP_FIRST;
    scaler->dst_format.interlace_mode = GAVL_INTERLACE_NONE;
    }
  else if((opt.deinterlace_mode == GAVL_DEINTERLACE_SCALE) &&
          (scaler->dst_format.interlace_mode == GAVL_INTERLACE_NONE) &&
          (scaler->src_format.interlace_mode != GAVL_INTERLACE_NONE))
    {
    /* Deinterlacing mode */
    scaler->src_fields = 2;
    scaler->dst_fields = 1;
    }
  else if(scaler->src_format.interlace_mode != GAVL_INTERLACE_NONE)
    {
    /* Interlaced scaling */
    scaler->src_fields = 2;
    scaler->dst_fields = 2;
    }
  else
    {
    /* Progressive scaling */
    scaler->src_fields = 1;
    scaler->dst_fields = 1;
    }
  
  /* Copy destination rectangle so we know which subframe to take */
  gavl_rectangle_i_copy(&scaler->dst_rect, &opt.dst_rect);
  
#if 0
  fprintf(stderr, "gavl_video_scaler_init:\n");
  gavl_rectangle_f_dump(&scaler->opt.src_rect);
  fprintf(stderr, "\n");
  gavl_rectangle_i_dump(&scaler->dst_rect);
  fprintf(stderr, "\n");
#endif                      
  
  /* Crop source and destination rectangles to the formats */

  
  
  /* Align the destination rectangle to the output format */

  gavl_pixelformat_chroma_sub(scaler->dst_format.pixelformat, &sub_h_out, &sub_v_out);
  gavl_rectangle_i_align(&opt.dst_rect, sub_h_out, sub_v_out);
  
#if 0
  fprintf(stderr, "Initializing scaler:\n");
  fprintf(stderr, "Src format:\n");
  gavl_video_format_dump(&scaler->src_format);
  fprintf(stderr, "Dst format:\n");
  gavl_video_format_dump(&scaler->dst_format);

  fprintf(stderr, "Src rectangle:\n");
  gavl_rectangle_f_dump(&opt.src_rect);
  fprintf(stderr, "\nDst rectangle:\n");
  gavl_rectangle_i_dump(&scaler->dst_rect);
  fprintf(stderr, "\n");
#endif
  
  /* Check how many planes we have */
  
  if((scaler->src_format.pixelformat == GAVL_YUY2) ||
     (scaler->src_format.pixelformat == GAVL_UYVY))
    scaler->num_planes = 3;
  else
    scaler->num_planes = gavl_pixelformat_num_planes(scaler->src_format.pixelformat);
  
  if((scaler->src_fields == 2) && (!scaler->src_field))
    scaler->src_field = gavl_video_frame_create(NULL);
  
  if((scaler->dst_fields == 2) && (!scaler->dst_field))
    scaler->dst_field = gavl_video_frame_create(NULL);
  
  
#if 0
  fprintf(stderr, "src_fields: %d, dst_fields: %d, planes: %d\n",
          scaler->src_fields, scaler->dst_fields, scaler->num_planes);
#endif    

  /* Handle automatic mode selection */

  if(opt.scale_mode == GAVL_SCALE_AUTO)
    {
    if(opt.quality < 2)
      opt.scale_mode = GAVL_SCALE_NEAREST;
    else if(opt.quality <= 3)
      opt.scale_mode = GAVL_SCALE_BILINEAR;
    else
      opt.scale_mode = GAVL_SCALE_CUBIC_BSPLINE;
    }
  
  
  /* Now, initialize all fields and planes */

  if(scaler->src_fields > scaler->dst_fields)
    {
    /* Deinterlace mode */
    field = (scaler->opt.deinterlace_drop_mode == GAVL_DEINTERLACE_DROP_BOTTOM) ? 0 : 1;
    for(plane = 0; plane < scaler->num_planes; plane++)
      {
      if(!gavl_video_scale_context_init(&scaler->contexts[field][plane],
                                    &opt,
                                    plane, &scaler->src_format, &scaler->dst_format, field, 0,
                                    scaler->src_fields, scaler->dst_fields))
        return 0;
      }
    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_MIXED)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        if(!gavl_video_scale_context_init(&scaler->contexts[2][plane],
                                          &opt,
                                          plane, &scaler->src_format, &scaler->dst_format, 0, 0, 1, 1))
          return 0;
        }
      }
    }
  else
    {
    /* src_fields == dst_fields */
    for(field = 0; field < scaler->src_fields; field++)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        if(!gavl_video_scale_context_init(&scaler->contexts[field][plane],
                                          &opt,
                                          plane, &scaler->src_format, &scaler->dst_format, field, field,
                                          scaler->src_fields, scaler->dst_fields))
          return 0;
        }
      }

    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_MIXED)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        if(!gavl_video_scale_context_init(&scaler->contexts[2][plane],
                                          &opt,
                                          plane, &scaler->src_format, &scaler->dst_format, 0, 0, 1, 1))
          return 0;
        }
      }
    }
  return 1;
  }
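
For orientation, a minimal sketch (not from the quoted sources) of driving the scaler initialized above for a single frame; it reuses gavl_video_scaler_create() from Example #4 and assumes gavl_video_scaler_scale() as the per-frame entry point.

static int scale_frame_sketch(const gavl_video_format_t * src_format,
                              const gavl_video_format_t * dst_format,
                              const gavl_video_frame_t * src_frame,
                              gavl_video_frame_t * dst_frame)
  {
  int ret;
  gavl_video_scaler_t * scaler = gavl_video_scaler_create();

  /* Rectangles, quality etc. could be adjusted here via
     gavl_video_scaler_get_options(scaler) before initializing */

  ret = gavl_video_scaler_init(scaler, src_format, dst_format);
  if(ret)
    gavl_video_scaler_scale(scaler, src_frame, dst_frame); /* assumed API */

  gavl_video_scaler_destroy(scaler);
  return ret;
  }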
Example #22
int gavl_video_scaler_init_convolve(gavl_video_scaler_t * scaler,
                                    const gavl_video_format_t * format,
                                    int h_radius, const float * h_coeffs,
                                    int v_radius, const float * v_coeffs)
  {
  gavl_rectangle_f_t src_rect;
  gavl_rectangle_i_t  dst_rect;
  gavl_video_options_t opt;

  int field, plane;
 
  /* Copy options because we want to change them */

  gavl_video_options_copy(&opt, &scaler->opt);
  
  /* Copy formats */
  
  gavl_video_format_copy(&scaler->src_format, format);
  gavl_video_format_copy(&scaler->dst_format, format);
  
  gavl_rectangle_f_set_all(&src_rect, &scaler->src_format);
  gavl_rectangle_i_set_all(&dst_rect, &scaler->dst_format);
  gavl_video_options_set_rectangles(&opt, &src_rect, &dst_rect);
    
  /* Check how many fields we must handle */

  if(format->interlace_mode != GAVL_INTERLACE_NONE)
    {
    scaler->src_fields = 2;
    scaler->dst_fields = 2;
    }
  else
    {
    scaler->src_fields = 1;
    scaler->dst_fields = 1;
    }
  
  /* Copy destination rectangle so we know which subframe to take */
  gavl_rectangle_i_copy(&scaler->dst_rect, &opt.dst_rect);
  
  /* Check how many planes we have */
  
  if((scaler->src_format.pixelformat == GAVL_YUY2) ||
     (scaler->src_format.pixelformat == GAVL_UYVY))
    scaler->num_planes = 3;
  else
    scaler->num_planes = 
      gavl_pixelformat_num_planes(scaler->src_format.pixelformat);
  
  if((scaler->src_fields == 2) && (!scaler->src_field))
    scaler->src_field = gavl_video_frame_create(NULL);
  
  if((scaler->dst_fields == 2) && (!scaler->dst_field))
    scaler->dst_field = gavl_video_frame_create(NULL);
  
  /* Now, initialize all fields and planes */
  
  for(field = 0; field < scaler->src_fields; field++)
    {
    for(plane = 0; plane < scaler->num_planes; plane++)
      {
      gavl_video_scale_context_init_convolve(&scaler->contexts[field][plane],
                                             &opt,
                                             plane, format, 
                                             scaler->src_fields,
                                             h_radius, h_coeffs,
                                             v_radius, v_coeffs);
      }
    
    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_MIXED)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        gavl_video_scale_context_init_convolve(&scaler->contexts[2][plane],
                                               &opt,
                                               plane, format, 
                                               1,
                                               h_radius, h_coeffs,
                                               v_radius, v_coeffs);
        }
      }
    
    }
  return 1;
  }
Example #23
void ReadMedia::copyVideoFormat(gavl_video_format_t * dst ){ 
	lockState();
	//if (m_state == STATE_READY)
	gavl_video_format_copy(	dst, &m_video_format);
	unlockState();
}
Example #24
bool ReadMedia::initFormat() {

	const gavl_audio_format_t * open_audio_format;
	const gavl_video_format_t * open_video_format;

	// we use the m_vfifosize to see if the user app wants video or not
	// then, we set m_video_stream_count to 0 if it doesn't want video
	if (m_video_stream_count > 0 && m_vfifosize > 0) {
		open_video_format = bgav_get_video_format(m_file, 0);

		if (open_video_format->pixelformat == GAVL_PIXELFORMAT_NONE) {
			printf("!!!sorry, pixelformat is not recognized.\n");
			return false;
		}

		// let's check whether the formats are the same; if they are,
		// there is no reason to recreate the fifo or frames
		if ( gavl_video_formats_equal( &m_video_format, open_video_format) == 0 ) { 	
			// the formats are different
			gavl_video_format_copy (&m_video_format, open_video_format);
			if (m_video_frame != NULL)
				gavl_video_frame_destroy(m_video_frame);
			m_video_frame = gavl_video_frame_create(&m_video_format);
			gavl_video_frame_clear( m_video_frame, &m_video_format);
			if (m_fifovideo != NULL)
				delete m_fifovideo;
			m_fifovideo=  new FifoVideoFrames( m_vfifosize ,  &m_video_format); 
		}
	} else {
		m_video_stream_count = 0;
		m_veof = true;
	}

	// we use the m_afifosize to see if the user app wants audio or not
	// then, we set m_audio_stream_count to 0 if it doesn't want audio
	if (m_audio_stream_count > 0 && m_afifosize > 0) {  
		open_audio_format = bgav_get_audio_format(m_file, 0);    
	
		// we can get audio formats that are unknown
		if ( open_audio_format->sample_format == GAVL_SAMPLE_NONE) {
			printf("sorry, this file has unsupported audio.\n"); 
			return false;	
		}

		if ( gavl_audio_formats_equal(&m_audio_format, open_audio_format) == 0 ) { 	
			// audio formats are different
			// save the old spf
			int spf = m_audio_format.samples_per_frame; 
			gavl_audio_format_copy(&m_audio_format, open_audio_format);

			if (m_audio_frame != NULL) {
				gavl_audio_frame_destroy(m_audio_frame);
			}

			// set it back to original
			m_audio_format.samples_per_frame = spf ;

			m_audio_frame = gavl_audio_frame_create(&m_audio_format);
	
			gavl_audio_frame_mute( m_audio_frame, &m_audio_format);
			if( m_fifoaudio != NULL )
				delete m_fifoaudio;
			m_fifoaudio = new FifoAudioFrames( m_afifosize , &m_audio_format); 
		}
	} else {
		// user doesn't want audio
		m_audio_stream_count = 0;
		m_aeof=true;
	}


	m_length_in_gavltime = bgav_get_duration ( m_file, 0);
	m_length_in_seconds = gavl_time_to_seconds(  m_length_in_gavltime );
	m_num_samples = 0;
	m_num_frames = 0;

	if (m_audio_stream_count) {
		if ( bgav_can_seek_sample(m_file) == 1 ) {
			m_num_samples=	bgav_audio_duration ( m_file, 0) ;
	 } else { 
			m_num_samples=	gavl_time_to_samples( m_audio_format.samplerate ,  bgav_get_duration ( m_file, 0) );
		}
	}

	// set frames; we need to take care here with non-constant frame rates
	if(m_video_stream_count) {
		if ( bgav_can_seek_sample(m_file) == 1  && m_video_format.framerate_mode == GAVL_FRAMERATE_CONSTANT) { 
			m_num_frames =	bgav_video_duration ( m_file, 0)/ m_video_format.frame_duration;
		} else if ( bgav_can_seek_sample(m_file) == 1  && m_video_format.framerate_mode == GAVL_FRAMERATE_VARIABLE ) {
			// FIXME what to do with non constant frame rates?
			m_num_frames=0;
		} else { 
			m_num_frames =	gavl_time_to_frames( m_video_format.timescale, m_video_format.frame_duration ,  bgav_get_duration ( m_file, 0) );
		}
	}

  //	printf("m_num_frames =%lld, duration = %lld , vid_duration=%lld\n", 
	//		m_num_frames, bgav_get_duration ( m_file, 0),  bgav_video_duration ( m_file, 0) );
	// set seconds
	if ( bgav_can_seek_sample(m_file) == 1) {
		gavl_time_t atime=0,vtime=0;
		if ( m_audio_stream_count ) 
			atime =  gavl_samples_to_time( m_audio_format.samplerate, m_num_samples );
		if (m_video_stream_count &&  m_video_format.frame_duration > 0) {
			vtime =  gavl_frames_to_time( m_video_format.timescale, m_video_format.frame_duration, m_num_frames );
		} else if ( m_video_stream_count  ) { // non constant framerate			
			vtime = bgav_video_duration( m_file, 0);
		}
		// else rely on audio time
		m_length_in_gavltime = atime > vtime ? atime :vtime;
		m_length_in_seconds = gavl_time_to_seconds( m_length_in_gavltime );
		//printf("atime=%ld,  vtime=%ld, l_in_sec=%f\n", atime, vtime, m_length_in_seconds);
	} 

	m_pcm_seek = SEEK_NOTHING;
	m_frame_seek = SEEK_NOTHING;

	return true;
}
Example #25
void write_png(char * filename, gavl_video_format_t * format, gavl_video_frame_t * frame)
  {
  int i;
  unsigned char ** rows;
  gavl_video_options_t * opt;
  int color_type;
  FILE * output;

  png_structp png_ptr;
  png_infop   info_ptr;
  
  gavl_video_converter_t * cnv;
    
  gavl_video_format_t format_1;
  gavl_video_frame_t * frame_1 = NULL;

  
  if((format->pixelformat != GAVL_RGB_24) && (format->pixelformat != GAVL_RGBA_32))
    {
    cnv = gavl_video_converter_create();
    
    gavl_video_format_copy(&format_1, format);

    if(gavl_pixelformat_has_alpha(format->pixelformat))
      {
      format_1.pixelformat = GAVL_RGBA_32;
      color_type = PNG_COLOR_TYPE_RGBA;
      }
    else
      {
      format_1.pixelformat = GAVL_RGB_24;
      color_type = PNG_COLOR_TYPE_RGB;
      }
    frame_1 = gavl_video_frame_create(&format_1);

    opt = gavl_video_converter_get_options(cnv);
    gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_BLEND_COLOR);    
    gavl_video_converter_init(cnv, format, &format_1);
    
    gavl_video_convert(cnv, frame, frame_1);
    gavl_video_converter_destroy(cnv);
    }
  else if(format->pixelformat == GAVL_RGB_24)
    {
    color_type = PNG_COLOR_TYPE_RGB;
    }
  else
    {
    color_type = PNG_COLOR_TYPE_RGBA;
    }
  
  output = fopen(filename, "wb");
  if(!output)
    return;

  png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL,
                                         NULL, NULL);

  info_ptr = png_create_info_struct(png_ptr);
  setjmp(png_jmpbuf(png_ptr));
  png_init_io(png_ptr, output);
  
  png_set_IHDR(png_ptr, info_ptr,
               format->image_width,
               format->image_height,
               8, color_type, PNG_INTERLACE_NONE,
               PNG_COMPRESSION_TYPE_DEFAULT, PNG_FILTER_TYPE_DEFAULT);

  rows = malloc(format->image_height * sizeof(*rows));

  if(frame_1)
    {
    for(i = 0; i < format->image_height; i++)
      rows[i] = frame_1->planes[0] + i * frame_1->strides[0];
    }
  else
    {
    for(i = 0; i < format->image_height; i++)
      rows[i] = frame->planes[0] + i * frame->strides[0];
    }
  
  png_set_rows(png_ptr, info_ptr, rows);
  png_write_png(png_ptr, info_ptr, PNG_TRANSFORM_IDENTITY, NULL);

  png_destroy_write_struct(&png_ptr, &info_ptr);
  fclose(output);
  free(rows);
  if(frame_1)
    gavl_video_frame_destroy(frame_1);
  }
Example #26
gavl_video_frame_t * read_png(const char * filename,
                              gavl_video_format_t * format,
                              gavl_pixelformat_t pixelformat)
  {
  int i;
  unsigned char ** rows;
  
  gavl_video_converter_t * cnv;
  gavl_video_options_t * opt;
  gavl_video_format_t format_1;
  gavl_video_frame_t * frame, * frame_1;
    
  int bit_depth;
  int color_type;
  int has_alpha = 0;

  png_structp png_ptr;
  png_infop info_ptr;
  png_infop end_info;

  FILE * file;
  
  file = fopen(filename, "rb");

  if(!file)
    {
    fprintf(stderr, "Cannot open file %s\n", filename);
    return NULL;
    }
  
  png_ptr = png_create_read_struct
    (PNG_LIBPNG_VER_STRING, NULL,
     NULL, NULL);
  
  setjmp(png_jmpbuf(png_ptr));
  info_ptr = png_create_info_struct(png_ptr);


  end_info = png_create_info_struct(png_ptr);

  png_init_io(png_ptr, file);

  png_read_info(png_ptr, info_ptr);

  format->frame_width  = png_get_image_width(png_ptr, info_ptr);
  format->frame_height = png_get_image_height(png_ptr, info_ptr);

  format->image_width  = format->frame_width;
  format->image_height = format->frame_height;
  format->pixel_width = 1;
  format->pixel_height = 1;

  bit_depth  = png_get_bit_depth(png_ptr,  info_ptr);
  color_type = png_get_color_type(png_ptr, info_ptr);
  switch(color_type)
    {
    case PNG_COLOR_TYPE_GRAY:       /*  (bit depths 1, 2, 4, 8, 16) */
      if(bit_depth < 8)
#if GAVL_MAKE_BUILD(PNG_LIBPNG_VER_MAJOR, PNG_LIBPNG_VER_MINOR, PNG_LIBPNG_VER_RELEASE) < GAVL_MAKE_BUILD(1,2,9)
        png_set_gray_1_2_4_to_8(png_ptr);
#else
      png_set_expand_gray_1_2_4_to_8(png_ptr);
#endif
      if (png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
        png_set_tRNS_to_alpha(png_ptr);
        has_alpha = 1;
        }
      png_set_gray_to_rgb(png_ptr);
      break;
    case PNG_COLOR_TYPE_GRAY_ALPHA: /*  (bit depths 8, 16) */
      if(bit_depth == 16)
        png_set_strip_16(png_ptr);
      png_set_gray_to_rgb(png_ptr);
      break;
    case PNG_COLOR_TYPE_PALETTE:    /*  (bit depths 1, 2, 4, 8) */
      png_set_palette_to_rgb(png_ptr);
      if (png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
        png_set_tRNS_to_alpha(png_ptr);
        has_alpha = 1;
        }
      break;
    case PNG_COLOR_TYPE_RGB:        /*  (bit_depths 8, 16) */
      if(png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
        png_set_tRNS_to_alpha(png_ptr);
        has_alpha = 1;
        }
      if(bit_depth == 16)
        png_set_strip_16(png_ptr);
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:  /*  (bit_depths 8, 16) */
      if(bit_depth == 16)
        png_set_strip_16(png_ptr);
      has_alpha = 1;
      break;
    }
  if(has_alpha)
    format->pixelformat = GAVL_RGBA_32;
  else
    format->pixelformat = GAVL_RGB_24;

  frame = gavl_video_frame_create(format);
  rows = malloc(format->frame_height * sizeof(*rows));
  for(i = 0; i < format->frame_height; i++)
    rows[i] = frame->planes[0] + i * frame->strides[0];

  png_read_image(png_ptr, rows);
  png_read_end(png_ptr, end_info);

  png_destroy_read_struct(&png_ptr, &info_ptr,
                          &end_info);
  fclose(file);
  free(rows);
  
  /* Check whether to set up the converter */

  if(format->pixelformat != pixelformat)
    {
    cnv = gavl_video_converter_create();
    opt = gavl_video_converter_get_options(cnv);
    gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_BLEND_COLOR);    

    gavl_video_format_copy(&format_1, format);
    format_1.pixelformat = pixelformat;
    frame_1 = gavl_video_frame_create(&format_1);
    
    gavl_video_converter_init(cnv, format, &format_1);
    
    gavl_video_convert(cnv, frame, frame_1);
    gavl_video_converter_destroy(cnv);
    format->pixelformat = pixelformat;
    }
  else
    frame_1 = NULL;

  if(frame_1)
    {
    gavl_video_frame_destroy(frame);
    return frame_1;
    }
  else
    return frame;
  }
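
A hedged round-trip sketch tying the two png helpers above together; the file names are placeholders and error handling is minimal.

static int roundtrip_png(const char * in_file, char * out_file)
  {
  gavl_video_format_t format;
  gavl_video_frame_t * frame;

  memset(&format, 0, sizeof(format));

  /* read_png() fills the format and converts to RGBA if necessary */
  frame = read_png(in_file, &format, GAVL_RGBA_32);
  if(!frame)
    return 0;

  write_png(out_file, &format, frame);
  gavl_video_frame_destroy(frame);
  return 1;
  }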
Example #27
static Pixmap make_icon(bg_x11_window_t * win,
                        const gavl_video_frame_t * icon,
                        const gavl_video_format_t * format)
  {
  XImage * im;
  gavl_video_format_t out_format;
  gavl_video_converter_t * cnv;
  gavl_video_options_t * opt;
  int do_convert;
  const gavl_video_frame_t * image_frame;
  gavl_video_frame_t * out_frame;
  
  Pixmap ret;
  
  /* Create converter */
  cnv = gavl_video_converter_create();
  opt = gavl_video_converter_get_options(cnv);
  gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_IGNORE);

  /* Create pixmap */
  ret = XCreatePixmap(win->dpy, win->root, format->image_width,
                      format->image_height, win->depth);

  /* Set up format and converter */
  gavl_video_format_copy(&out_format, format);
  out_format.pixelformat =
    bg_x11_window_get_pixelformat(win->dpy, win->visual, win->depth);
  
  do_convert = gavl_video_converter_init(cnv, format, &out_format);
  if(do_convert)
    {
    out_frame = gavl_video_frame_create(&out_format);
    image_frame = out_frame;
    gavl_video_convert(cnv, icon, out_frame);
    }
  else
    {
    image_frame = icon;
    out_frame = NULL;
    }
  
  /* Make image */
  
  im = XCreateImage(win->dpy, win->visual, win->depth,
                    ZPixmap,
                    0, (char*)(image_frame->planes[0]),
                    format->image_width,
                    format->image_height,
                    32,
                    image_frame->strides[0]);
  
  XPutImage(win->dpy,            /* dpy        */
            ret, /* d          */
            win->gc,             /* gc         */
            im, /* image      */
            0,    /* src_x      */
            0,    /* src_y      */
            0,          /* dst_x      */
            0,          /* dst_y      */
            format->image_width,    /* src_width  */
            format->image_height);  /* src_height */
  
  /* Cleanup */
  gavl_video_converter_destroy(cnv);
  if(out_frame)
    gavl_video_frame_destroy(out_frame);
  
  im->data = NULL;
  XDestroyImage(im);
  
  /* Return */
  return ret;
  
  }
Example #28
int main(int argc, char ** argv)
  {
  bg_plugin_registry_t * plugin_reg;
  bg_cfg_registry_t * cfg_reg;
  bg_cfg_section_t * cfg_section;
  int i;
  char * tmp_path;
  bg_track_info_t * info;

  /* Plugins */
  bg_input_plugin_t * input_plugin;

  /* Plugin handles */
  bg_plugin_handle_t * input_handle = NULL;

  /* Frames */
  gavl_video_frame_t * frame = NULL;
  gavl_video_format_t in_format;
  gavl_video_format_t out_format;
  
  /* Filter chain */
  /* Create registries */
  
  char ** gmls = NULL;
  
  gavl_video_source_t * src;

  gavl_timer_t * timer = gavl_timer_create();
  
  cfg_reg = bg_cfg_registry_create();
  tmp_path =  bg_search_file_read("generic", "config.xml");
  bg_cfg_registry_load(cfg_reg, tmp_path);
  if(tmp_path)
    free(tmp_path);

  cfg_section = bg_cfg_registry_find_section(cfg_reg, "plugins");
  plugin_reg = bg_plugin_registry_create(cfg_section);

  /* Create filter chain */
  memset(&opt, 0, sizeof(opt));
  bg_gavl_video_options_init(&opt);
  fc = bg_video_filter_chain_create(&opt, plugin_reg);
  fv_parameters = bg_video_filter_chain_get_parameters(fc);
  fv_section =
    bg_cfg_section_create_from_parameters("fv", fv_parameters);
  opt_section =
    bg_cfg_section_create_from_parameters("opt", opt_parameters);
  
  /* Get commandline options */
  bg_cmdline_init(&app_data);

  update_global_options();
  
  bg_cmdline_parse(global_options, &argc, &argv, NULL);
  gmls = bg_cmdline_get_locations_from_args(&argc, &argv);

  if(!gmls || !gmls[0])
    {
    fprintf(stderr, "No input file given\n");
    return 0;
    }
  if(!gmls[1])
    {
    fprintf(stderr, "No output file given\n");
    return 0;
    }
  if(gmls[2])
    {
    fprintf(stderr, "Unknown argument %s\n", gmls[2]);
    }
  
  /* Load input plugin */
  if(!bg_input_plugin_load(plugin_reg,
                           gmls[0],
                           NULL,
                           &input_handle,
                           NULL, 0))
    {
    fprintf(stderr, "Cannot open %s\n", gmls[0]);
    return -1;
    }
  input_plugin = (bg_input_plugin_t*)(input_handle->plugin);

  info = input_plugin->get_track_info(input_handle->priv, 0);
  
  /* Select track */
  if(input_plugin->set_track)
    input_plugin->set_track(input_handle->priv, 0);
  
  if(!info->num_video_streams)
    {
    fprintf(stderr, "File %s has no video\n", gmls[0]);
    return -1;
    }

  /* Select first stream */
  input_plugin->set_video_stream(input_handle->priv, 0,
                                 BG_STREAM_ACTION_DECODE);
  
  /* Start playback */
  if(input_plugin->start)
    input_plugin->start(input_handle->priv);

  gavl_video_format_copy(&in_format, &info->video_streams[0].format);
  
  /* Initialize filter chain */

  src = input_plugin->get_video_source(input_handle->priv, 0);

  src = bg_video_filter_chain_connect(fc, src);
  
  
  if(frameno >= 0)
    {
    frame = NULL;
    gavl_timer_start(timer);
    for(i = 0; i < frameno+1; i++)
      {
      if(gavl_video_source_read_frame(src, &frame) != GAVL_SOURCE_OK)
        {
        fprintf(stderr, "Unexpected EOF\n");
        return -1;
        }
      }
    gavl_timer_stop(timer);
    }
  else
    {
    frame = NULL;
    gavl_timer_start(timer);
    while(1)
      {
      if(gavl_video_source_read_frame(src, &frame) != GAVL_SOURCE_OK)
        {
        break;
        }
      }
    gavl_timer_stop(timer);
    }

  fprintf(stderr, "Processing took %f seconds\n", gavl_time_to_seconds(gavl_timer_get(timer)));
  
  bg_plugin_registry_save_image(plugin_reg, gmls[1], frame, &out_format, NULL);

  /* Destroy everything */
  bg_plugin_unref(input_handle);
  bg_video_filter_chain_destroy(fc);
  bg_gavl_video_options_free(&opt);
  bg_plugin_registry_destroy(plugin_reg);
  bg_cfg_registry_destroy(cfg_reg);
  gavl_timer_destroy(timer);
  return 0;
  }
Example #29
static void *
make_thumbnail(bg_db_t * db,
               bg_db_object_t * obj,
               int max_width, int max_height,
               const char * mimetype)
  {
  bg_db_file_t * thumb;
  int ret = 0;
  /* Formats */
  gavl_video_format_t input_format;
  gavl_video_format_t output_format;
  
  /* Frames */
  gavl_video_frame_t * input_frame = NULL;

  gavl_video_converter_t * cnv = 0;
  const char * src_ext;
  bg_db_image_file_t * image = (bg_db_image_file_t *)obj;

  char * path_abs;
  bg_db_scan_item_t item;

  double ext_x, ext_y;
  double ar;
  
  src_ext = strrchr(image->file.path, '.');
  if(src_ext)
    src_ext++;
  
  /* Return early */
  if(image->file.mimetype &&
     !strcasecmp(image->file.mimetype, mimetype) &&
     (image->width <= max_width) &&
     (image->height <= max_height))
    {
    bg_db_object_ref(image);
    return obj;
    }

  memset(&input_format, 0, sizeof(input_format));
  
  cnv = gavl_video_converter_create();
  input_frame = bg_plugin_registry_load_image(db->plugin_reg,
                                              image->file.path,
                                              &input_format, NULL);

  ar = (double)input_format.image_width / (double)input_format.image_height;
  
  gavl_video_format_copy(&output_format, &input_format);
    
  ext_x = (double)input_format.image_width / (double)max_width;
  ext_y = (double)input_format.image_height / (double)max_height;

    
  if((ext_x > 1.0) || (ext_y > 1.0))
    {
    if(ext_x > ext_y) // Fit to max_width
      {
      output_format.image_width  = max_width;
      output_format.image_height = (int)((double)max_width / ar + 0.5);
      }
    else // Fit to max_height
      {
      output_format.image_height  = max_height;
      output_format.image_width = (int)((double)max_height * ar + 0.5);
      }
    }
  
  /* Save image */

  thumb = bg_db_object_create(db);
  
  path_abs = save_image(db, input_frame, &input_format, &output_format,
                        cnv, bg_db_object_get_id(thumb), mimetype);
  if(!path_abs)
    goto end;
  
  /* Create a new image object */

  memset(&item, 0, sizeof(item));
  if(!bg_db_scan_item_set(&item, path_abs))
    goto end;

  bg_log(BG_LOG_INFO, LOG_DOMAIN, "Made thumbnail %dx%d in %s",
         output_format.image_width,
         output_format.image_height, path_abs);
  
  
  thumb = bg_db_file_create_from_object(db, (bg_db_object_t*)thumb, ~0, &item, -1);
  if(!thumb)
    goto end;
  
  bg_db_object_set_type(thumb, BG_DB_OBJECT_THUMBNAIL);
  thumb->obj.ref_id = bg_db_object_get_id(image);
  bg_db_object_set_parent_id(db, thumb, -1);
  
  
  bg_db_scan_item_free(&item);
  
  ret = 1;
  
  end:

  if(input_frame)
    gavl_video_frame_destroy(input_frame);
  if(cnv)
    gavl_video_converter_destroy(cnv);

  if(!ret)
    {
    bg_db_object_delete(db, thumb);
    return NULL;
    }
  return thumb;
  }
Example #30
int gavl_video_converter_reinit(gavl_video_converter_t * cnv)
  {
  int csp_then_scale = 0;
  gavl_pixelformat_t tmp_csp = GAVL_PIXELFORMAT_NONE;
  
  int do_csp = 0;
  int do_scale = 0;
  int do_deinterlace = 0;
  
  int in_sub;
  int out_sub;

  int sub_h;
  int sub_v;

  gavl_video_format_t tmp_format;
  gavl_video_format_t tmp_format1;

  gavl_video_format_t * input_format;
  gavl_video_format_t * output_format;

  input_format = &cnv->input_format;
  output_format = &cnv->output_format;
  
  // #ifdef DEBUG
#if 0
  //  fprintf(stderr, "Initializing video converter, quality: %d, Flags: 0x%08x\n",
  //          cnv->options.quality, cnv->options.accel_flags);
  gavl_video_format_dump(input_format);
  gavl_video_format_dump(output_format);

#endif
  
  video_converter_cleanup(cnv);
  
  gavl_video_format_copy(&tmp_format, input_format);
    
  /* Adjust pixelformat */
  
  if((cnv->options.alpha_mode == GAVL_ALPHA_IGNORE) &&
     (tmp_format.pixelformat == GAVL_RGBA_32) &&
     (output_format->pixelformat == GAVL_RGB_32))
    tmp_format.pixelformat = GAVL_RGB_32;
  
  /* Check for pixelformat conversion */

  if(tmp_format.pixelformat != output_format->pixelformat)
    {
    do_csp = 1;
    }
  
  if(cnv->options.src_rect.x  || cnv->options.src_rect.y ||
     cnv->options.dst_rect.x  || cnv->options.dst_rect.y ||
     (cnv->options.src_rect.w &&
      (cnv->options.src_rect.w != tmp_format.image_width)) ||
     (cnv->options.src_rect.h &&
      (cnv->options.src_rect.h != tmp_format.image_height)) ||
     (cnv->options.dst_rect.w &&
      (cnv->options.dst_rect.w != output_format->image_width)) ||
     (cnv->options.dst_rect.h &&
      (cnv->options.dst_rect.h != output_format->image_height)) ||
     (tmp_format.image_width  != output_format->image_width) ||
     (tmp_format.image_height != output_format->image_height) ||
     (tmp_format.pixel_width  != output_format->pixel_width) ||
     (tmp_format.pixel_height != output_format->pixel_height))
    {
    do_scale = 1;
    }
    
  /* For quality levels above 3, we switch on scaling, if it provides a more
     accurate conversion. This is especially true if the chroma subsampling
     ratios change or when the chroma placement becomes different */
    
  if(((cnv->options.quality > 3) ||
      (cnv->options.conversion_flags & GAVL_RESAMPLE_CHROMA) ||
      do_scale))
    {
    if(do_csp)
      {
      /* Check, if pixelformat conversion can be replaced by simple scaling
         (True if only the subsampling changes) */
      if(gavl_pixelformat_can_scale(tmp_format.pixelformat, output_format->pixelformat))
        {
        do_scale = 1;
        do_csp = 0;
        }
      else
        {
        tmp_csp = gavl_pixelformat_get_intermediate(tmp_format.pixelformat,
                                                    output_format->pixelformat);
        if(tmp_csp != GAVL_PIXELFORMAT_NONE)
          do_scale = 1;
        }
      }
    /* Having different chroma placements also switches on scaling */
    else if(tmp_format.chroma_placement != output_format->chroma_placement)
      {
      do_scale = 1;
      }
    }

  /* Check if we must deinterlace */

  if(((input_format->interlace_mode != GAVL_INTERLACE_NONE) &&
      (output_format->interlace_mode == GAVL_INTERLACE_NONE)) ||
     (cnv->options.conversion_flags & GAVL_FORCE_DEINTERLACE))
    {
    // fprintf(stderr, "Forcing deinterlacing\n");
    if(cnv->options.deinterlace_mode == GAVL_DEINTERLACE_SCALE)
      do_scale = 1;
    else if(cnv->options.deinterlace_mode != GAVL_DEINTERLACE_NONE)
      do_deinterlace = 1;
    }
  
  /* Deinterlacing must always be the first step */

  if(do_deinterlace)
    {
    gavl_video_format_copy(&tmp_format1, &tmp_format);

    tmp_format1.interlace_mode = GAVL_INTERLACE_NONE;
    if(!add_context_deinterlace(cnv, &tmp_format, &tmp_format1))
      return -1;
    gavl_video_format_copy(&tmp_format, &tmp_format1);
    }
  
  if(do_csp && do_scale)
    {
    /* For qualities below 3, we scale in the pixelformat with the
       smaller subsampling */

    if(tmp_csp == GAVL_PIXELFORMAT_NONE)
      {
      gavl_pixelformat_chroma_sub(tmp_format.pixelformat, &sub_h, &sub_v);
      in_sub = sub_h * sub_v;
      
      gavl_pixelformat_chroma_sub(output_format->pixelformat, &sub_h, &sub_v);
      out_sub = sub_h * sub_v;

      if(((in_sub < out_sub) && cnv->options.quality < 3) ||
         ((in_sub >= out_sub) && cnv->options.quality >= 3))
        csp_then_scale = 1;
      }
    else
      {
      if(!gavl_pixelformat_can_scale(input_format->pixelformat, tmp_csp))
        csp_then_scale = 1;
#if 0
      fprintf(stderr, "converting %s -> %s -> %s (%d, %d)\n",
              gavl_pixelformat_to_string(input_format->pixelformat),
              gavl_pixelformat_to_string(tmp_csp),
              gavl_pixelformat_to_string(output_format->pixelformat),
              gavl_pixelformat_can_scale(input_format->pixelformat, tmp_csp),
              gavl_pixelformat_can_scale(tmp_csp, output_format->pixelformat));
#endif
      }
    
    if(csp_then_scale) /* csp then scale */
      {
#if 0
      fprintf(stderr, "csp then scale\n");
#endif
      /* csp (tmp_format -> tmp_format1) */
      
      gavl_video_format_copy(&tmp_format1, &tmp_format);

      if(tmp_csp != GAVL_PIXELFORMAT_NONE)
        tmp_format1.pixelformat = tmp_csp;
      else
        tmp_format1.pixelformat = output_format->pixelformat;

      if(!add_context_csp(cnv, &tmp_format, &tmp_format1))
        return -1;
      
      gavl_video_format_copy(&tmp_format, &tmp_format1);

      /* scale (tmp_format -> tmp_format1) */
      
      tmp_format1.pixelformat = output_format->pixelformat;

      tmp_format1.image_width  = output_format->image_width;
      tmp_format1.image_height = output_format->image_height;

      tmp_format1.pixel_width  = output_format->pixel_width;
      tmp_format1.pixel_height = output_format->pixel_height;

      tmp_format1.frame_width  = output_format->image_width;
      tmp_format1.frame_height = output_format->image_height;
      tmp_format1.chroma_placement = output_format->chroma_placement;
      tmp_format1.interlace_mode = output_format->interlace_mode;
      
      if(!add_context_scale(cnv, &tmp_format, &tmp_format1))
        return -1;

      gavl_video_format_copy(&tmp_format, &tmp_format1);
      
      }
    /* scale then csp */
    else
      {
#if 0
      fprintf(stderr, "scale then csp\n");
#endif
      /* scale (tmp_format -> tmp_format1) */

      gavl_video_format_copy(&tmp_format1, &tmp_format);

      tmp_format1.image_width  = output_format->image_width;
      tmp_format1.image_height = output_format->image_height;

      tmp_format1.pixel_width  = output_format->pixel_width;
      tmp_format1.pixel_height = output_format->pixel_height;

      tmp_format1.frame_width  = output_format->image_width;
      tmp_format1.frame_height = output_format->image_height;
      tmp_format1.interlace_mode = output_format->interlace_mode;
      
      if(tmp_csp != GAVL_PIXELFORMAT_NONE)
        {
        tmp_format1.pixelformat = tmp_csp;
        }
      tmp_format1.chroma_placement = output_format->chroma_placement;
      
      if(!add_context_scale(cnv, &tmp_format, &tmp_format1))
        return -1;

      gavl_video_format_copy(&tmp_format, &tmp_format1);

      /* csp (tmp_format -> tmp_format1) */

      tmp_format1.pixelformat = output_format->pixelformat;
      if(!add_context_csp(cnv, &tmp_format, &tmp_format1))
        return -1;

      gavl_video_format_copy(&tmp_format, &tmp_format1);
      }
    
    }

  else if(do_csp)
    {
    if(!add_context_csp(cnv, &tmp_format,
                        output_format))
      return -1;
    }

  else if(do_scale)
    {
    if(!add_context_scale(cnv, &tmp_format,
                          output_format))
      return -1;
    }

  /* Now, create temporary frames for the contexts */

  cnv->have_frames = 0;
  
  return cnv->num_contexts;
  }