Example #1
gavl_video_deinterlacer_t * gavl_video_deinterlacer_create()
  {
  gavl_video_deinterlacer_t * ret;
  ret = calloc(1, sizeof(*ret));
  gavl_video_options_set_defaults(&ret->opt);

  ret->src_field = gavl_video_frame_create(NULL);
  ret->dst_field = gavl_video_frame_create(NULL);
  return ret;
  }
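Note on the NULL argument above: gavl_video_frame_create(NULL) returns a frame structure without allocating any image memory; such frames are used as views onto data owned by other frames (fields, subframes). A minimal sketch of that pattern, assuming only gavl calls already used in these examples (the function and variable names here are illustrative):

#include <gavl/gavl.h>

/* Sketch: use a NULL-format frame as a view onto one field of src.
   src owns its image data; "field" only borrows plane pointers. */
static void field_view_sketch(const gavl_video_format_t * format,
                              gavl_video_frame_t * src)
  {
  gavl_video_frame_t * field = gavl_video_frame_create(NULL);

  /* Point "field" at the top field (index 0) of src */
  gavl_video_frame_get_field(format->pixelformat, src, field, 0);

  /* ... read or write via field->planes[] and field->strides[] ... */

  gavl_video_frame_null(field);    /* forget the borrowed pointers */
  gavl_video_frame_destroy(field); /* free only the frame struct   */
  }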
Example #2
static gavl_sink_status_t put_still(bg_ov_t * ov, gavl_video_frame_t*frame)
  {
  gavl_sink_status_t ret;

  ov->flags |= FLAG_STILL_MODE;
  if(!(ov->plugin->common.flags & BG_PLUGIN_OV_STILL))
    ov->flags |= FLAG_EMULATE_STILL;

  /* Save this frame */

  if(ov->flags & (FLAG_EMULATE_STILL|FLAG_EMULATE_OVL))
    {
    if(!ov->still_frame)
      ov->still_frame = gavl_video_frame_create(&ov->format);
    gavl_video_frame_copy(&ov->format, ov->still_frame, frame);
    ov->still_frame->duration = -1;
    }

  if(ov->flags & FLAG_EMULATE_OVL)
    blend_overlays(ov, frame);
  
  LOCK(ov);
  ret = gavl_video_sink_put_frame(ov->sink_int, frame);
  UNLOCK(ov);

  bg_ov_handle_events(ov);
  
  return ret;
  }
Example #3
static void * create_swapfields()
  {
  swapfields_priv_t * ret;
  ret = calloc(1, sizeof(*ret));
  ret->cpy_field = gavl_video_frame_create(NULL);
  return ret;
  }
Example #4
void bg_recorder_video_finalize_encode(bg_recorder_t * rec)
  {
  bg_recorder_video_stream_t * vs = &rec->vs;
  bg_encoder_get_video_format(rec->enc, vs->enc_index, &vs->enc_format);

  /*
   *  The encoder might have changed the framerate.
   *  This affects the pipe_format as well, but not the other formats
   */
  
  vs->pipe_format.framerate_mode = vs->enc_format.framerate_mode;
  vs->pipe_format.timescale      = vs->enc_format.timescale;
  vs->pipe_format.frame_duration = vs->enc_format.frame_duration;
  
  bg_video_filter_chain_set_out_format(vs->fc,
                                       &vs->pipe_format);
  
  vs->do_convert_enc = gavl_video_converter_init(vs->enc_cnv, &vs->pipe_format,
                                                 &vs->enc_format);

  if(vs->do_convert_enc)
    vs->enc_frame = gavl_video_frame_create(&vs->enc_format);

  vs->flags |= STREAM_ENCODE_OPEN;
  
  }
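The pattern above (initialize the converter and only allocate an extra frame when the formats actually differ) recurs in several later examples. A condensed sketch, assuming only the gavl calls already shown; convert_if_needed and its parameters are made-up names:

#include <gavl/gavl.h>

/* Sketch: convert only if gavl_video_converter_init() reports that
   a conversion is needed; otherwise pass the input frame through. */
static gavl_video_frame_t *
convert_if_needed(gavl_video_converter_t * cnv,
                  const gavl_video_format_t * in_format,
                  const gavl_video_format_t * out_format,
                  gavl_video_frame_t * in_frame)
  {
  gavl_video_frame_t * out_frame;

  if(!gavl_video_converter_init(cnv, in_format, out_format))
    return in_frame; /* formats match: no conversion, no new frame */

  out_frame = gavl_video_frame_create(out_format);
  gavl_video_convert(cnv, in_frame, out_frame);
  return out_frame; /* caller owns this frame and must destroy it */
  }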
Example #5
static int init_schroedinger(bgav_stream_t * s)
  {
  schroedinger_priv_t * priv;

  schro_init();
  
  priv = calloc(1, sizeof(*priv));
  priv->last_pts = GAVL_TIME_UNDEFINED;
  
  s->decoder_priv = priv;

  priv->dec = schro_decoder_new();

  priv->frame = gavl_video_frame_create(NULL);
  s->vframe = priv->frame;
  
  if(decode_picture(s) != GAVL_SOURCE_OK) /* Get format */
    return 0;

  gavl_metadata_set(&s->m, GAVL_META_FORMAT,
                    "Dirac");

  if(!s->ext_data)
    priv->header_sent = 1;
  
  return 1;
  }
Example #6
livido_init_f	init_instance( livido_port_t *my_instance )
{
	int width = 0, height = 0;

	lvd_extract_dimensions( my_instance, "out_channels", &width, &height );

	scale0tilt_instance_t* inst = (scale0tilt_instance_t*)livido_malloc(sizeof(scale0tilt_instance_t));

	livido_memset( inst, 0, sizeof(scale0tilt_instance_t) );

	inst->w = width;
	inst->h = height;
	inst->sx = 1.0;
	inst->sy = 1.0;
	
	inst->format_src.frame_width  = inst->w;
	inst->format_src.frame_height = inst->h;
	inst->format_src.image_width  = inst->w;
	inst->format_src.image_height = inst->h;
	inst->format_src.pixel_width = 1;
	inst->format_src.pixel_height = 1;
	inst->format_src.pixelformat = GAVL_YUVJ_444_P;

	inst->video_scaler = gavl_video_scaler_create();
	inst->frame_src = gavl_video_frame_create( NULL );
	inst->frame_dst = gavl_video_frame_create( NULL );

	inst->frame_src->strides[0] = width;
	inst->frame_src->strides[1] = width;
	inst->frame_src->strides[2] = width;

	inst->frame_dst->strides[0] = width;
	inst->frame_dst->strides[1] = width;
	inst->frame_dst->strides[2] = width;

	update_scaler(inst);
	
	inst->temp = gavl_video_frame_create( &(inst->format_src) );
	inst->temp_alpha = gavl_video_frame_create( &(inst->format_src) );
	
	livido_property_set( my_instance, "PLUGIN_private", LIVIDO_ATOM_TYPE_VOIDPTR,1, &inst);

	return LIVIDO_NO_ERROR;
}
Example #7
gavl_video_scaler_t * gavl_video_scaler_create()
  {
  gavl_video_scaler_t * ret;
  int i, j;
  ret = calloc(1, sizeof(*ret));

  ret->src = gavl_video_frame_create(NULL);
  ret->dst = gavl_video_frame_create(NULL);

  gavl_video_options_set_defaults(&ret->opt);

  for(i = 0; i < 3; i++)
    {
    for(j = 0; j < GAVL_MAX_PLANES; j++)
      ret->contexts[i][j].opt = &ret->opt;
    }
  
  return ret;
  }
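For context, a minimal lifecycle around the scaler created above, combining calls from Examples #7 and #19; formats and frames are assumed to be prepared by the caller, and scale_once is an illustrative name:

#include <gavl/gavl.h>

/* Sketch: create a scaler, initialize it for the given formats,
   scale a single frame and clean up. */
static int scale_once(const gavl_video_format_t * src_format,
                      const gavl_video_format_t * dst_format,
                      const gavl_video_frame_t * src,
                      gavl_video_frame_t * dst)
  {
  gavl_video_scaler_t * scaler = gavl_video_scaler_create();

  if(!gavl_video_scaler_init(scaler, src_format, dst_format))
    {
    gavl_video_scaler_destroy(scaler);
    return 0;
    }
  gavl_video_scaler_scale(scaler, src, dst);
  gavl_video_scaler_destroy(scaler);
  return 1;
  }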
Example #8
static Pixmap make_mask(bg_x11_window_t * win,
                        const gavl_video_frame_t * icon,
                        const gavl_video_format_t * format)
  {
  gavl_video_frame_t * alpha_frame;
  gavl_video_format_t alpha_format;
  char * image_data;
  
  Pixmap ret;
  int bytes_per_line;
  
  /* Extract alpha */
  if(!gavl_get_color_channel_format(format,
                                    &alpha_format,
                                    GAVL_CCH_ALPHA))
    return None; /* No alpha */

  alpha_frame = gavl_video_frame_create(&alpha_format);

  gavl_video_frame_extract_channel(format,
                                   GAVL_CCH_ALPHA,
                                   icon,
                                   alpha_frame);
  
  /* Create image */
  
  bytes_per_line = (format->image_width + 7) / 8;
  image_data = calloc(1, bytes_per_line * format->image_height);
  
  switch(alpha_format.pixelformat)
    {
    case GAVL_GRAY_8:
      create_mask_8(&alpha_format, alpha_frame, image_data, bytes_per_line);
      break;
    case GAVL_GRAY_16:
      create_mask_16(&alpha_format, alpha_frame, image_data, bytes_per_line);
      break;
    case GAVL_GRAY_FLOAT:
      create_mask_float(&alpha_format, alpha_frame, image_data, bytes_per_line);
      break;
    default:
      break;
    }
  ret = XCreateBitmapFromData(win->dpy, win->root,
                              image_data,
                              format->image_width,
                              format->image_height);
  
  gavl_video_frame_destroy(alpha_frame);
  free(image_data);
  return ret;
  }
Example #9
gavl_overlay_t * bg_ov_create_overlay(bg_ov_t * ov, int id)
  {
  gavl_overlay_t * ret;
  if(!(ov->flags & FLAG_EMULATE_OVL))
    {
    LOCK(ov);
    ret = ov->plugin->create_overlay(ov->priv, id);
    UNLOCK(ov);
    return ret;
    }
  else
    {
    ret = gavl_video_frame_create(&ov->ovl_str[id].format);
    gavl_video_frame_clear(ret, &ov->ovl_str[id].format);
    return ret;
    }
  }
Example #10
static void alloc_frames(gavl_video_converter_t * cnv)
  {
  gavl_video_convert_context_t * tmp_ctx;

  if(cnv->have_frames)
    return;

  tmp_ctx = cnv->first_context;
  while(tmp_ctx && tmp_ctx->next)
    {
    tmp_ctx->output_frame =
      gavl_video_frame_create(&tmp_ctx->output_format);
    gavl_video_frame_clear(tmp_ctx->output_frame, &tmp_ctx->output_format);
    
    tmp_ctx->next->input_frame = tmp_ctx->output_frame;
    tmp_ctx = tmp_ctx->next;
    }

  cnv->have_frames = 1;
  }
Example #11
static int write_image_tga(void * priv, gavl_video_frame_t * frame)
  {
  tga_t * tga = priv;
  gavl_video_frame_t * tmp_frame;
  int result, ret = 1;

  errno = 0;

  if(tga->format.pixelformat == GAVL_RGBA_32)
    {
    tmp_frame = gavl_video_frame_create(&tga->format);
    gavl_video_frame_copy(&tga->format, tmp_frame, frame);
    if(tga->rle)
      {
      result = tga_write_rgb_rle(tga->filename, tmp_frame->planes[0],
                                 tga->format.image_width,
                                 tga->format.image_height, 32,
                                 tmp_frame->strides[0]);
      }
    else
      {
      result = tga_write_rgb(tga->filename, tmp_frame->planes[0],
                             tga->format.image_width,
                             tga->format.image_height, 32,
                             tmp_frame->strides[0]);
      }
    gavl_video_frame_destroy(tmp_frame);
    }
  else
    {
    if(tga->rle)
      {
      result = tga_write_bgr_rle(tga->filename, frame->planes[0],
                                 tga->format.image_width,
                                 tga->format.image_height, 24,
                                 frame->strides[0]);
      }
    else
      {
      result = tga_write_bgr(tga->filename, frame->planes[0],
                             tga->format.image_width,
                             tga->format.image_height, 24,
                             frame->strides[0]);
      }
    }

  if(result != TGA_NOERR)
    {
    if(errno)
      bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Cannot save %s: %s",
             tga->filename, strerror(errno));
    else
      bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Cannot save %s: %s",
             tga->filename, tga_error(result));
    ret = 0;
    }

  free(tga->filename);
  tga->filename = NULL;
  
  return ret;
  }
Example #12
void * bg_recorder_video_thread(void * data)
  {
  bg_recorder_t * rec = data;
  bg_recorder_video_stream_t * vs = &rec->vs;
  gavl_video_frame_t * monitor_frame = NULL;
  gavl_time_t idle_time = GAVL_TIME_SCALE / 100; // 10 ms
  bg_thread_wait_for_start(vs->th);

  gavl_timer_set(vs->timer, 0);
  gavl_timer_start(vs->timer);
  
  while(1)
    {
    if(!bg_thread_check(vs->th))
      break;

    if(bg_recorder_video_get_eof(vs))
      {
      gavl_time_delay(&idle_time);
      continue;
      }
    
    vs->pipe_frame = NULL;
    
    if(vs->flags & STREAM_MONITOR)
      {
      monitor_frame = gavl_video_sink_get_frame(vs->monitor_sink);
      if(!vs->do_convert_monitor)
        vs->pipe_frame = monitor_frame;
      }

    if(!vs->pipe_frame)
      {
      if(!vs->pipe_frame_priv)
        vs->pipe_frame_priv = gavl_video_frame_create(&vs->pipe_format);
      vs->pipe_frame = vs->pipe_frame_priv;
      }
    if(!vs->in_func(vs->in_data, vs->pipe_frame, vs->in_stream))
      {
      bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Read failed (device unplugged?)");
      bg_recorder_video_set_eof(vs, 1);
      continue; // Need to go to bg_thread_check to stop the thread cleanly
      }
    /* Check whether to make a snapshot */
    check_snapshot(rec);
    
    /* Monitor */
    if(vs->flags & STREAM_MONITOR)
      {
      if(vs->do_convert_monitor)
        {
        if(!monitor_frame)
          {
          if(!vs->monitor_frame_priv)
            vs->monitor_frame_priv =
              gavl_video_frame_create(&vs->monitor_format);
          monitor_frame = vs->monitor_frame_priv;
          }
        gavl_video_convert(vs->monitor_cnv, vs->pipe_frame, monitor_frame);
          
        }
      else if(!monitor_frame)
        monitor_frame = vs->pipe_frame;
      
      gavl_video_sink_put_frame(vs->monitor_sink, monitor_frame);
      }
    if(vs->monitor_plugin && vs->monitor_plugin->handle_events)
      vs->monitor_plugin->handle_events(vs->monitor_handle->priv);

    /* Encoding */
    if(vs->flags & STREAM_ENCODE_OPEN)
      {
      bg_recorder_update_time(rec,
                              gavl_time_unscale(vs->pipe_format.timescale,
                                                vs->pipe_frame->timestamp));
      if(vs->do_convert_enc)
        {
        gavl_video_convert(vs->enc_cnv, vs->pipe_frame, vs->enc_frame);
        bg_encoder_write_video_frame(rec->enc, vs->enc_frame, vs->enc_index);
        }
      else
        bg_encoder_write_video_frame(rec->enc, vs->pipe_frame, vs->enc_index);
      }
    
    /* */
    
    }
  gavl_timer_stop(vs->timer);
  
  return NULL;
  }
Example #13
static char * save_image(bg_db_t * db,
                         gavl_video_frame_t * in_frame,
                         gavl_video_format_t * in_format,
                         gavl_video_format_t * out_format,
                         gavl_video_converter_t * cnv,
                         int64_t id, const char * mimetype)
  {
  int result = 0;
  int do_convert;
  gavl_video_frame_t * output_frame = NULL;
  bg_image_writer_plugin_t * output_plugin;
  bg_plugin_handle_t * output_handle = NULL;
  const bg_plugin_info_t * plugin_info;
  iw_t iw;
  bg_iw_callbacks_t cb;

  char * out_file_base = bg_sprintf("gmerlin-db/thumbnails/%016"PRId64, id);

  out_file_base = bg_db_filename_to_abs(db, out_file_base);
  memset(&iw, 0, sizeof(iw));
  memset(&cb, 0, sizeof(cb));
  
  cb.create_output_file = create_file;
  cb.data = &iw;
  
  out_format->pixel_width = 1;
  out_format->pixel_height = 1;
  out_format->interlace_mode = GAVL_INTERLACE_NONE;

  out_format->frame_width = out_format->image_width;
  out_format->frame_height = out_format->image_height;

  plugin_info =
    bg_plugin_find_by_mimetype(db->plugin_reg, mimetype, BG_PLUGIN_IMAGE_WRITER);
  
  if(!plugin_info)
    {
    bg_log(BG_LOG_ERROR, LOG_DOMAIN, "No plugin for %s", mimetype);
    goto end;
    }

  output_handle = bg_plugin_load(db->plugin_reg, plugin_info);

  if(!output_handle)
    {
    bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Loading %s failed", plugin_info->long_name);
    goto end;
    }
  
  output_plugin = (bg_image_writer_plugin_t*)output_handle->plugin;

  output_plugin->set_callbacks(output_handle->priv, &cb);
  
  if(!output_plugin->write_header(output_handle->priv,
                                  out_file_base, out_format, NULL))
    {
    bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Writing image header failed");
    goto end;
    }

  /* Initialize video converter */
  do_convert = gavl_video_converter_init(cnv, in_format, out_format);

  if(do_convert)
    {
    output_frame = gavl_video_frame_create(out_format);
    gavl_video_frame_clear(output_frame, out_format);
    gavl_video_convert(cnv, in_frame, output_frame);
    if(!output_plugin->write_image(output_handle->priv,
                                   output_frame))
      {
      bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Writing image failed");
      goto end;
      }
    }
  else
    {
    if(!output_plugin->write_image(output_handle->priv,
                                   in_frame))
      {
      bg_log(BG_LOG_ERROR, LOG_DOMAIN, "Writing image failed");
      goto end;
      }
    }
  result = 1;
  
  end:

  if(output_frame)
    gavl_video_frame_destroy(output_frame);
  if(output_handle)
    bg_plugin_unref(output_handle);
  if(out_file_base)
    free(out_file_base);
  
  if(result)
    return iw.filename;
  
  if(iw.filename)
    free(iw.filename);
  return NULL;
  
  }
Example #14
static Pixmap make_icon(bg_x11_window_t * win,
                        const gavl_video_frame_t * icon,
                        const gavl_video_format_t * format)
  {
  XImage * im;
  gavl_video_format_t out_format;
  gavl_video_converter_t * cnv;
  gavl_video_options_t * opt;
  int do_convert;
  const gavl_video_frame_t * image_frame;
  gavl_video_frame_t * out_frame;
  
  Pixmap ret;
  
  /* Create converter */
  cnv = gavl_video_converter_create();
  opt = gavl_video_converter_get_options(cnv);
  gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_IGNORE);

  /* Create pixmap */
  ret = XCreatePixmap(win->dpy, win->root, format->image_width,
                      format->image_height, win->depth);

  /* Set up format and converter */
  gavl_video_format_copy(&out_format, format);
  out_format.pixelformat =
    bg_x11_window_get_pixelformat(win->dpy, win->visual, win->depth);
  
  do_convert = gavl_video_converter_init(cnv, format, &out_format);
  if(do_convert)
    {
    out_frame = gavl_video_frame_create(&out_format);
    image_frame = out_frame;
    gavl_video_convert(cnv, icon, out_frame);
    }
  else
    {
    image_frame = icon;
    out_frame = NULL;
    }
  
  /* Make image */
  
  im = XCreateImage(win->dpy, win->visual, win->depth,
                    ZPixmap,
                    0, (char*)(image_frame->planes[0]),
                    format->image_width,
                    format->image_height,
                    32,
                    image_frame->strides[0]);
  
  XPutImage(win->dpy,            /* dpy        */
            ret, /* d          */
            win->gc,             /* gc         */
            im, /* image      */
            0,    /* src_x      */
            0,    /* src_y      */
            0,          /* dst_x      */
            0,          /* dst_y      */
            format->image_width,    /* src_width  */
            format->image_height);  /* src_height */
  
  /* Cleanup */
  gavl_video_converter_destroy(cnv);
  if(out_frame)
    gavl_video_frame_destroy(out_frame);
  
  im->data = NULL;
  XDestroyImage(im);
  
  /* Return */
  return ret;
  
  }
Example #15
int bg_recorder_video_init(bg_recorder_t * rec)
  {
  bg_recorder_video_stream_t * vs = &rec->vs;

  vs->frame_counter = 0;
  vs->fps_frame_time = 0;
  vs->fps_frame_counter = 0;

  /* Open input */
  if(!vs->input_plugin->open(vs->input_handle->priv, NULL,
                             &vs->input_format, &vs->m))
    return 0;
  bg_metadata_date_now(&vs->m, GAVL_META_DATE_CREATE);
  
  vs->flags |= STREAM_INPUT_OPEN;
  

  vs->in_func   = read_video_internal;
  vs->in_stream = 0;
  vs->in_data   = rec;
  
  /* Set up filter chain */

  bg_video_filter_chain_connect_input(vs->fc,
                                      vs->in_func,
                                      vs->in_data,
                                      vs->in_stream);
  
  vs->in_func = bg_video_filter_chain_read;
  vs->in_data = vs->fc;
  vs->in_stream = 0;
  
  bg_video_filter_chain_init(vs->fc, &vs->input_format, &vs->pipe_format);
  
  /* Set up monitoring */

  if(vs->flags & STREAM_MONITOR)
    {
    gavl_video_format_copy(&vs->monitor_format, &vs->pipe_format);
    if(!vs->monitor_plugin->open(vs->monitor_handle->priv,
                                 &vs->monitor_format, 1))
      {
      bg_log(BG_LOG_ERROR, LOG_DOMAIN,
             "Opening monitor plugin failed");
      return 0;
      }
    vs->monitor_sink =
      vs->monitor_plugin->get_sink(vs->monitor_handle->priv);
    
    vs->do_convert_monitor =
      gavl_video_converter_init(vs->monitor_cnv,
                                &vs->pipe_format,
                                &vs->monitor_format);
    vs->flags |= STREAM_MONITOR_OPEN;

    if(vs->monitor_plugin->show_window && !rec->display_string)
      {
      vs->monitor_plugin->show_window(vs->monitor_handle->priv, 1);
      if(vs->monitor_plugin->set_window_title)
        vs->monitor_plugin->set_window_title(vs->monitor_handle->priv, "Gmerlin recorder "VERSION);
      }
    }
  else
    vs->do_convert_monitor = 0;
  
  /* Set up encoding */

  if(vs->flags & STREAM_ENCODE)
    {
    vs->enc_index =
      bg_encoder_add_video_stream(rec->enc, &vs->m, &vs->pipe_format, 0, NULL);
    }
  
  /* Create frames */

#if 0
  if(vs->flags & STREAM_MONITOR)
    {
    if(vs->monitor_plugin->create_frame)
      vs->monitor_frame = vs->monitor_plugin->create_frame(vs->monitor_handle->priv);
    else
      vs->monitor_frame = gavl_video_frame_create(&vs->monitor_format);
    }
#endif
  
  /* Initialize snapshot counter */
  
  
  return 1;
  }
Example #16
static gavl_source_status_t
read_func(void * priv, gavl_video_frame_t ** frame)
  {
  gavl_source_status_t st;

  swapfields_priv_t * vp;
  int64_t pts;
  gavl_video_frame_t * swp;
  
  vp = priv;

  /* Do nothing */
  if(vp->noop)
    return gavl_video_source_read_frame(vp->in_src, frame);
  
  if(!vp->fields[0])
    vp->fields[0] = gavl_video_frame_create(&vp->field_format[0]);
  if(!vp->fields[1])
    vp->fields[1] = gavl_video_frame_create(&vp->field_format[1]);
  
  if(vp->init)
    {
    if((st = gavl_video_source_read_frame(vp->in_src, frame)) !=
       GAVL_SOURCE_OK)
      return st;
    
    vp->last_field = vp->fields[0];
    vp->next_field = vp->fields[1];
    
    /* Save field for later use */
    gavl_video_frame_get_field(vp->format.pixelformat,
                               *frame,
                               vp->cpy_field, vp->delay_field);
    
    gavl_video_frame_copy(&vp->field_format[vp->delay_field],
                          vp->last_field, vp->cpy_field);
    vp->init = 0;
    vp->next_pts = (*frame)->timestamp * vp->framerate_mult +
      ((*frame)->duration * vp->framerate_mult) / 2;
    }

  if((st = gavl_video_source_read_frame(vp->in_src, frame)) !=
     GAVL_SOURCE_OK)
    return st;
  
  gavl_video_frame_get_field(vp->format.pixelformat,
                             *frame,
                             vp->cpy_field, vp->delay_field);

  /* Save field for later use */
  gavl_video_frame_copy(&vp->field_format[vp->delay_field],
                        vp->next_field, vp->cpy_field);
  
  /* Copy field from last frame */
  gavl_video_frame_copy(&vp->field_format[vp->delay_field],
                        vp->cpy_field, vp->last_field);

  /* Swap pointers */
  swp = vp->next_field;
  vp->next_field = vp->last_field;
  vp->last_field = swp;
  
  /* Adjust pts */
  pts = (*frame)->timestamp;
  (*frame)->timestamp = vp->next_pts;

  vp->next_pts = pts * vp->framerate_mult +
    ((*frame)->duration * vp->framerate_mult) / 2;

  (*frame)->duration *= vp->framerate_mult;

  //  fprintf(stderr, "PTS: %ld duration: %ld\n",
  //          frame->timestamp, frame->duration);
  
  return GAVL_SOURCE_OK;
  }
Example #17
static void check_snapshot(bg_recorder_t * rec)
  {
  int doit = 0;
  char * filename;
  gavl_time_t frame_time;
  
  bg_recorder_video_stream_t * vs = &rec->vs;

  frame_time =
    gavl_time_unscale(vs->pipe_format.timescale,
                      vs->pipe_frame->timestamp);
  
  /* Check whether to make a snapshot */

  pthread_mutex_lock(&rec->snapshot_mutex);
  if(rec->snapshot)
    {
    doit = 1;
    rec->snapshot = 0;
    }
  
  if(!doit &&
     ((vs->flags & STREAM_SNAPSHOT_AUTO) &&
      (!(vs->flags & STREAM_SNAPSHOT_INIT) ||
       frame_time >= vs->last_snapshot_time + vs->snapshot_interval)))
    {
    doit = 1;
    }
  pthread_mutex_unlock(&rec->snapshot_mutex);
  
  if(!doit)
    return;
  
  filename = create_snapshot_filename(rec, NULL);
  
  /* Initialize snapshot plugin */
  if(!(vs->flags & STREAM_SNAPSHOT_INIT))
    gavl_video_format_copy(&vs->snapshot_format,
                           &vs->pipe_format);

  if(!vs->snapshot_plugin->write_header(vs->snapshot_handle->priv,
                                        filename,
                                        &vs->snapshot_format,
                                        &rec->m))
    return;
  
  if(!(vs->flags & STREAM_SNAPSHOT_INIT))
    {
    vs->do_convert_snapshot =
      gavl_video_converter_init(vs->snapshot_cnv,
                                &vs->pipe_format,
                                &vs->snapshot_format);

    if(vs->do_convert_snapshot)
      vs->snapshot_frame = gavl_video_frame_create(&vs->snapshot_format);
    vs->flags |= STREAM_SNAPSHOT_INIT;
    }

  if(vs->do_convert_snapshot)
    {
    gavl_video_convert(vs->snapshot_cnv, vs->pipe_frame,
                       vs->snapshot_frame);
    vs->snapshot_plugin->write_image(vs->snapshot_handle->priv,
                                     vs->snapshot_frame);
    }
  else
    {
    vs->snapshot_plugin->write_image(vs->snapshot_handle->priv,
                                     vs->pipe_frame);
    }
  vs->snapshot_counter++;
  vs->last_snapshot_time = frame_time;
  }
Example #18
gavl_video_frame_t * read_png(const char * filename,
                              gavl_video_format_t * format,
                              gavl_pixelformat_t pixelformat)
  {
  int i;
  unsigned char ** rows;
  
  gavl_video_converter_t * cnv;
  gavl_video_options_t * opt;
  gavl_video_format_t format_1;
  gavl_video_frame_t * frame, * frame_1;
    
  int bit_depth;
  int color_type;
  int has_alpha = 0;

  png_structp png_ptr;
  png_infop info_ptr;
  png_infop end_info;

  FILE * file;
  
  file = fopen(filename, "rb");

  if(!file)
    {
    fprintf(stderr, "Cannot open file %s\n", filename);
    return NULL;
    }
  
  png_ptr = png_create_read_struct
    (PNG_LIBPNG_VER_STRING, NULL,
     NULL, NULL);
  
  setjmp(png_jmpbuf(png_ptr));
  info_ptr = png_create_info_struct(png_ptr);


  end_info = png_create_info_struct(png_ptr);

  png_init_io(png_ptr, file);

  png_read_info(png_ptr, info_ptr);

  format->frame_width  = png_get_image_width(png_ptr, info_ptr);
  format->frame_height = png_get_image_height(png_ptr, info_ptr);

  format->image_width  = format->frame_width;
  format->image_height = format->frame_height;
  format->pixel_width = 1;
  format->pixel_height = 1;

  bit_depth  = png_get_bit_depth(png_ptr,  info_ptr);
  color_type = png_get_color_type(png_ptr, info_ptr);
  switch(color_type)
    {
    case PNG_COLOR_TYPE_GRAY:       /*  (bit depths 1, 2, 4, 8, 16) */
      if(bit_depth < 8)
#if GAVL_MAKE_BUILD(PNG_LIBPNG_VER_MAJOR, PNG_LIBPNG_VER_MINOR, PNG_LIBPNG_VER_RELEASE) < GAVL_MAKE_BUILD(1,2,9)
        png_set_gray_1_2_4_to_8(png_ptr);
#else
      png_set_expand_gray_1_2_4_to_8(png_ptr);
#endif
      if (png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
        png_set_tRNS_to_alpha(png_ptr);
        has_alpha = 1;
        }
      png_set_gray_to_rgb(png_ptr);
      break;
    case PNG_COLOR_TYPE_GRAY_ALPHA: /*  (bit depths 8, 16) */
      if(bit_depth == 16)
        png_set_strip_16(png_ptr);
      png_set_gray_to_rgb(png_ptr);
      break;
    case PNG_COLOR_TYPE_PALETTE:    /*  (bit depths 1, 2, 4, 8) */
      png_set_palette_to_rgb(png_ptr);
      if (png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
        png_set_tRNS_to_alpha(png_ptr);
        has_alpha = 1;
        }
      break;
    case PNG_COLOR_TYPE_RGB:        /*  (bit_depths 8, 16) */
      if(png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
        png_set_tRNS_to_alpha(png_ptr);
        has_alpha = 1;
        }
      if(bit_depth == 16)
        png_set_strip_16(png_ptr);
      break;
    case PNG_COLOR_TYPE_RGB_ALPHA:  /*  (bit_depths 8, 16) */
      if(bit_depth == 16)
        png_set_strip_16(png_ptr);
      has_alpha = 1;
      break;
    }
  if(has_alpha)
    format->pixelformat = GAVL_RGBA_32;
  else
    format->pixelformat = GAVL_RGB_24;

  frame = gavl_video_frame_create(format);
  rows = malloc(format->frame_height * sizeof(*rows));
  for(i = 0; i < format->frame_height; i++)
    rows[i] = frame->planes[0] + i * frame->strides[0];

  png_read_image(png_ptr, rows);
  png_read_end(png_ptr, end_info);

  png_destroy_read_struct(&png_ptr, &info_ptr,
                          &end_info);
  fclose(file);
  free(rows);
  
  /* Check whether to set up the converter */

  if(format->pixelformat != pixelformat)
    {
    cnv = gavl_video_converter_create();
    opt = gavl_video_converter_get_options(cnv);
    gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_BLEND_COLOR);    

    gavl_video_format_copy(&format_1, format);
    format_1.pixelformat = pixelformat;
    frame_1 = gavl_video_frame_create(&format_1);
    
    gavl_video_converter_init(cnv, format, &format_1);
    
    gavl_video_convert(cnv, frame, frame_1);
    gavl_video_converter_destroy(cnv);
    format->pixelformat = pixelformat;
    }
  else
    frame_1 = NULL;

  if(frame_1)
    {
    gavl_video_frame_destroy(frame);
    return frame_1;
    }
  else
    return frame;
  }
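A short usage sketch for the read_png() defined above, assuming <string.h> is included for memset(); the file name is illustrative and error handling is minimal:

/* Sketch: load a PNG as RGBA via read_png() from above. */
static gavl_video_frame_t * load_icon_rgba(gavl_video_format_t * format)
  {
  gavl_video_frame_t * frame;

  memset(format, 0, sizeof(*format));
  frame = read_png("icon.png", format, GAVL_RGBA_32);
  if(!frame)
    return NULL;

  /* format now describes the returned frame in GAVL_RGBA_32 */
  return frame; /* destroy with gavl_video_frame_destroy() when done */
  }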
Example #19
int gavl_video_scaler_init(gavl_video_scaler_t * scaler,
                           const gavl_video_format_t * src_format,
                           const gavl_video_format_t * dst_format)
  {
  gavl_rectangle_f_t src_rect;
  gavl_rectangle_i_t  dst_rect;
  gavl_video_options_t opt;

  int field, plane;
 
  int sub_h_out = 1, sub_v_out = 1;
  
  /* Copy options because we want to change them */

  gavl_video_options_copy(&opt, &scaler->opt);

  /* TODO: If the image is smaller than the number of filter taps,
     reduce scaling algorithm */
  
  /* Copy formats */
  
  gavl_video_format_copy(&scaler->src_format, src_format);
  gavl_video_format_copy(&scaler->dst_format, dst_format);
  
  /* Check if we have rectangles */

  if(!opt.have_rectangles)
    {
    gavl_rectangle_f_set_all(&src_rect, &scaler->src_format);
    gavl_rectangle_i_set_all(&dst_rect, &scaler->dst_format);
    gavl_video_options_set_rectangles(&opt, &src_rect, &dst_rect);
    }
  
  /* Check how many fields we must handle */

  if((opt.deinterlace_mode == GAVL_DEINTERLACE_SCALE) &&
     (opt.conversion_flags & GAVL_FORCE_DEINTERLACE))
    {
    /* Deinterlacing mode */
    scaler->src_fields = 2;
    scaler->dst_fields = 1;

    /* Fake formats for scale context */
    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_NONE)
      scaler->src_format.interlace_mode = GAVL_INTERLACE_TOP_FIRST;
    scaler->dst_format.interlace_mode = GAVL_INTERLACE_NONE;
    }
  else if((opt.deinterlace_mode == GAVL_DEINTERLACE_SCALE) &&
          (scaler->dst_format.interlace_mode == GAVL_INTERLACE_NONE) &&
          (scaler->src_format.interlace_mode != GAVL_INTERLACE_NONE))
    {
    /* Deinterlacing mode */
    scaler->src_fields = 2;
    scaler->dst_fields = 1;
    }
  else if(scaler->src_format.interlace_mode != GAVL_INTERLACE_NONE)
    {
    /* Interlaced scaling */
    scaler->src_fields = 2;
    scaler->dst_fields = 2;
    }
  else
    {
    /* Progressive scaling */
    scaler->src_fields = 1;
    scaler->dst_fields = 1;
    }
  
  /* Copy destination rectangle so we know which subframe to take */
  gavl_rectangle_i_copy(&scaler->dst_rect, &opt.dst_rect);
  
#if 0
  fprintf(stderr, "gavl_video_scaler_init:\n");
  gavl_rectangle_f_dump(&scaler->opt.src_rect);
  fprintf(stderr, "\n");
  gavl_rectangle_i_dump(&scaler->dst_rect);
  fprintf(stderr, "\n");
#endif                      
  
  /* Crop source and destination rectangles to the formats */

  
  
  /* Align the destination rectangle to the output format */

  gavl_pixelformat_chroma_sub(scaler->dst_format.pixelformat, &sub_h_out, &sub_v_out);
  gavl_rectangle_i_align(&opt.dst_rect, sub_h_out, sub_v_out);
  
#if 0
  fprintf(stderr, "Initializing scaler:\n");
  fprintf(stderr, "Src format:\n");
  gavl_video_format_dump(&scaler->src_format);
  fprintf(stderr, "Dst format:\n");
  gavl_video_format_dump(&scaler->dst_format);

  fprintf(stderr, "Src rectangle:\n");
  gavl_rectangle_f_dump(&opt.src_rect);
  fprintf(stderr, "\nDst rectangle:\n");
  gavl_rectangle_i_dump(&scaler->dst_rect);
  fprintf(stderr, "\n");
#endif
  
  /* Check how many planes we have */
  
  if((scaler->src_format.pixelformat == GAVL_YUY2) ||
     (scaler->src_format.pixelformat == GAVL_UYVY))
    scaler->num_planes = 3;
  else
    scaler->num_planes = gavl_pixelformat_num_planes(scaler->src_format.pixelformat);
  
  if((scaler->src_fields == 2) && (!scaler->src_field))
    scaler->src_field = gavl_video_frame_create(NULL);
  
  if((scaler->dst_fields == 2) && (!scaler->dst_field))
    scaler->dst_field = gavl_video_frame_create(NULL);
  
  
#if 0
  fprintf(stderr, "src_fields: %d, dst_fields: %d, planes: %d\n",
          scaler->src_fields, scaler->dst_fields, scaler->num_planes);
#endif    

  /* Handle automatic mode selection */

  if(opt.scale_mode == GAVL_SCALE_AUTO)
    {
    if(opt.quality < 2)
      opt.scale_mode = GAVL_SCALE_NEAREST;
    else if(opt.quality <= 3)
      opt.scale_mode = GAVL_SCALE_BILINEAR;
    else
      opt.scale_mode = GAVL_SCALE_CUBIC_BSPLINE;
    }
  
  
  /* Now, initialize all fields and planes */

  if(scaler->src_fields > scaler->dst_fields)
    {
    /* Deinterlace mode */
    field = (scaler->opt.deinterlace_drop_mode == GAVL_DEINTERLACE_DROP_BOTTOM) ? 0 : 1;
    for(plane = 0; plane < scaler->num_planes; plane++)
      {
      if(!gavl_video_scale_context_init(&scaler->contexts[field][plane],
                                    &opt,
                                    plane, &scaler->src_format, &scaler->dst_format, field, 0,
                                    scaler->src_fields, scaler->dst_fields))
        return 0;
      }
    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_MIXED)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        if(!gavl_video_scale_context_init(&scaler->contexts[2][plane],
                                          &opt,
                                          plane, &scaler->src_format, &scaler->dst_format, 0, 0, 1, 1))
          return 0;
        }
      }
    }
  else
    {
    /* src_fields == dst_fields */
    for(field = 0; field < scaler->src_fields; field++)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        if(!gavl_video_scale_context_init(&scaler->contexts[field][plane],
                                          &opt,
                                          plane, &scaler->src_format, &scaler->dst_format, field, field,
                                          scaler->src_fields, scaler->dst_fields))
          return 0;
        }
      }

    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_MIXED)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        if(!gavl_video_scale_context_init(&scaler->contexts[2][plane],
                                          &opt,
                                          plane, &scaler->src_format, &scaler->dst_format, 0, 0, 1, 1))
          return 0;
        }
      }
    }
  return 1;
  }
Example #20
int gavl_video_scaler_init_convolve(gavl_video_scaler_t * scaler,
                                    const gavl_video_format_t * format,
                                    int h_radius, const float * h_coeffs,
                                    int v_radius, const float * v_coeffs)
  {
  gavl_rectangle_f_t src_rect;
  gavl_rectangle_i_t  dst_rect;
  gavl_video_options_t opt;

  int field, plane;
 
  /* Copy options because we want to change them */

  gavl_video_options_copy(&opt, &scaler->opt);
  
  /* Copy formats */
  
  gavl_video_format_copy(&scaler->src_format, format);
  gavl_video_format_copy(&scaler->dst_format, format);
  
  gavl_rectangle_f_set_all(&src_rect, &scaler->src_format);
  gavl_rectangle_i_set_all(&dst_rect, &scaler->dst_format);
  gavl_video_options_set_rectangles(&opt, &src_rect, &dst_rect);
    
  /* Check how many fields we must handle */

  if(format->interlace_mode != GAVL_INTERLACE_NONE)
    {
    scaler->src_fields = 2;
    scaler->dst_fields = 2;
    }
  else
    {
    scaler->src_fields = 1;
    scaler->dst_fields = 1;
    }
  
  /* Copy destination rectangle so we know which subframe to take */
  gavl_rectangle_i_copy(&scaler->dst_rect, &opt.dst_rect);
  
  /* Check how many planes we have */
  
  if((scaler->src_format.pixelformat == GAVL_YUY2) ||
     (scaler->src_format.pixelformat == GAVL_UYVY))
    scaler->num_planes = 3;
  else
    scaler->num_planes = 
      gavl_pixelformat_num_planes(scaler->src_format.pixelformat);
  
  if((scaler->src_fields == 2) && (!scaler->src_field))
    scaler->src_field = gavl_video_frame_create(NULL);
  
  if((scaler->dst_fields == 2) && (!scaler->dst_field))
    scaler->dst_field = gavl_video_frame_create(NULL);
  
  /* Now, initialize all fields and planes */
  
  for(field = 0; field < scaler->src_fields; field++)
    {
    for(plane = 0; plane < scaler->num_planes; plane++)
      {
      gavl_video_scale_context_init_convolve(&scaler->contexts[field][plane],
                                             &opt,
                                             plane, format, 
                                             scaler->src_fields,
                                             h_radius, h_coeffs,
                                             v_radius, v_coeffs);
      }
    
    if(scaler->src_format.interlace_mode == GAVL_INTERLACE_MIXED)
      {
      for(plane = 0; plane < scaler->num_planes; plane++)
        {
        gavl_video_scale_context_init_convolve(&scaler->contexts[2][plane],
                                               &opt,
                                               plane, format, 
                                               1,
                                               h_radius, h_coeffs,
                                               v_radius, v_coeffs);
        }
      }
    
    }
  return 1;
  }
Example #21
bool ReadMedia::initFormat() {

	const gavl_audio_format_t * open_audio_format;
	const gavl_video_format_t * open_video_format;

	// we use the m_vfifosize to see if the user app wants video or not;
	// if it doesn't, we set m_video_stream_count to 0
	if (m_video_stream_count > 0 && m_vfifosize > 0) {
		open_video_format = bgav_get_video_format(m_file, 0);

		if (open_video_format->pixelformat == GAVL_PIXELFORMAT_NONE) {
			printf("!!!sorry, pixelformat is not recognized.\n");
			return false;
		}

		// let's check to see if the formats are the same, if they are the same
		// there is no reason to recreate the fifo or frames
		if ( gavl_video_formats_equal( &m_video_format, open_video_format) == 0 ) { 	
			// the formats are different
			gavl_video_format_copy (&m_video_format, open_video_format);
			if (m_video_frame != NULL)
				gavl_video_frame_destroy(m_video_frame);
			m_video_frame = gavl_video_frame_create(&m_video_format);
			gavl_video_frame_clear( m_video_frame, &m_video_format);
			if (m_fifovideo != NULL)
				delete m_fifovideo;
			m_fifovideo=  new FifoVideoFrames( m_vfifosize ,  &m_video_format); 
		}
	} else {
		m_video_stream_count = 0;
		m_veof = true;
	}

	// we use the m_afifosize to see if the user app wants audio or not;
	// if it doesn't, we set m_audio_stream_count to 0
	if (m_audio_stream_count > 0 && m_afifosize > 0) {  
		open_audio_format = bgav_get_audio_format(m_file, 0);    
	
		// we can get audio formats that are unknown
		if ( open_audio_format->sample_format == GAVL_SAMPLE_NONE) {
			printf("sorry, this file has unsupported audio.\n"); 
			return false;	
		}

		if ( gavl_audio_formats_equal(&m_audio_format, open_audio_format) == 0 ) { 	
			// audio formats are different
			// save the old spf
			int spf = m_audio_format.samples_per_frame; 
			gavl_audio_format_copy(&m_audio_format, open_audio_format);

			if (m_audio_frame != NULL) {
				gavl_audio_frame_destroy(m_audio_frame);
			}

			// set it back to original
			m_audio_format.samples_per_frame = spf ;

			m_audio_frame = gavl_audio_frame_create(&m_audio_format);
	
			gavl_audio_frame_mute( m_audio_frame, &m_audio_format);
			if( m_fifoaudio != NULL )
				delete m_fifoaudio;
			m_fifoaudio = new FifoAudioFrames( m_afifosize , &m_audio_format); 
		}
	} else {
		// user doesn't want audio
		m_audio_stream_count = 0;
		m_aeof=true;
	}


	m_length_in_gavltime = bgav_get_duration ( m_file, 0);
	m_length_in_seconds = gavl_time_to_seconds(  m_length_in_gavltime );
	m_num_samples = 0;
	m_num_frames = 0;

	if (m_audio_stream_count) {
		if ( bgav_can_seek_sample(m_file) == 1 ) {
			m_num_samples=	bgav_audio_duration ( m_file, 0) ;
	 } else { 
			m_num_samples=	gavl_time_to_samples( m_audio_format.samplerate ,  bgav_get_duration ( m_file, 0) );
		}
	}

	// set frames   WE NEED TO take care here for non-constant frame-rates
	if(m_video_stream_count) {
		if ( bgav_can_seek_sample(m_file) == 1  && m_video_format.framerate_mode == GAVL_FRAMERATE_CONSTANT) { 
			m_num_frames =	bgav_video_duration ( m_file, 0)/ m_video_format.frame_duration;
		} else if ( bgav_can_seek_sample(m_file) == 1  && m_video_format.framerate_mode == GAVL_FRAMERATE_VARIABLE ) {
			// FIXME what to do with non constant frame rates?
			m_num_frames=0;
		} else { 
			m_num_frames =	gavl_time_to_frames( m_video_format.timescale, m_video_format.frame_duration ,  bgav_get_duration ( m_file, 0) );
		}
	}

  //	printf("m_num_frames =%lld, duration = %lld , vid_duration=%lld\n", 
	//		m_num_frames, bgav_get_duration ( m_file, 0),  bgav_video_duration ( m_file, 0) );
	// set seconds
	if ( bgav_can_seek_sample(m_file) == 1) {
		gavl_time_t atime=0,vtime=0;
		if ( m_audio_stream_count ) 
			atime =  gavl_samples_to_time( m_audio_format.samplerate, m_num_samples );
		if (m_video_stream_count &&  m_video_format.frame_duration > 0) {
			vtime =  gavl_frames_to_time( m_video_format.timescale, m_video_format.frame_duration, m_num_frames );
		} else if ( m_video_stream_count  ) { // non constant framerate			
			vtime = bgav_video_duration( m_file, 0);
		}
		// else rely on audio time
		m_length_in_gavltime = atime > vtime ? atime :vtime;
		m_length_in_seconds = gavl_time_to_seconds( m_length_in_gavltime );
		//printf("atime=%ld,  vtime=%ld, l_in_sec=%f\n", atime, vtime, m_length_in_seconds);
	} 

	m_pcm_seek = SEEK_NOTHING;
	m_frame_seek = SEEK_NOTHING;

	return true;
}
Example #22
int main(int argc, char ** argv)
  {
    
  gavl_video_format_t format_1;
  gavl_video_format_t format_2;

  gavl_video_frame_t * frame_1;
  gavl_video_frame_t * frame_2;
  gavl_video_frame_t * frame_3;
  
  bg_cfg_registry_t * cfg_reg;
  bg_cfg_section_t * cfg_section;
  bg_plugin_registry_t * plugin_reg;
  char * tmp_path;

  gavl_metadata_t m;
  
  memset(&format_1, 0, sizeof(format_1));
  memset(&format_2, 0, sizeof(format_2));
  memset(&m, 0, sizeof(m));
  
  if(argc < 4)
    {
    fprintf(stderr, "Usage: %s <image1> <image2> <output>\n", argv[0]);
    return -1;
    }
  
  /* Create registries */
  
  cfg_reg = bg_cfg_registry_create();
  tmp_path =  bg_search_file_read("generic", "config.xml");
  bg_cfg_registry_load(cfg_reg, tmp_path);
  if(tmp_path)
    free(tmp_path);

  cfg_section = bg_cfg_registry_find_section(cfg_reg, "plugins");
  plugin_reg = bg_plugin_registry_create(cfg_section);

  frame_1 =
    bg_plugin_registry_load_image(plugin_reg, argv[1], &format_1, NULL);  
  if(!frame_1)
    {
    fprintf(stderr, "Cannot open %s\n", argv[1]);
    return -1;
    }
  
  frame_2 =
    bg_plugin_registry_load_image(plugin_reg, argv[2], &format_2, NULL);
  if(!frame_2)
    {
    fprintf(stderr, "Cannot open %s\n", argv[2]);
    return -1;
    }

  if((format_1.image_width != format_2.image_width) ||
     (format_1.image_height != format_2.image_height) ||
     (format_1.pixelformat != format_2.pixelformat))
    {
    fprintf(stderr, "Format mismatch\n");
    return -1;
    }
  
  fprintf(stderr, "Format:\n\n");
  gavl_video_format_dump(&format_1);

  frame_3 = gavl_video_frame_create(&format_1);
  
  gavl_video_frame_absdiff(frame_3,
                           frame_1,
                           frame_2,
                           &format_1);

  bg_plugin_registry_save_image(plugin_reg,
                                argv[3],
                                frame_3,
                                &format_1, &m);

  return 0;
  }
Example #23
void write_png(char * filename, gavl_video_format_t * format, gavl_video_frame_t * frame)
  {
  int i;
  unsigned char ** rows;
  gavl_video_options_t * opt;
  int color_type;
  FILE * output;

  png_structp png_ptr;
  png_infop   info_ptr;
  
  gavl_video_converter_t * cnv;
    
  gavl_video_format_t format_1;
  gavl_video_frame_t * frame_1 = NULL;

  
  if((format->pixelformat != GAVL_RGB_24) && (format->pixelformat != GAVL_RGBA_32))
    {
    cnv = gavl_video_converter_create();
    
    gavl_video_format_copy(&format_1, format);

    if(gavl_pixelformat_has_alpha(format->pixelformat))
      {
      format_1.pixelformat = GAVL_RGBA_32;
      color_type = PNG_COLOR_TYPE_RGBA;
      }
    else
      {
      format_1.pixelformat = GAVL_RGB_24;
      color_type = PNG_COLOR_TYPE_RGB;
      }
    frame_1 = gavl_video_frame_create(&format_1);

    opt = gavl_video_converter_get_options(cnv);
    gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_BLEND_COLOR);    
    gavl_video_converter_init(cnv, format, &format_1);
    
    gavl_video_convert(cnv, frame, frame_1);
    gavl_video_converter_destroy(cnv);
    }
  else if(format->pixelformat == GAVL_RGB_24)
    {
    color_type = PNG_COLOR_TYPE_RGB;
    }
  else
    {
    color_type = PNG_COLOR_TYPE_RGBA;
    }
  
  output = fopen(filename, "wb");
  if(!output)
    return;

  png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL,
                                         NULL, NULL);

  info_ptr = png_create_info_struct(png_ptr);
  setjmp(png_jmpbuf(png_ptr));
  png_init_io(png_ptr, output);
  
  png_set_IHDR(png_ptr, info_ptr,
               format->image_width,
               format->image_height,
               8, color_type, PNG_INTERLACE_NONE,
               PNG_COMPRESSION_TYPE_DEFAULT, PNG_FILTER_TYPE_DEFAULT);

  rows = malloc(format->image_height * sizeof(*rows));

  if(frame_1)
    {
    for(i = 0; i < format->image_height; i++)
      rows[i] = frame_1->planes[0] + i * frame_1->strides[0];
    }
  else
    {
    for(i = 0; i < format->image_height; i++)
      rows[i] = frame->planes[0] + i * frame->strides[0];
    }
  
  png_set_rows(png_ptr, info_ptr, rows);
  png_write_png(png_ptr, info_ptr, PNG_TRANSFORM_IDENTITY, NULL);

  png_destroy_write_struct(&png_ptr, &info_ptr);
  fclose(output);
  free(rows);
  if(frame_1)
    gavl_video_frame_destroy(frame_1);
  }
Example #24
int main(int argc, char ** argv)
  {
  int i, j;
  bg_cfg_registry_t * cfg_reg;
  bg_cfg_section_t * cfg_section;
  bg_plugin_registry_t * plugin_reg;

  char * tmp_string;
  
  gavl_video_frame_t * in_frame;
  gavl_video_frame_t * tmp_frame = NULL;
  gavl_video_frame_t * out_frame;
  gavl_video_frame_t * f;
  
  gavl_video_format_t in_format;
  gavl_video_format_t tmp_format;
  gavl_video_format_t out_format;
  int do_convert;

  int num_formats;
    
  gavl_video_converter_t * cnv;
  gavl_video_options_t   * opt;
  
  if(argc != 2)
    {
    fprintf(stderr, "Usage: %s <image>\n", argv[0]);
    return -1;
    }
  
  /* Create registries */
  
  cfg_reg = bg_cfg_registry_create();
  tmp_string =  bg_search_file_read("generic", "config.xml");
  bg_cfg_registry_load(cfg_reg, tmp_string);
  if(tmp_string)
    free(tmp_string);

  cfg_section = bg_cfg_registry_find_section(cfg_reg, "plugins");
  plugin_reg = bg_plugin_registry_create(cfg_section);
  
  /* Load input image */
  in_frame = bg_plugin_registry_load_image(plugin_reg,
                                           argv[1],
                                           &in_format, NULL);
  
  if(!in_frame)
    {
    fprintf(stderr, "Couldn't load %s\n", argv[1]);
    return -1;
    }

  gavl_video_format_copy(&tmp_format, &in_format);
  
  /* Create converter */
  cnv = gavl_video_converter_create();

  opt = gavl_video_converter_get_options(cnv);
  gavl_video_options_set_alpha_mode(opt, GAVL_ALPHA_BLEND_COLOR);
  
  num_formats = gavl_num_pixelformats();

  for(i = 0; i < num_formats; i++)
    {
    tmp_format.pixelformat = gavl_get_pixelformat(i);

    do_convert = gavl_video_converter_init(cnv, &in_format, &tmp_format);

    if(do_convert)
      {
      tmp_frame = gavl_video_frame_create(&tmp_format);
      gavl_video_convert(cnv, in_frame, tmp_frame);
      f = tmp_frame;
      }
    else
      f = in_frame;
    
    for(j = 0; j < num_channels; j++)
      {
      /* Check if channel is available */
      if(!gavl_get_color_channel_format(&tmp_format,
                                        &out_format,
                                        channels[j].ch))
        continue;
      
      out_frame = gavl_video_frame_create(&out_format);

      if(!gavl_video_frame_extract_channel(&tmp_format,
                                           channels[j].ch,
                                           f,
                                           out_frame))
        {
        fprintf(stderr, "Huh? Extracting %s from %s failed\n",
                channels[j].name,
                gavl_pixelformat_to_string(tmp_format.pixelformat));
        return -1;
        }
      tmp_string =
        bg_sprintf("%s_%s.gavi",
                   gavl_pixelformat_to_string(tmp_format.pixelformat),
                   channels[j].name);

      bg_plugin_registry_save_image(plugin_reg,
                                    tmp_string,
                                    out_frame, &out_format,
                                    NULL);

      fprintf(stderr, "Wrote %s\n", tmp_string);
      
      free(tmp_string);
      gavl_video_frame_destroy(out_frame);
      
      }
    if(tmp_frame)
      {
      gavl_video_frame_destroy(tmp_frame);
      tmp_frame = NULL;
      }
    
    }

  gavl_video_frame_destroy(in_frame);

  gavl_video_converter_destroy(cnv);
  bg_plugin_registry_destroy(plugin_reg);
  bg_cfg_registry_destroy(cfg_reg);
  
  
  return 0;
  
  }