コード例 #1
0
/* Open and configure the vlib encoding stage: select the UVLC codec,
 * switch the controller into encode mode, apply the configured frame
 * dimensions, and disable motion estimation. Always reports C_OK. */
C_RESULT vlib_stage_encoding_open(vlib_stage_encoding_config_t *cfg)
{
  video_controller_t *ctrl = &cfg->controller;

  video_codec_open( ctrl, UVLC_CODEC );
  video_controller_set_mode( ctrl, VIDEO_ENCODE );
  video_controller_set_format( ctrl, cfg->width, cfg->height );
  video_controller_set_motion_estimation( ctrl, FALSE );

  return C_OK;
}
コード例 #2
0
/*
 * Parse the P264 picture layer header from `stream`.
 *
 * Fields read via video_read_data: format (2 bits), resolution (3 bits),
 * picture type (3 bits), quantizer (6 bits), then the controller's
 * 32-bit frame counter. The frame dimensions are derived from
 * format/resolution and pushed to the controller.
 *
 * Fix: the original computed `QQ*_WIDTH << (resolution - 1)` without
 * checking resolution != 0; a zero value read from the (untrusted)
 * stream made the shift count negative, which is undefined behavior
 * in C. A zero resolution is now treated like an unknown format
 * (width = height = 0), matching the existing default branch.
 */
C_RESULT p264_read_picture_layer( video_controller_t* controller, video_stream_t* stream )
{
  uint32_t width, height;

  p264_codec_t* p264_codec = (p264_codec_t*) controller->video_codec;
  p264_picture_layer_t* picture_layer = &p264_codec->picture_layer;

  picture_layer->format       = 0;
  picture_layer->resolution   = 0;
  picture_layer->picture_type = 0;
  picture_layer->quant        = 0;

  video_read_data( stream, &picture_layer->format, 2 );
  video_read_data( stream, &picture_layer->resolution, 3 );
  video_read_data( stream, &picture_layer->picture_type, 3 );
  video_read_data( stream, &picture_layer->quant, 6 );
  video_read_data( stream, &controller->num_frames, 32 );

  if( picture_layer->resolution == 0 )
  {
    // Invalid header: shifting by (0 - 1) below would be UB.
    width   = 0;
    height  = 0;
  }
  else
  {
    switch( picture_layer->format )
    {
      case UVLC_FORMAT_CIF:
        // Resolution 1 is QQCIF; each further step doubles both axes.
        width   = QQCIF_WIDTH << (picture_layer->resolution-1);
        height  = QQCIF_HEIGHT << (picture_layer->resolution-1);
        break;

      case UVLC_FORMAT_VGA:
        width   = QQVGA_WIDTH << (picture_layer->resolution-1);
        height  = QQVGA_HEIGHT << (picture_layer->resolution-1);
        break;

      default:
        width   = 0;
        height  = 0;
        break;
    }
  }

  video_controller_set_format( controller, width, height );

  return C_OK;
}
コード例 #3
0
ファイル: libp264.c プロジェクト: rc500/drone_demo
/*
 * Allocate and initialize a P264 decoding session.
 *
 * Builds the controller, the output picture (packed RGB565, 2 bytes
 * per pixel, WIDTH x HEIGHT, with the pixel data carried in y_buf)
 * and the shared global input `stream`, then opens the P264 codec.
 *
 * Returns the new state on success (caller owns it and all buffers),
 * or NULL on failure.
 *
 * Fix: the original dereferenced every calloc() result unchecked
 * (NULL dereference on OOM) and leaked every allocation when
 * video_codec_open() failed. All failure paths now release what was
 * allocated before returning NULL.
 */
p264_state_t* p264_open(void)
{
  p264_state_t* s = (p264_state_t*) calloc(1, sizeof(p264_state_t));
  if( s == NULL )
    return NULL;

  s->controller = (video_controller_t*) calloc(1, sizeof(video_controller_t));
  s->picture    = (vp_api_picture_t*) calloc(1, sizeof(vp_api_picture_t));
  s->picturebuf = (char*) calloc(1, WIDTH*HEIGHT*2);
  if( s->controller == NULL || s->picture == NULL || s->picturebuf == NULL )
    goto fail;

  s->picture->format = PIX_FMT_RGB565;
  s->picture->framerate = 15;
  s->picture->width = WIDTH;
  s->picture->height = HEIGHT;
  s->picture->y_buf = (uint8_t*)s->picturebuf;
  s->picture->y_line_size = s->picture->width * 2;   /* 2 bytes/pixel (RGB565) */
  s->picture->cr_buf = calloc(1, WIDTH*HEIGHT*2);
  s->picture->cr_line_size = 0;
  s->picture->cb_buf = calloc(1, WIDTH*HEIGHT*2);
  s->picture->cb_line_size = 0;
  if( s->picture->cr_buf == NULL || s->picture->cb_buf == NULL )
    goto fail;

  /* Global bitstream buffer shared with the feed/decode entry points. */
  stream.bytes  = (uint32_t*)malloc(FRAME_MODE_BUFFER_SIZE*sizeof(uint32_t));
  if( stream.bytes == NULL )
    goto fail;
  stream.index  = 0;
  stream.used   = 0;
  stream.size   = FRAME_MODE_BUFFER_SIZE*sizeof(uint32_t);

  if( video_codec_open(s->controller, P264_CODEC) )
  {
    printf("FAILED!\n");
    free(stream.bytes);
    stream.bytes = NULL;
    goto fail;
  }

  s->picture->vision_complete = 0;
  s->picture->complete = 0;
  s->picture->blockline = 0;

  video_controller_set_motion_estimation( s->controller, FALSE );
  video_controller_set_format( s->controller, WIDTH, HEIGHT );

  return s;

fail:
  if( s->picture != NULL )
  {
    free(s->picture->cr_buf);
    free(s->picture->cb_buf);
  }
  free(s->picturebuf);
  free(s->picture);
  free(s->controller);
  free(s);
  return NULL;
}
コード例 #4
0
ファイル: video_codec.c プロジェクト: evenator/senior-project
/*
 * Encode one complete picture, one blockline (MB_HEIGHT_Y rows) at a
 * time, into controller->in_stream.
 *
 * controller : codec controller; forced into VIDEO_ENCODE mode and
 *              resized to the incoming picture before encoding.
 * picture    : source frame; walked plane-by-plane through a local
 *              copy, so the caller's struct is never modified.
 * got_image  : set to TRUE once a complete picture has been encoded
 *              and its bitstream flushed; left untouched otherwise.
 *
 * Always returns C_OK.
 */
C_RESULT video_encode_picture( video_controller_t* controller, const vp_api_picture_t* picture, bool_t* got_image )
{
  vp_api_picture_t blockline = { 0 };

  controller->mode  = VIDEO_ENCODE;

  video_controller_set_format( controller, picture->width, picture->height );

  // Shallow copy of the source picture restricted to one blockline of rows.
  blockline                   = *picture;
  blockline.height            = MB_HEIGHT_Y;
  blockline.complete          = 1;
  blockline.vision_complete   = 0;

  // Reset internal stream for new blockline/picture
  controller->in_stream.used  = 0;
  controller->in_stream.index = 0;

  while( !controller->picture_complete )
  {
    // Third argument flags the final stripe of the frame.
    video_encode_blockline( controller, &blockline, blockline.blockline == (controller->num_blockline-1) );

    // Advance the plane pointers one blockline down. Chroma advances by
    // MB_HEIGHT_C rows -- presumably half the luma stripe for 4:2:0
    // subsampling; TODO confirm against MB_HEIGHT_* definitions.
    blockline.y_buf  += MB_HEIGHT_Y * picture->y_line_size;
    blockline.cb_buf += MB_HEIGHT_C * picture->cb_line_size;
    blockline.cr_buf += MB_HEIGHT_C * picture->cr_line_size;

    blockline.blockline++;
  }

  if( picture->complete )
  {
    // Flush the partially-filled cache word by writing (length+1) zero
    // bits, then reset the cache length to a full 32-bit word so the
    // next frame starts on a word boundary.
    video_write_data( &controller->in_stream, 0, controller->in_stream.length+1 );
    controller->in_stream.length = 32;
    controller->picture_complete = 0;
    *got_image = TRUE;
  }

  return C_OK;
}
コード例 #5
0
ファイル: stream.cpp プロジェクト: homero304/proyectoVision
/*
 * Start the video streaming thread. Idempotent: returns immediately
 * while a thread handle is already set.
 *
 * Fixes vs. the original:
 *  - a video_codec_open() failure now aborts the start-up instead of
 *    configuring and spawning a thread on an unopened controller;
 *  - INFO() is logged before video_codec_close() so errno is not
 *    clobbered by the close (NOTE(review): pthread_create() returns
 *    the error code rather than setting errno -- the message text may
 *    still be misleading; confirm against the INFO conventions);
 *  - on pthread_create() failure the handle is reset to 0, because
 *    pthread_create() leaves it indeterminate on error and the
 *    `if( stream_thread )` guard would otherwise block all retries.
 */
void stream_run(void)
{
    C_RESULT status;

    if( stream_thread )
        return;

    memset(&controller, 0, sizeof(controller));
    memset(&picture, 0, sizeof(picture));
    memset(picture_buf, 0, VIDEO_BUFFER_SIZE);
    pictureBpp			  = 2;

    /// Picture configuration: packed RGB565 (2 bytes/pixel) carried in
    /// the luma buffer; no separate chroma planes.
    picture.format	      = PIX_FMT_RGB565;
    picture.width         = H_ACQ_WIDTH;
    picture.height        = H_ACQ_HEIGHT;
    picture.framerate     = 15;
    picture.y_line_size   = picture.width * pictureBpp;
    picture.cb_line_size  = 0;
    picture.cr_line_size  = 0;
    picture.y_buf         = (uint8_t *)picture_buf;
    picture.cb_buf	      = NULL;
    picture.cr_buf	      = NULL;

    status = video_codec_open( &controller, UVLC_CODEC );
    if (status) {
        INFO("video_codec_open() failed\n");
        return;
    }
    video_controller_set_motion_estimation(&controller, FALSE);
    video_controller_set_format( &controller, H_ACQ_WIDTH, H_ACQ_HEIGHT );

    if( pthread_create(&stream_thread, NULL, stream_loop, NULL) )
    {
        INFO("pthread_create: %s\n", strerror(errno));
        video_codec_close(&controller);
        stream_thread = 0;
    }
}
コード例 #6
0
/*
 * Parse the H.263-style (P263) picture layer header from `stream`.
 *
 * Reads TR and PTYPE, configures the controller's frame size from the
 * source-format bits (or reads PLUSPTYPE/OPPTYPE/MPPTYPE for the
 * extended format), then parses the optional CPM/PSBI and the
 * OPPTYPE-gated fields (CPFMT/EPAR, CPCFC/ETR, UUI, SSS).
 *
 * NOTE(review): this fragment is truncated in the visible source --
 * the remainder of the body, the return statement and the closing
 * brace are not shown here.
 */
C_RESULT p263_read_picture_layer( video_controller_t* controller, video_stream_t* stream )
{
  uint32_t pei = 0;
  p263_codec_t* p263_codec = (p263_codec_t*) controller->video_codec;
  p263_picture_layer_t* picture_layer = &p263_codec->picture_layer;

  // Use the standard (non-extended) macroblock-type and CBPY tables.
  p263_codec->mb_types  = &standard_mb_types[0];
  p263_codec->cbpys     = &cbpy_standard[0];

  // Read Temporal Reference (TR) (8 bits)
  picture_layer->tr = 0;
  video_read_data( stream, &picture_layer->tr, 8 );

  // Read Type Information (PTYPE) (Variable Length)
  picture_layer->ptype      = 0;
  picture_layer->plusptype  = 0;
  picture_layer->opptype    = 0;
  video_read_data( stream, &picture_layer->ptype, 8 );

  switch( PICTURE_FORMAT(picture_layer->ptype) )
  {
    case P263_PICTURE_FORMAT_FORBIDDEN:
      break;

    case P263_PICTURE_FORMAT_SUBQCIF:
      video_controller_set_format( controller, 128, 96 );
      goto P263_PICTURE_FORMAT_NOT_EXTENDED;

    case P263_PICTURE_FORMAT_QCIF:
      video_controller_set_format( controller, 176, 144 );
      goto P263_PICTURE_FORMAT_NOT_EXTENDED;

    case P263_PICTURE_FORMAT_CIF:
      video_controller_set_format( controller, 352, 288 );
      goto P263_PICTURE_FORMAT_NOT_EXTENDED;

    case P263_PICTURE_FORMAT_4QCIF:
      video_controller_set_format( controller, 704, 576 );
      goto P263_PICTURE_FORMAT_NOT_EXTENDED;

    case P263_PICTURE_FORMAT_16CIF:
      video_controller_set_format( controller, 1408, 1152 );
      /* fallthrough into the shared non-extended tail below */

    // Shared tail for every non-extended format: reached by goto from
    // the cases above and by fallthrough from 16CIF.
    P263_PICTURE_FORMAT_NOT_EXTENDED:
      video_read_data( stream, &picture_layer->ptype, 5 );
      video_controller_set_picture_type( controller, PICTURE_TYPE(picture_layer->ptype) );
      break;

    case P263_PICTURE_FORMAT_RESERVED:
      break;

    case P263_PICTURE_FORMAT_EXTENDED:
      // Read Plus PTYPE (PLUSPTYPE) (Variable Length) -- Optional, see PTYPE
      // Read UFEP
      video_read_data( stream, &picture_layer->plusptype, 3 );
      if( picture_layer->plusptype == 1 )
      {
        // UFEP == 001b: the full OPPTYPE field follows.
        // Read OPPTYPE
        video_read_data( stream, &picture_layer->opptype, 18 );
      }

      // Read MPPTYPE
      video_read_data( stream, &picture_layer->plusptype, 9 );
      video_controller_set_picture_type( controller, PICTURE_EXTENDED_TYPE(picture_layer->plusptype) );
      break;
  }

  if( picture_layer->plusptype )
  {
    // Read Continuous Presence Multipoint and Video Multiplex (CPM) (1 bit, see Annex C)
    video_read_data( stream, &picture_layer->cpm, 1 );

    if( picture_layer->cpm )
    {
      // Read Picture Sub-Bitstream Indicator (PSBI) (2 bits)
      video_read_data( stream, &picture_layer->psbi, 2 );
    }
  }

  if( picture_layer->opptype ) // eg UFEP == 001b
  {
    if( HAS_CUSTOM_PICTURE_FORMAT( picture_layer->opptype ) )
    {
      // Read Custom Picture Format (CPFMT) (23 bits)
      video_read_data( stream, &picture_layer->cpfmt, 23 );

      // Pixel-aspect-ratio code 1111b means an EPAR field follows.
      if( (picture_layer->cpfmt >> 19) == 0xF )
      {
        // Read Extended Pixel Aspect Ratio (EPAR) (16 bits)
        video_read_data( stream, &picture_layer->epar, 16 );
      }
    }

    if( HAS_CUSTOM_PCF(picture_layer->opptype) )
    {
      // Read Custom Picture Clock Frequency Code (CPCFC) (8 bits)
      video_read_data( stream, &picture_layer->cpcfc, 8 );

      // Read Extended Temporal Reference (ETR) (2 bits)
      video_read_data( stream, &picture_layer->etr, 2 );
    }

    if( HAS_UNRESTRICTED_MOTION_VECTOR(picture_layer->opptype) )
    {
      // Read Unlimited Unrestricted Motion Vectors Indicator (UUI) (Variable length) -- Optional
      video_read_data( stream, &picture_layer->uui, 1 );
      if( picture_layer->uui == 0 )
        video_read_data( stream, &picture_layer->uui, 1 );
    }

    // NOTE(review): this tests picture_layer->sss, not opptype like the
    // other gates -- looks inconsistent; confirm against the H.263 spec.
    if( HAS_SLICE_STRUCTURED(picture_layer->sss) )
    {
      // Read Slice Structured Submode bits (SSS) (2 bits) -- Optional
      video_read_data( stream, &picture_layer->sss, 2 );
    }
  }
コード例 #7
0
/*
 * Encoding stage transform: encode cfg->picture into the controller's
 * internal stream and expose that stream as the stage's output buffer.
 *
 * A static counter (local_subsampl) implements temporal subsampling:
 * only the frame on which the counter is 0 is encoded; the other
 * cfg->subsampl - 1 frames produce an empty output (out->size = 0).
 * The counter advances once per completed picture and wraps at
 * cfg->subsampl.
 *
 * Always returns C_OK; runs entirely under out->lock.
 */
C_RESULT vlib_stage_encoding_transform(vlib_stage_encoding_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
  // Frames seen since the last encoded one; static so it persists
  // across calls (NOTE(review): also shared if this stage is ever
  // instantiated twice -- confirm single-instance use).
  static int32_t local_subsampl = 0;

  vp_os_mutex_lock(&out->lock);

  if( out->status == VP_API_STATUS_INIT )
  {
    // First call: alias the output buffer directly onto the
    // controller's internal stream storage (zero-copy output).
    out->numBuffers   = 1;
    out->buffers      = (uint8_t**)&cfg->controller.in_stream.bytes;
    out->indexBuffer  = 0;

    out->status = VP_API_STATUS_PROCESSING;

    cfg->current_size = 0;
  }

  if( local_subsampl == 0 && out->status == VP_API_STATUS_PROCESSING )
  {
    // check video_codec didn't change
    if (cfg->controller.codec_type != cfg->codec_type)
    {
      // Codec switched at runtime: reopen and reconfigure from cfg.
      video_codec_open( &cfg->controller, cfg->codec_type );
      video_controller_set_mode( &cfg->controller, VIDEO_ENCODE );
      video_controller_set_format( &cfg->controller, cfg->width, cfg->height );
      video_controller_set_motion_estimation( &cfg->controller, FALSE );
    }

    // update target size
    video_controller_set_target_size( &cfg->controller, cfg->target_size );
    RTMON_USTART(VIDEO_VLIB_ENCODE_EVENT);
    // Either encode one blockline per call, or a whole picture at once.
    if(cfg->block_mode_enable)
      video_encode_blockline( &cfg->controller, cfg->picture, cfg->picture->complete );
    else
      video_encode_picture( &cfg->controller, cfg->picture, (bool_t*)&cfg->picture->complete );
    RTMON_USTOP(VIDEO_VLIB_ENCODE_EVENT);

    if(cfg->picture->complete)
    {
      // Picture done: count it toward the subsampling window.
      RTMON_UVAL(ENCODED_PICTURE_UVAL, cfg->controller.num_frames);
      local_subsampl++;
    }

    cfg->current_size = cfg->controller.in_stream.used;

    if( cfg->controller.in_stream.length != 32 )
    {
      // flush & reset internal stream: writing (length+1) zero bits
      // pads the partial cache word, then the cache length is reset
      // to a full 32-bit word.
      video_write_data( &cfg->controller.in_stream, 0, cfg->controller.in_stream.length+1 );
      cfg->controller.in_stream.length = 32;
    }
    out->size = cfg->controller.in_stream.used;

    RTMON_UVAL(ENCODED_BLOCKLINE_SIZE_UVAL, out->size);

    // Rewind the stream; the aliased output buffer still holds the
    // bytes just produced until the next encode overwrites them.
    cfg->controller.in_stream.used  = 0;
    cfg->controller.in_stream.index = 0;
  }
  else
  {
    // Skipped frame: emit nothing, but still advance the window when
    // a complete picture went by.
    out->size = 0;

    if( cfg->picture->complete )
    {
      local_subsampl++;
    }
  }

  // Wrap the subsampling counter so the next window starts at 0.
  if(local_subsampl >= (int32_t)cfg->subsampl)
    local_subsampl = 0;
  vp_os_mutex_unlock( &out->lock );

  return C_OK;
}