void 
OWLDiff::relation(const tags_t &attrs, const std::list<member> &members, const tags_t &tags) {
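  // Build the relation element from the parsed attributes, tags and members,
  // refresh the metadata from the element attributes, and append the
  // resulting changes to the accumulated change list.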
  owl_diff::relation r(attrs, tags, members);
  update_metadata(attrs);
  list<owl_diff::change> changes = common(r, database, current_action);
  all_changes.splice(all_changes.end(), changes);
}
void 
OWLDiff::node(const tags_t &attrs, const tags_t &tags) {
  owl_diff::node n(attrs, tags);
  update_metadata(attrs);
  list<owl_diff::change> changes = common(n, database, current_action);
  all_changes.splice(all_changes.end(), changes);
}
void 
OWLDiff::way(const tags_t &attrs, const std::vector<id_t> &way_nodes, const tags_t &tags) {
  owl_diff::way w(attrs, tags, way_nodes);
  update_metadata(attrs);
  list<owl_diff::change> changes = common(w, database, current_action);
  all_changes.splice(all_changes.end(), changes);
}
Example #4
int main(int argc, char ** argv) {
    int errcode;

    /* flvmeta default options */
    static flvmeta_opts options;
    options.command = FLVMETA_DEFAULT_COMMAND;
    options.input_file = NULL;
    options.output_file = NULL;
    options.metadata = NULL;
    options.check_level = FLVMETA_CHECK_LEVEL_WARNING;
    options.quiet = 0;
    options.check_xml_report = 0;
    options.dump_metadata = 0;
    options.insert_onlastsecond = 1;
    options.reset_timestamps = 0;
    options.all_keyframes = 0;
    options.preserve_metadata = 0;
    options.error_handling = FLVMETA_EXIT_ON_ERROR;
    options.dump_format = FLVMETA_FORMAT_XML;
    options.verbose = 0;
    options.metadata_event = NULL;


    /* Command-line parsing */
    errcode = parse_command_line(argc, argv, &options);

    /* free metadata if necessary */
    if ((errcode != OK || options.command != FLVMETA_UPDATE_COMMAND) && options.metadata != NULL) {
        amf_data_free(options.metadata);
    }

    if (errcode == OK) {
        /* execute command */
        switch (options.command) {
            case FLVMETA_DUMP_COMMAND: errcode = dump_metadata(&options); break;
            case FLVMETA_FULL_DUMP_COMMAND: errcode = dump_flv_file(&options); break;
            case FLVMETA_CHECK_COMMAND: errcode = check_flv_file(&options); break;
            case FLVMETA_UPDATE_COMMAND: errcode = update_metadata(&options); break;
            case FLVMETA_VERSION_COMMAND: version(); break;
            case FLVMETA_HELP_COMMAND: help(argv[0]); break;
        }

        /* error report */
        switch (errcode) {
            case ERROR_OPEN_READ: fprintf(stderr, "%s: cannot open %s for reading\n", argv[0], options.input_file); break;
            case ERROR_NO_FLV: fprintf(stderr, "%s: %s is not a valid FLV file\n", argv[0], options.input_file); break;
            case ERROR_EOF: fprintf(stderr, "%s: unexpected end of file\n", argv[0]); break;
            case ERROR_MEMORY: fprintf(stderr, "%s: memory allocation error\n", argv[0]); break;
            case ERROR_EMPTY_TAG: fprintf(stderr, "%s: empty FLV tag\n", argv[0]); break;
            case ERROR_OPEN_WRITE: fprintf(stderr, "%s: cannot open %s for writing\n", argv[0], options.output_file); break;
            case ERROR_INVALID_TAG: fprintf(stderr, "%s: invalid FLV tag\n", argv[0]); break;
            case ERROR_WRITE: fprintf(stderr, "%s: unable to write to %s\n", argv[0], options.output_file); break;
            case ERROR_SAME_FILE: fprintf(stderr, "%s: input file and output file must be different\n", argv[0]); break;
        }
    }

    return errcode;
}
void stream_encoders::update_metadata(metadb_handle_ptr p_track) {
    if (p_track != 0) {
        p_track->metadb_lock();
        const file_info * p_info;
        if (p_track->get_info_async_locked(p_info))
            update_metadata(*p_info);
        p_track->metadb_unlock();
    }
}
Example #6
static OMX_ERRORTYPE
obtain_next_url (dirble_prc_t * ap_prc, int a_skip_value)
{
  OMX_ERRORTYPE rc = OMX_ErrorNone;
  const long pathname_max = PATH_MAX + NAME_MAX;

  assert (ap_prc);
  assert (ap_prc->p_dirble_);

  if (!ap_prc->p_uri_param_)
    {
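      /* Lazily allocate the content URI parameter, leaving room for the
         longest possible path plus a terminating NUL. */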
      ap_prc->p_uri_param_ = tiz_mem_calloc (
        1, sizeof (OMX_PARAM_CONTENTURITYPE) + pathname_max + 1);
    }

  tiz_check_null_ret_oom (ap_prc->p_uri_param_ != NULL);

  ap_prc->p_uri_param_->nSize
    = sizeof (OMX_PARAM_CONTENTURITYPE) + pathname_max + 1;
  ap_prc->p_uri_param_->nVersion.nVersion = OMX_VERSION;

  {
    const char * p_next_url
      = a_skip_value > 0 ? tiz_dirble_get_next_url (ap_prc->p_dirble_,
                                                    ap_prc->remove_current_url_)
                         : tiz_dirble_get_prev_url (
                             ap_prc->p_dirble_, ap_prc->remove_current_url_);
    ap_prc->remove_current_url_ = false;
    tiz_check_null_ret_oom (p_next_url != NULL);

    {
      const OMX_U32 url_len = strnlen (p_next_url, pathname_max);
      TIZ_TRACE (handleOf (ap_prc), "URL [%s]", p_next_url);

      /* Verify we are getting an http scheme */
      if (!p_next_url || !url_len
          || (memcmp (p_next_url, "http://", 7) != 0
              && memcmp (p_next_url, "https://", 8) != 0))
        {
          rc = OMX_ErrorContentURIError;
        }
      else
        {
          strncpy ((char *) ap_prc->p_uri_param_->contentURI, p_next_url,
                   url_len);
          ap_prc->p_uri_param_->contentURI[url_len] = '\000';

          /* Song metadata is now available, update the IL client */
          rc = update_metadata (ap_prc);
        }
    }
  }

  return rc;
}
Example #7
    PopulationSpace( boost::property_tree::ptree & config ) :
        sequences( config )
        , alleles( config )
    {
        create_space( free_space );
        create_space( pheno_space );

//        resize_space( free_space, allele_space_type::device_space_type::ALIGNMENT_SIZE );

        update_metadata();
    }
Example #8
int get_docid(int docid, struct tobj* tobj)
{
    struct tobj* t = tobj;
    int block_num = t->block_num;
    int ret = 0;
    int maxid = t->maxid;
    //printf("get docid\n");
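    /* Walk this term's blocks until check_meta_docid() returns a real
       docid (any value other than maxid means a match was found). */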

    while ((t->cur_block + 1) <= block_num) {
        ret = check_meta_docid(docid, t);
        if(ret != maxid)
            return ret;
        t->cur_block += 1;
        if(t->cur_block + 1 > block_num)
            break;
        update_metadata(t->start_addr + t->cur_block * t->c_blocksize, t);
        clear_chunk_access_flag(t);
        t->cur_chunk = 0;
    }
    return maxid;
}
Example #9
static int store_icy(URLContext *h, int size)
{
    HTTPContext *s = h->priv_data;
    /* until next metadata packet */
    int remaining = s->icy_metaint - s->icy_data_read;

    if (remaining < 0)
        return AVERROR_INVALIDDATA;

    if (!remaining) {
        /* The metadata packet is variable sized. It has a 1 byte header
         * which sets the length of the packet (divided by 16). If it's 0,
         * the metadata doesn't change. After the packet, icy_metaint bytes
         * of normal data follows. */
        uint8_t ch;
        int len = http_read_stream_all(h, &ch, 1);
        if (len < 0)
            return len;
        if (ch > 0) {
            char data[255 * 16 + 1];
            int ret;
            len = ch * 16;
            ret = http_read_stream_all(h, data, len);
            if (ret < 0)
                return ret;
            data[len] = 0; /* NUL-terminate the metadata string */
            if ((ret = av_opt_set(s, "icy_metadata_packet", data, 0)) < 0)
                return ret;
            update_metadata(s, data);
        }
        s->icy_data_read = 0;
        remaining        = s->icy_metaint;
    }

    return FFMIN(size, remaining);
}
Example #10
struct tobj* term_init(int* info)
{
    int ret = 0; 
    struct tobj *t = NULL;

    t = (struct tobj*)malloc(sizeof(struct tobj));
    if(t == NULL)
        return t;
    t->tindex = term_num;
    term_num += 1;
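    /* info[] layout (as consumed below): 0 = file index, 1 = first chunk,
       2 = last chunk, 3 = start address, 4 = end address, 5 = document count */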
    t->fileindex = info[0];
    t->chunk_s = info[1];
    t->chunk_e = info[2];
    t->start_addr = info[3];
    t->end_addr = info[4];
    t->doc_num = info[5];
    t->block_num = (info[4] - info[3])/config.c_blocksize;
    t->c_blocksize = config.c_blocksize;
    t->cur_block = 0;
    t->cur_chunk = t->chunk_s;
    t->cur_meta_l = 0;
    t->flag = 0;
    t->maxid = config.maxid;

    file_open(t->fileindex);
    /* update metadata */
    ret = update_metadata(t->start_addr, t);
    if(ret){
        //printf("update metadata fail \n");
        file_close(t->fileindex);
        free(t);
        return NULL;
    }
    //printf("term init done %d s %d e %d \n", t->tindex, t->chunk_s, t->chunk_e);

    return t;
} 
Example #11
int
film::process ()
{
  int audioSize;
  uint8_t *buffer;
  uint8_t *buffer2;
  int frameFinished;
  int numBytes;
  shot s;
  static struct SwsContext *img_convert_ctx = NULL;

  create_main_dir ();

  string graphpath = this->global_path + "/";
  g = new graph (600, 400, graphpath, threshold, this);
  g->set_title ("Motion quantity");

  /*
   * Register all formats and codecs 
   */
  av_register_all ();

  if (av_open_input_file (&pFormatCtx, input_path.c_str (), NULL, 0, NULL) != 0)
    {
      string error_msg = "Impossible to open file ";
      error_msg += input_path;
      shotlog (error_msg);
      return -1;		// Couldn't open file
    }

  /*
   * Retrieve stream information 
   */
  if (av_find_stream_info (pFormatCtx) < 0)
    return -1;			// Couldn't find stream information


  // dump_format (pFormatCtx, 0, path.c_str (), false);
  videoStream = -1;
  audioStream = -1;


  /*
   * Detect streams types 
   */
  for (int j = 0; j < pFormatCtx->nb_streams; j++)
    {
      switch (pFormatCtx->streams[j]->codec->codec_type)
	{
	case AVMEDIA_TYPE_VIDEO:
	  videoStream = j;
	  break;

	case AVMEDIA_TYPE_AUDIO:
	  audioStream = j;
	  break;

	default:
	  break;
	}
    }



  /*
   * Get a pointer to the codec context for the audio stream
   */
  if (audioStream != -1)
    {
      if (audio_set)
        {
          string xml_audio = graphpath + "/" + "audio.xml";
          init_xml (xml_audio);
        }

      pCodecCtxAudio = pFormatCtx->streams[audioStream]->codec;
      pCodecAudio = avcodec_find_decoder (pCodecCtxAudio->codec_id);

      if (pCodecAudio == NULL)
        return -1;              // Codec not found
      if (avcodec_open (pCodecCtxAudio, pCodecAudio) < 0)
        return -1;              // Could not open codec

    }
  update_metadata ();
  /*
   * Find the decoder for the video stream 
   */
  if (videoStream != -1)
    {
      pCodecCtx = pFormatCtx->streams[videoStream]->codec;
      pCodec = avcodec_find_decoder (pCodecCtx->codec_id);

      if (pCodec == NULL)
	return -1;		// Codec not found
      if (avcodec_open (pCodecCtx, pCodec) < 0)
	return -1;		// Could not open codec

      /*
       * Allocate video frame 
       */
      pFrame = avcodec_alloc_frame ();
      pFrameRGB = avcodec_alloc_frame ();
      pFrameRGBprev = avcodec_alloc_frame ();

      /*
       * Determine required buffer size and allocate buffer 
       */
      numBytes = avpicture_get_size (PIX_FMT_RGB24, width, height);

      buffer = (uint8_t *) malloc (sizeof (uint8_t) * numBytes);
      buffer2 = (uint8_t *) malloc (sizeof (uint8_t) * numBytes);

      /*
       * Assign appropriate parts of buffer to image planes in pFrameRGB 
       */
      avpicture_fill ((AVPicture *) pFrameRGB, buffer, PIX_FMT_RGB24, width, height);

      avpicture_fill ((AVPicture *) pFrameRGBprev, buffer2, PIX_FMT_RGB24, width, height);


      /*
       * Set up the first shot
       */
      s.fbegin = 0;
      s.msbegin = 0;
      s.myid = 0;
      shots.push_back (s);



    }


  checknumber = (samplerate * samplearg) / 1000;

  /*
   * Main processing loop over the stream
   */
  this->frame_number = 0;
  while (av_read_frame (pFormatCtx, &packet) >= 0)
    {
      if (packet.stream_index == videoStream)
	{
          AVPacket pkt;
          av_init_packet (&pkt);
          pkt.data = packet.data;
          pkt.size = packet.size;
          avcodec_decode_video2 (pCodecCtx, pFrame, &frameFinished, &pkt);

        if (frameFinished)
	    {
	      // Convert the image into RGB24
	      if (! img_convert_ctx)
		{
		  img_convert_ctx = sws_getContext(width, height, pCodecCtx->pix_fmt,
						   width, height, PIX_FMT_RGB24, SWS_BICUBIC, 
						   NULL, NULL, NULL);
		  if (! img_convert_ctx) 
		  {
		    fprintf(stderr, "Cannot initialize the conversion context!\n");
		    exit(1);
		  }
		}
	      
	      /* API: int sws_scale(SwsContext *c, uint8_t *src, int srcStride[], int srcSliceY, int srcSliceH, uint8_t dst[], int dstStride[] )
	       */
	      sws_scale(img_convert_ctx, pFrame->data, 
			pFrame->linesize, 0, 
			pCodecCtx->height,
			pFrameRGB->data, pFrameRGB->linesize);

	      /*
		Old API doc (cf http://www.dranger.com/ffmpeg/functions.html )
		int img_convert(AVPicture *dst, int dst_pix_fmt,
		                const AVPicture *src, int src_pix_fmt,
				int src_width, int src_height)
	      */
	      /*
	      img_convert ((AVPicture *) pFrameRGB, PIX_FMT_RGB24, (AVPicture *) pFrame, pCodecCtx->pix_fmt, width, height);
	      */

              this->frame_number++;
              /* If this is not the first frame */
              if (this->frame_number > 2)
                {
                  CompareFrame (pFrameRGB, pFrameRGBprev);
                }
	      else
		{
                  /*
                   * First frame: always create the first image
                   */
		  image *begin_i = new image (this, width, height, s.myid, BEGIN);
		  begin_i->create_img_dir ();
		  begin_i->SaveFrame (pFrameRGB);
		  shots.back ().img_begin = begin_i;
		}
	     memcpy (buffer2, buffer, numBytes);
	    }
	}
      if (audio_set && (packet.stream_index == audioStream))
	{
	  process_audio ();
	}
      /*
       * Free the packet that was allocated by av_read_frame 
       */
      if (packet.data != NULL)
	av_free_packet (&packet);
    }

  if (videoStream != -1)
    {
      /* Set up the last frame */
      int lastFrame = this->frame_number;
      shots.back ().fduration = lastFrame - shots.back ().fbegin;
      shots.back ().msduration = int (((shots.back ().fduration) * 1000) / fps);
      duration.mstotal = int (shots.back ().msduration + shots.back ().msbegin);
      image *end_i = new image (this, width, height, shots.back ().myid, END);
      end_i->SaveFrame (pFrameRGB);
      shots.back ().img_end = end_i;

      /*
       * Graph of the motion quantity
       */
      g->init_gd ();
      g->draw_all_canvas ();
      g->draw_color_datas ();
      g->draw_datas ();
      if (video_set)
	{
	  string xml_color = graphpath + "/" + "video.xml";
	  g->write_xml (xml_color);
	}
      g->save ();

      /*
       * Free the RGB images 
       */
      free (buffer);
      free (buffer2);
      av_free (pFrameRGB);
      av_free (pFrame);
      av_free (pFrameRGBprev);
      avcodec_close (pCodecCtx);
    }
  /*
   * Close the codec 
   */
  if (audioStream != -1)
    {
      /* Close the XML file */
      if (audio_set) close_xml ();
      avcodec_close (pCodecCtxAudio);
    }

  /*
   * Close the video file
   */
  av_close_input_file (pFormatCtx);

  return 0;
}
/**
 * @brief Camera sensor Capabilities
 * @param size buffer size
 * @param capabilities buffer address
 * @return zero for success or non-zero on any faillure
 */
int get_capabilities(uint32_t *size, uint8_t *capabilities)
{
    struct camera_metadata_package  *metadata_ptr;
    uint8_t *cap;
    int index = 0, buffercount = 0, ret;

    const uint8_t availableAntibandingModes[] = {
        CONTROL_AE_ANTIBANDING_MODE_OFF,
        CONTROL_AE_ANTIBANDING_MODE_50HZ,
        CONTROL_AE_ANTIBANDING_MODE_60HZ,
        CONTROL_AE_ANTIBANDING_MODE_AUTO
    };
    const uint8_t availableAeModes[] = {
        CONTROL_AE_MODE_OFF,
        CONTROL_AE_MODE_ON,
        CONTROL_AE_MODE_ON_AUTO_FLASH,
        CONTROL_AE_MODE_ON_ALWAYS_FLASH,
        CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
    };
    const int32_t availableTargetFpsRanges[] = {5, 30, 15, 30};
    const int32_t exposureCompensationRange[] = {-9, 9};
    const camera_metadata_rational_t exposureCompensationStep = {1, 3};
    const uint8_t availableAfModesBack[] = {
        CONTROL_AF_MODE_OFF,
        CONTROL_AF_MODE_AUTO,
        CONTROL_AF_MODE_MACRO,
        CONTROL_AF_MODE_CONTINUOUS_VIDEO,
        CONTROL_AF_MODE_CONTINUOUS_PICTURE
    };
    const uint8_t availableSceneModes[] = {
        CONTROL_SCENE_MODE_DISABLED,
        CONTROL_SCENE_MODE_FACE_PRIORITY,
        CONTROL_SCENE_MODE_ACTION,
        CONTROL_SCENE_MODE_PORTRAIT,
        CONTROL_SCENE_MODE_LANDSCAPE,
        CONTROL_SCENE_MODE_NIGHT,
        CONTROL_SCENE_MODE_NIGHT_PORTRAIT,
        CONTROL_SCENE_MODE_THEATRE,
        CONTROL_SCENE_MODE_BEACH,
        CONTROL_SCENE_MODE_SNOW,
        CONTROL_SCENE_MODE_SUNSET,
        CONTROL_SCENE_MODE_STEADYPHOTO,
        CONTROL_SCENE_MODE_FIREWORKS,
        CONTROL_SCENE_MODE_SPORTS,
        CONTROL_SCENE_MODE_PARTY,
        CONTROL_SCENE_MODE_CANDLELIGHT,
        CONTROL_SCENE_MODE_BARCODE,
        CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO,
        CONTROL_SCENE_MODE_HDR
    };
    const uint8_t availableVstabModes[] = {
        CONTROL_VIDEO_STABILIZATION_MODE_OFF,
        CONTROL_VIDEO_STABILIZATION_MODE_ON
    };
    const uint8_t availableAwbModes[] = {
        CONTROL_AWB_MODE_OFF,
        CONTROL_AWB_MODE_AUTO,
        CONTROL_AWB_MODE_INCANDESCENT,
        CONTROL_AWB_MODE_FLUORESCENT,
        CONTROL_AWB_MODE_WARM_FLUORESCENT,
        CONTROL_AWB_MODE_DAYLIGHT,
        CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,
        CONTROL_AWB_MODE_TWILIGHT,
        CONTROL_AWB_MODE_SHADE,
    };
    const int32_t max3aRegions[] = {0, 0, 0};
    const uint8_t flashAvailable = FLASH_INFO_AVAILABLE_FALSE;
    const int32_t jpegThumbnailSizes[] = {0, 0, 160, 120, 320, 240};
    const float focalLength = 2.50f;
    const int32_t max_output_streams[] = {
        MAX_STALLING_STREAMS,
        MAX_PROCESSED_STREAMS,
        MAX_RAW_STREAMS
    };
    const int32_t scalar_formats[] = {
        SCALER_AVAILABLE_FORMATS_RAW16,
        SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
        SCALER_AVAILABLE_FORMATS_YV12,
        SCALER_AVAILABLE_FORMATS_YCrCb_420_SP,
        SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
        SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
        SCALER_AVAILABLE_FORMATS_BLOB
    };
    const float maxZoom = 10.0f;
    const int32_t orientation = 0;
    const int32_t SensitivityRange[2] = {100, 1600};
    const float sensorPhysicalSize[2] = {3.20f, 2.40f};
    const int32_t Resolution[] = {640, 480};
    const int32_t maxFaceCount = 8;

    /* buffer initial */
    cap = capabilities;
    metadata_ptr = zalloc(sizeof(struct camera_metadata_package));
    if (!metadata_ptr)
        return -ENOMEM;

    metadata_ptr->entries = zalloc(MAX_METADATA_NUMBER *
                                   sizeof(struct camera_metadata_entry));
    if (!metadata_ptr->entries) {
        goto err_free_metadata_ptr;
    }

    /* need to modify it */
    metadata_ptr->data = zalloc(MAX_METADATA_NUMBER * MAX_METADATA_SIZE);
    if (!metadata_ptr->data ) {
        goto err_free_metadata;
    }
    metadata_ptr->header.version = ARA_METADATA_VERSION;

    /* CONTROL_AE_AVAILABLE_ANTIBANDING_MODES */
    ret = update_metadata(metadata_ptr, TYPE_BYTE,
                          CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                          sizeof(availableAntibandingModes),
                          availableAntibandingModes);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AE_AVAILABLE_MODES */
    ret = update_metadata(metadata_ptr, TYPE_BYTE, CONTROL_AE_AVAILABLE_MODES,
                          sizeof(availableAeModes), availableAeModes);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
                          sizeof(availableTargetFpsRanges),
                          (uint8_t *)availableTargetFpsRanges);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AE_COMPENSATION_RANGE */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          CONTROL_AE_COMPENSATION_RANGE,
                          sizeof(exposureCompensationRange),
                          (uint8_t *)exposureCompensationRange);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AE_COMPENSATION_STEP */
    ret = update_metadata(metadata_ptr, TYPE_RATIONAL,
                          CONTROL_AE_COMPENSATION_STEP,
                          sizeof(exposureCompensationStep),
                          (uint8_t *)&exposureCompensationStep);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AF_AVAILABLE_MODES */
    ret = update_metadata(metadata_ptr, TYPE_BYTE, CONTROL_AF_AVAILABLE_MODES,
                          sizeof(availableAfModesBack), availableAfModesBack);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AVAILABLE_SCENE_MODES */
    ret = update_metadata(metadata_ptr, TYPE_BYTE,
                          CONTROL_AVAILABLE_SCENE_MODES,
                          sizeof(availableSceneModes), availableSceneModes);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES */
    ret = update_metadata(metadata_ptr, TYPE_BYTE,
                          CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                          sizeof(availableVstabModes), availableVstabModes);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_AWB_AVAILABLE_MODES */
    ret = update_metadata(metadata_ptr, TYPE_BYTE, CONTROL_AWB_AVAILABLE_MODES,
                          sizeof(availableAwbModes), availableAwbModes);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_MAX_REGIONS */
    ret = update_metadata(metadata_ptr, TYPE_BYTE, CONTROL_MAX_REGIONS,
                          sizeof(max3aRegions), (uint8_t *)max3aRegions);
    if (ret) {
        goto err_free_all;
    }

    /* CONTROL_SCENE_MODES_OVERRIDES */

    /* FLASH_INFO_AVAILABLE */
    ret = update_metadata(metadata_ptr, TYPE_BYTE, FLASH_INFO_AVAILABLE,
                          sizeof(flashAvailable), &flashAvailable);
    if (ret) {
        goto err_free_all;
    }

    /* JPEG_AVAILABLE_THUMBNAIL_SIZES */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          JPEG_AVAILABLE_THUMBNAIL_SIZES,
                          sizeof(jpegThumbnailSizes),
                          (uint8_t *)jpegThumbnailSizes);
    if (ret) {
        goto err_free_all;
    }

    /* LENS_INFO_AVAILABLE_FOCAL_LENGTHS */
    ret = update_metadata(metadata_ptr, TYPE_FLOAT,
                          LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                          sizeof(focalLength), (uint8_t *)&focalLength);
    if (ret) {
        goto err_free_all;
    }

    /* REQUEST_MAX_NUM_OUTPUT_STREAMS */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          REQUEST_MAX_NUM_OUTPUT_STREAMS,
                          sizeof(max_output_streams),
                          (uint8_t *)max_output_streams);
    if (ret) {
        goto err_free_all;
    }

    /* REQUEST_AVAILABLE_REQUEST_KEYS = 0x68 */
    /* REQUEST_AVALIABLE_RESULT_KEYS */
    /* REQUEST_AVALIABLE_CHARACTERISTICS */

    /* SCALER_AVAILABLE_FORMATS */
    ret = update_metadata(metadata_ptr, TYPE_INT32, SCALER_AVAILABLE_FORMATS,
                          sizeof(scalar_formats), (uint8_t *)scalar_formats);
    if (ret) {
        goto err_free_all;
    }

    /* SCALER_AVAILABLE_MAX_DIGITAL_ZOOM */
    ret = update_metadata(metadata_ptr, TYPE_FLOAT,
                          SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
                          sizeof(maxZoom), (uint8_t *)&maxZoom);
    if (ret) {
        goto err_free_all;
    }

    /* SENSOR_ORIENTATION */
    ret = update_metadata(metadata_ptr, TYPE_INT32, SENSOR_ORIENTATION,
                          sizeof(orientation), (uint8_t *)&orientation);
    if (ret) {
        goto err_free_all;
    }

    /* SENSOR_INFO_SENSITIVITY_RANGE */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          SENSOR_INFO_SENSITIVITY_RANGE,
                          sizeof(SensitivityRange),
                          (uint8_t *)SensitivityRange);
    if (ret) {
        goto err_free_all;
    }

    /* SENSOR_INFO_PHYSICAL_SIZE */
    ret = update_metadata(metadata_ptr, TYPE_FLOAT, SENSOR_INFO_PHYSICAL_SIZE,
                          sizeof(sensorPhysicalSize),
                          (uint8_t *)sensorPhysicalSize);
    if (ret) {
        goto err_free_all;
    }

    /* SENSOR_INFO_PIXEL_ARRAY_SIZE */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          SENSOR_INFO_PIXEL_ARRAY_SIZE,
                          sizeof(Resolution), (uint8_t *)Resolution);
    if (ret) {
        goto err_free_all;
    }

    /* STATISTICS_INFO_MAX_FACE_COUNT */
    ret = update_metadata(metadata_ptr, TYPE_INT32,
                          STATISTICS_INFO_MAX_FACE_COUNT,
                          sizeof(maxFaceCount), (uint8_t *)&maxFaceCount);
    if (ret) {
        goto err_free_all;
    }

    /* Do the Data Copy */
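    /* Serialized layout: fixed header first, then the entry table, then the
       packed metadata data blob. */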
    memcpy(cap, &metadata_ptr->header, sizeof(metadata_ptr->header));
    buffercount = sizeof(metadata_ptr->header);
    metadata_ptr->header.entry_start = buffercount;
    cap += buffercount;

    for (index = 0; index < metadata_ptr->header.entry_count; index++) {
        memcpy(cap, &metadata_ptr->entries[index],
               sizeof(metadata_ptr->entries[0]));
        buffercount += sizeof(metadata_ptr->entries[0]);
        cap += sizeof(metadata_ptr->entries[0]);
    }

    metadata_ptr->header.metadata_data_start = buffercount;
    memcpy(cap, &metadata_ptr->data[0], metadata_ptr->header.size);

    /* total size of the metadata package */
    buffercount += metadata_ptr->header.size;
    metadata_ptr->header.size = buffercount;
    *size = buffercount;
    metadata_ptr->header.metadata_data_count =
        metadata_ptr->header.entry_count;

    /* re-copy the header so the serialized buffer carries the final
       offsets, counts and total size */
    memcpy(capabilities, &metadata_ptr->header, sizeof(metadata_ptr->header));

    free(metadata_ptr->data);
    free(metadata_ptr->entries);
    free(metadata_ptr);
    return 0;

err_free_all:
    free(metadata_ptr->data);
err_free_metadata:
    free(metadata_ptr->entries);
err_free_metadata_ptr:
    free(metadata_ptr);
    return -ENOMEM;
}
/**
 * @brief Camera sensor capture result metadata
 * @param size buffer size
 * @param capabilities buffer address
 * @return zero for success or non-zero on any faillure
 */
int get_capture_results_metadata(uint32_t *size, uint8_t *capabilities)
{
    struct camera_metadata_package  *metadata_ptr;
    uint8_t *cap;
    int index = 0, buffercount = 0, ret;

    const float focalLength = 2.50f;
    const float focusDistance = 0;

    /* buffer initial */
    cap = capabilities;
    metadata_ptr = zalloc(sizeof(struct camera_metadata_package));
    if (!metadata_ptr)
        return -ENOMEM;

    metadata_ptr->entries = zalloc(MAX_METADATA_NUMBER *
                                   sizeof(struct camera_metadata_entry));
    if (!metadata_ptr->entries) {
        goto err_free_metadata_ptr;     /* frees metadata_ptr, avoids a double free */
    }

    metadata_ptr->data = zalloc(MAX_METADATA_NUMBER * MAX_METADATA_SIZE);
    if (!metadata_ptr->data ) {
        goto err_free_metadata;
    }
    metadata_ptr->header.version = ARA_METADATA_VERSION;

    /* LENS_FOCUS_RANGE */
    /* SENSOR_TIMESTAMP */
    /* STATISTICS_FACE_IDS */
    /* STATISTICS_FACE_LANDMARKS */
    /* STATISTICS_FACE_RECTANGLES */
    /* STATISTICS_FACE_SCORES */

    /* LENS_INFO_AVAILABLE_FOCAL_LENGTHS */
    ret = update_metadata(metadata_ptr, TYPE_FLOAT,
                          LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                          sizeof(focalLength), (uint8_t *)&focalLength);
    if (ret) {
        goto err_free_all;
    }

    /* LENS_FOCUS_DISTANCE */
    ret = update_metadata(metadata_ptr, TYPE_FLOAT, LENS_FOCUS_DISTANCE,
                          sizeof(focusDistance), (uint8_t *)&focusDistance);
    if (ret) {
        goto err_free_all;
    }

    /* Do the Data Copy */
    memcpy(cap, &metadata_ptr->header, sizeof(metadata_ptr->header));
    buffercount = sizeof(metadata_ptr->header);
    metadata_ptr->header.entry_start = buffercount;
    cap += buffercount;

    for (index = 0; index < metadata_ptr->header.entry_count; index++) {
        memcpy(cap, &metadata_ptr->entries[index],
               sizeof(metadata_ptr->entries[0]));
        buffercount += sizeof(metadata_ptr->entries[0]);
        cap += sizeof(metadata_ptr->entries[0]);
    }

    metadata_ptr->header.metadata_data_start = buffercount;
    memcpy(cap, &metadata_ptr->data[0], metadata_ptr->header.size);

    /* total size of the metadata package */
    buffercount += metadata_ptr->header.size;
    metadata_ptr->header.size = buffercount;
    *size = buffercount;
    metadata_ptr->header.metadata_data_count =
        metadata_ptr->header.entry_count;

    /* re-copy the header so the serialized buffer carries the final
       offsets, counts and total size */
    memcpy(capabilities, &metadata_ptr->header, sizeof(metadata_ptr->header));

    free(metadata_ptr->data);
    free(metadata_ptr->entries);
    free(metadata_ptr);
    return 0;

err_free_all:
    free(metadata_ptr->data);
err_free_metadata:
    free(metadata_ptr->entries);
err_free_metadata_ptr:
    free(metadata_ptr);
    return -ENOMEM;
}
Example #14
void
cb_get_object(void *handle)
{
  dpl_async_task_t *atask = (dpl_async_task_t *) handle;
  int i;
  dpl_dict_var_t *metadatum = NULL;
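  /* Async GET completed: verify the object payload and its metadata, then
     chain into the metadata update step. */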

  if (DPL_SUCCESS != atask->ret)
    {
      fprintf(stderr, "dpl_get failed: %s (%d)\n", dpl_status_str(atask->ret), atask->ret);
      exit(1);
    }

  fprintf(stderr, "checking object\n");

  if (DATA_LEN != atask->u.get.buf->size)
    {
      fprintf(stderr, "data lengths mismatch\n");
      exit(1);
    }

  for (i = 0; i < DATA_LEN; i++)
    if (atask->u.get.buf->ptr[i] != 'z')
      {
        fprintf(stderr, "data content mismatch\n");
        exit(1);
      }

  fprintf(stderr, "checking metadata\n");

  metadatum = dpl_dict_get(atask->u.get.metadata, "foo");
  if (NULL == metadatum)
    {
      fprintf(stderr, "missing metadatum\n");
      exit(1);
    }

  assert(metadatum->val->type == DPL_VALUE_STRING);
  if (strcmp(dpl_sbuf_get_str(metadatum->val->string), "bar"))
    {
      fprintf(stderr, "bad value in metadatum\n");
      exit(1);
    }
  
  metadatum = dpl_dict_get(atask->u.get.metadata, "foo2");
  if (NULL == metadatum)
    {
      fprintf(stderr, "missing metadatum\n");
      exit(1);
    }

  assert(metadatum->val->type == DPL_VALUE_STRING);
  if (strcmp(dpl_sbuf_get_str(metadatum->val->string), "qux"))
    {
      fprintf(stderr, "bad value in metadatum\n");
      exit(1);
    }

  dpl_async_task_free(atask);

  update_metadata();
}
Example #15
static void update_image (void * data, GObject * object)
{
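    /* Flag the image for re-checking, then fall through to the regular
       metadata update. */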
    recheck_image = TRUE;
    update_metadata (data, object);
}
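/* MPEG-4 start-code scanner: parse_state A0..A4 records how many bytes of the
   4-byte start code have been matched so far; reaching A4 marks a frame
   boundary. */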
int frame_parse::parse_mpeg4_frame ( OMX_BUFFERHEADERTYPE *source,
                                     OMX_BUFFERHEADERTYPE *dest ,
                                     OMX_U32 *partialframe)
{
    OMX_U8 *pdest = NULL,*psource = NULL;
    OMX_U32 dest_len =0, source_len = 0, temp_len = 0;
    OMX_U32 parsed_length = 0,i=0;
    int residue_byte = 0;

    if (source == NULL || dest == NULL || partialframe == NULL)
    {
        return -1;
    }

  /*Calculate how many bytes are left in source and destination*/
    dest_len = dest->nAllocLen - (dest->nFilledLen + dest->nOffset);
    psource = source->pBuffer + source->nOffset;
    pdest = dest->pBuffer + (dest->nFilledLen + dest->nOffset);
    source_len = source->nFilledLen;

    /* Need a minimum of 4 bytes in the destination to copy at least the start code */
    if (dest_len < 4 || source_len == 0)
    {
        DEBUG_PRINT_LOW("\n Dest_len %d source_len %d",dest_len,source_len);
        if (source_len == 0 && (source->nFlags & 0x01))
        {
            DEBUG_PRINT_LOW("\n EOS condition Inform Client that it is complete frame");
            *partialframe = 0;
            return 1;
        }
        DEBUG_PRINT_LOW("\n Error in Parsing bitstream");
        return -1;
    }

    /*Check if State of the previous find is a Start code*/
    if (parse_state == A4)
    {
        /* Minimum destination size of 4 was already checked above */
        dest->nFlags = flags;
        dest->nTimeStamp = time_stamp;
        update_metadata(source->nTimeStamp,source->nFlags);
        memcpy (pdest,start_code,4);
        pdest [2] = prev_one;
        pdest [3] = last_byte;
        dest->nFilledLen += 4;
        pdest += 4;
        parse_state = A0;
    }

    /*Entry State Machine*/
    while ( source->nFilledLen > 0 && parse_state != A0
            && parse_state != A4 && dest_len > 0
          )
    {
        //printf ("\n In the Entry Loop");
        switch (parse_state)
        {
         case A3:
             /*If fourth Byte is matching then start code is found*/
             if ((*psource & mask_code [3]) == start_code [3])
             {
               last_byte = *psource;
               parse_state = A4;
               source->nFilledLen--;
               source->nOffset++;
               psource++;
             }
             else if ((start_code [1] == start_code [0]) && (start_code [2]  == start_code [1]))
             {
                 parse_state = A2;
                 memcpy (pdest,start_code,1);
                 pdest++;
                 dest->nFilledLen++;
                 dest_len--;
             }
             else if (start_code [2] == start_code [0])
             {
                 parse_state = A1;
                 memcpy (pdest,start_code,2);
                 pdest += 2;
                 dest->nFilledLen += 2;
                 dest_len -= 2;
             }
             else
             {
                 parse_state = A0;
                 memcpy (pdest,start_code,3);
                 pdest += 3;
                 dest->nFilledLen +=3;
                 dest_len -= 3;
             }
             break;

         case A2:
             if ((*psource & mask_code [2]) == start_code [2])
             {
                 prev_one = *psource;
                 parse_state = A3;
                 source->nFilledLen--;
                 source->nOffset++;
                 psource++;
             }
             else if (start_code [1] == start_code [0])
             {
                 parse_state = A1;
                 memcpy (pdest,start_code,1);
                 dest->nFilledLen += 1;
                 dest_len--;
                 pdest++;
             }
             else
             {
                 parse_state = A0;
                 memcpy (pdest,start_code,2);
                 dest->nFilledLen += 2;
                 dest_len -= 2;
                 pdest += 2;
             }
             break;

         case A1:
             if ((*psource & mask_code [1]) == start_code [1])
             {
                 parse_state = A2;
                 source->nFilledLen--;
                 source->nOffset++;
                 psource++;
             }
             else
             {
                 memcpy (pdest,start_code,1);
                 dest->nFilledLen +=1;
                 pdest++;
                 dest_len--;
                 parse_state = A0;
             }
             break;
         case A4:
         case A0:
             break;
        }
        dest_len = dest->nAllocLen - (dest->nFilledLen + dest->nOffset);
    }

     if (parse_state == A4)
     {
         *partialframe = 0;
         DEBUG_PRINT_LOW("\n Nal Found length is %d",dest->nFilledLen);
         return 1;
     }

     /*Partial Frame is true*/
     *partialframe = 1;

    /*Calculate how many bytes are left in source and destination*/
    dest_len = dest->nAllocLen - (dest->nFilledLen + dest->nOffset);
    psource = source->pBuffer + source->nOffset;
    pdest = dest->pBuffer + (dest->nFilledLen + dest->nOffset);
    source_len = source->nFilledLen;

    temp_len = (source_len < dest_len)?source_len:dest_len;

    /*Check if entry state machine consumed source or destination*/
    if (temp_len == 0)
    {
        return 1;
    }

    /*Parsing State Machine*/
    while  (parsed_length < temp_len)
    {
      switch (parse_state)
      {
      case A0:
          if ((psource [parsed_length] & mask_code [0])  == start_code[0])
          {
            parse_state = A1;
          }
          parsed_length++;
          break;
      case A1:
          if ((psource [parsed_length] & mask_code [1]) == start_code [1])
          {
            parsed_length++;
            parse_state = A2;
          }
          else
          {
            parse_state = A0;
          }
      break;
      case A2:
          if ((psource [parsed_length] & mask_code [2]) == start_code [2])
          {
            prev_one = psource [parsed_length];
            parsed_length++;
            parse_state = A3;
          }
          else if (start_code [1] == start_code [0])
          {
            parse_state = A1;
          }
          else
          {
            parse_state = A0;
          }
          break;
      case A3:
          if ((psource [parsed_length] & mask_code [3]) == start_code [3])
          {
            last_byte = psource [parsed_length];
            parsed_length++;
            parse_state = A4;
          }
          else if ((start_code [1] == start_code [0]) && (start_code [2] == start_code [1]))
          {
             parse_state = A2;
          }
          else if (start_code [2] == start_code [0])
          {
              parse_state = A1;
          }
          else
          {
              parse_state = A0;
          }
          break;
      default:
          break;
      }

      /*Found the code break*/
      if (parse_state == A4)
      {
          break;
      }
    }

    /*Exit State Machine*/
    psource = source->pBuffer + source->nOffset;
    switch (parse_state)
    {
    case A4:
      *partialframe = 0;
      if (parsed_length > 4)
      {
        memcpy (pdest,psource,(parsed_length-4));
        dest->nFilledLen += (parsed_length-4);
      }
      break;
    case A3:
      if (parsed_length > 3)
      {
        memcpy (pdest,psource,(parsed_length-3));
        dest->nFilledLen += (parsed_length-3);
      }
      break;
    case A2:
        if (parsed_length > 2)
        {
          memcpy (pdest,psource,(parsed_length-2));
          dest->nFilledLen += (parsed_length-2);
        }
      break;
    case A1:
        if (parsed_length > 1)
        {
          memcpy (pdest,psource,(parsed_length-1));
          dest->nFilledLen += (parsed_length-1);
        }
      break;
    case A0:
      memcpy (pdest,psource,(parsed_length));
      dest->nFilledLen += (parsed_length);
      break;
    }

     if (source->nFilledLen < parsed_length)
     {
         printf ("\n FATAL Error");
         return -1;
     }
      source->nFilledLen -= parsed_length;
      source->nOffset += parsed_length;

    return 1;
}