Code Example #1
File: video_capture.cpp Project: AdamdbUT/mac-gyver
bool start_capturing(int fd)
{
    switch(g_param.capture_method)
    {
        case VIDEO_READ_WRITE:
            return true;
        case VIDEO_MMAP:
            {
                if(!enqueue_buffer(fd))
                    return false;

                enum v4l2_buf_type type;
                type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
                if(xioctl(fd,VIDIOC_STREAMON,&type)==-1)
                {
                    printf("[video] Error: could not start streaming\n");
                    return false;
                }

                if(!dequeue_buffer(fd))
                    return false;

                return true;
            }
        case VIDEO_USERP:
            printf("[video] Error: start streaming for user pointer method unimplemented\n");
            return false;
        default:
            printf("[video] Error: no capture used[internal error]{stac}\n");
            return false;
    }
}
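The mmap path above relies on two helpers that the listing does not reproduce. A minimal sketch of what they plausibly look like, assuming the project's xioctl wrapper and a single mmap buffer (the actual video_capture.cpp versions may differ):

#include <cstring>
#include <cstdio>
#include <linux/videodev2.h>

// Hypothetical reconstruction: hand buffer 0 back to the driver (VIDIOC_QBUF).
bool enqueue_buffer(int fd)
{
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = 0;                       // single-buffer case for illustration
    if(xioctl(fd, VIDIOC_QBUF, &buf) == -1)
    {
        printf("[video] Error: could not enqueue buffer\n");
        return false;
    }
    return true;
}

// Hypothetical reconstruction: block until the driver has filled a buffer (VIDIOC_DQBUF).
bool dequeue_buffer(int fd)
{
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if(xioctl(fd, VIDIOC_DQBUF, &buf) == -1)
    {
        printf("[video] Error: could not dequeue buffer\n");
        return false;
    }
    return true;
}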
Code Example #2
File: video_capture.cpp Project: AdamdbUT/mac-gyver
bool video_capture(int fd)
{
    int brightness = -1, contrast = -1, saturation = -1, hue = -1, gamma = -1; /* -1 marks values the driver refused to report */
    if(!video_get_integer_control(fd,V4L2_CID_BRIGHTNESS,brightness))
        printf("[video] Warning: could not get brighness\n");
    if(!video_get_integer_control(fd,V4L2_CID_CONTRAST,contrast))
        printf("[video] Warning: could not get contrast\n");
    if(!video_get_integer_control(fd,V4L2_CID_SATURATION,saturation))
        printf("[video] Warning: could not get saturation\n");
    if(!video_get_integer_control(fd,V4L2_CID_HUE,hue))
        printf("[video] Warning: could not get hue\n");
    if(!video_get_integer_control(fd,V4L2_CID_GAMMA,gamma))
        printf("[video] Warning: could not get gamma adjust\n");
    printf("[video] (capture) brightness=%d contrast=%d saturation=%d hue=%d gamma=%d\n",brightness,contrast,saturation,hue,gamma);

    switch(g_param.capture_method)
    {
        case VIDEO_READ_WRITE:
            printf("[video] Error: video capturing using read/write method unimplemented\n");
            return false;
        case VIDEO_USERP:
            printf("[video] Error: video capturing using user pointer method unimplemented\n");
            return false;
        case VIDEO_MMAP:
            {
                if(!enqueue_buffer(fd))
                    return false;
                return dequeue_buffer(fd);
            }
        default:
            printf("[video] Error: no capture used[internal error]{vc}\n");
            return false;
    }
}
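video_get_integer_control is likewise not shown; since the callers pass plain ints rather than addresses, it presumably takes the output by reference. A sketch over the standard VIDIOC_G_CTRL ioctl, inferred from the call sites rather than taken from the project:

// Hypothetical reconstruction: read one integer control (brightness, contrast, ...).
bool video_get_integer_control(int fd, uint32_t id, int &value)
{
    struct v4l2_control ctrl;
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = id;
    if(xioctl(fd, VIDIOC_G_CTRL, &ctrl) == -1)
        return false;                     // control unsupported or query failed
    value = ctrl.value;
    return true;
}

With the -1 initialization above, a failed query makes the summary printf report -1 instead of an uninitialized value.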
Code Example #3
MjpegFrame *MjpegCamera::get_frame( ) {
	size_t bytesused;
	int bufno = dequeue_buffer(bytesused);
	MjpegFrame *ret = new MjpegFrame(buffers[bufno].start, bytesused);
	queue_buffer(bufno);
	return ret;
}
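This and the other MjpegCamera examples (#11 and #12 below) share a dequeue_buffer(size_t &)/queue_buffer(int) pair that the listings do not include. A plausible sketch of the dequeue side, assuming a V4L2 mmap setup and an fd member (both inferred from the call sites):

// Hypothetical reconstruction: pop one filled buffer, report its payload size,
// and return its index so the caller can re-queue it when done.
int MjpegCamera::dequeue_buffer(size_t &bytesused) {
	struct v4l2_buffer buf;
	memset(&buf, 0, sizeof(buf));
	buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1)  /* blocks until a frame arrives */
		throw std::runtime_error("VIDIOC_DQBUF");
	bytesused = buf.bytesused;                /* size of the JPEG in this buffer */
	return buf.index;
}

Note that get_frame() copies the payload into a new MjpegFrame before re-queuing the buffer, so the driver can safely overwrite it afterwards.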
Code Example #4
File: gstappsink.c Project: iperry/gst-plugins-base
GstSample *
gst_app_sink_pull_sample (GstAppSink * appsink)
{
  GstSample *sample = NULL;
  GstBuffer *buffer;
  GstAppSinkPrivate *priv;

  g_return_val_if_fail (GST_IS_APP_SINK (appsink), NULL);

  priv = appsink->priv;

  g_mutex_lock (&priv->mutex);

  while (TRUE) {
    GST_DEBUG_OBJECT (appsink, "trying to grab a buffer");
    if (!priv->started)
      goto not_started;

    if (priv->num_buffers > 0)
      break;

    if (priv->is_eos)
      goto eos;

    /* nothing to return, wait */
    GST_DEBUG_OBJECT (appsink, "waiting for a buffer");
    g_cond_wait (&priv->cond, &priv->mutex);
  }
  buffer = dequeue_buffer (appsink);
  GST_DEBUG_OBJECT (appsink, "we have a buffer %p", buffer);
  sample = gst_sample_new (buffer, priv->last_caps, &priv->last_segment, NULL);
  gst_buffer_unref (buffer);

  g_cond_signal (&priv->cond);
  g_mutex_unlock (&priv->mutex);

  return sample;

  /* special conditions */
eos:
  {
    GST_DEBUG_OBJECT (appsink, "we are EOS, return NULL");
    g_mutex_unlock (&priv->mutex);
    return NULL;
  }
not_started:
  {
    GST_DEBUG_OBJECT (appsink, "we are stopped, return NULL");
    g_mutex_unlock (&priv->mutex);
    return NULL;
  }
}
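On the application side, the sample returned by gst_app_sink_pull_sample() owns references to the buffer, the caps, and the segment. A hedged consumption sketch, assuming sink is an appsink element in a playing pipeline:

  GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
  if (sample != NULL) {
    GstBuffer *buf = gst_sample_get_buffer (sample);    /* borrowed, do not unref */
    GstMapInfo map;

    if (gst_buffer_map (buf, &map, GST_MAP_READ)) {
      /* process map.data / map.size here */
      gst_buffer_unmap (buf, &map);
    }
    gst_sample_unref (sample);  /* drops the buffer and caps references */
  } else {
    /* NULL means EOS or a stopped sink, per the goto targets above */
  }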
Code Example #5
File: v4l2_input.cpp Project: fritsjanb/exacore
void V4L2UpscaledInputAdapter::run_thread( ) {
    int bufno;
    size_t bufsize;
    RawFrame *out;

    for (;;) {
        bufno = dequeue_buffer(bufsize);
        
        out = new RawFrame(1920, 1080, RawFrame::CbYCrY8422);
        if (out_pipe.can_put( )) {
            do_upscale(out, (uint8_t *) buffers[bufno].start); 
            out_pipe.put(out);
        } else {
            fprintf(stderr, "V4L2 in: dropping input frame on floor\n");
            delete out;
        }

        queue_buffer(bufno);

    }
}
Code Example #6
File: gstappsink.c Project: iperry/gst-plugins-base
static GstFlowReturn
gst_app_sink_render (GstBaseSink * psink, GstBuffer * buffer)
{
  GstFlowReturn ret;
  GstAppSink *appsink = GST_APP_SINK_CAST (psink);
  GstAppSinkPrivate *priv = appsink->priv;
  gboolean emit;

restart:
  g_mutex_lock (&priv->mutex);
  if (priv->flushing)
    goto flushing;

  /* queue holding caps event might have been FLUSHed,
   * but caps state still present in pad caps */
  if (G_UNLIKELY (!priv->last_caps &&
          gst_pad_has_current_caps (GST_BASE_SINK_PAD (psink)))) {
    priv->last_caps = gst_pad_get_current_caps (GST_BASE_SINK_PAD (psink));
    GST_DEBUG_OBJECT (appsink, "activating pad caps %" GST_PTR_FORMAT,
        priv->last_caps);
  }

  GST_DEBUG_OBJECT (appsink, "pushing render buffer %p on queue (%d)",
      buffer, priv->num_buffers);

  while (priv->max_buffers > 0 && priv->num_buffers >= priv->max_buffers) {
    if (priv->drop) {
      GstBuffer *old;

      /* we need to drop the oldest buffer and try again */
      if ((old = dequeue_buffer (appsink))) {
        GST_DEBUG_OBJECT (appsink, "dropping old buffer %p", old);
        gst_buffer_unref (old);
      }
    } else {
      GST_DEBUG_OBJECT (appsink, "waiting for free space, length %d >= %d",
          priv->num_buffers, priv->max_buffers);

      if (priv->unlock) {
        /* we are asked to unlock, call the wait_preroll method */
        g_mutex_unlock (&priv->mutex);
        if ((ret = gst_base_sink_wait_preroll (psink)) != GST_FLOW_OK)
          goto stopping;

        /* we are allowed to continue now */
        goto restart;
      }

      /* wait for a buffer to be removed or flush */
      g_cond_wait (&priv->cond, &priv->mutex);
      if (priv->flushing)
        goto flushing;
    }
  }
  /* we need to ref the buffer when pushing it in the queue */
  g_queue_push_tail (priv->queue, gst_buffer_ref (buffer));
  priv->num_buffers++;
  g_cond_signal (&priv->cond);
  emit = priv->emit_signals;
  g_mutex_unlock (&priv->mutex);

  if (priv->callbacks.new_sample) {
    ret = priv->callbacks.new_sample (appsink, priv->user_data);
  } else {
    ret = GST_FLOW_OK;
    if (emit)
      g_signal_emit (appsink, gst_app_sink_signals[SIGNAL_NEW_SAMPLE], 0, &ret);
  }
  return ret;

flushing:
  {
    GST_DEBUG_OBJECT (appsink, "we are flushing");
    g_mutex_unlock (&priv->mutex);
    return GST_FLOW_FLUSHING;
  }
stopping:
  {
    GST_DEBUG_OBJECT (appsink, "we are stopping");
    return ret;
  }
}
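Both appsink listings call a static dequeue_buffer() helper that is not reproduced here. By contract it pops the oldest queued buffer while the caller holds priv->mutex and keeps num_buffers consistent; a minimal sketch of that contract (the real helper in gstappsink.c also has to deal with serialized events sitting in the queue):

static GstBuffer *
dequeue_buffer (GstAppSink * appsink)
{
  GstAppSinkPrivate *priv = appsink->priv;
  GstBuffer *buffer;

  /* caller already holds priv->mutex */
  buffer = GST_BUFFER (g_queue_pop_head (priv->queue));
  priv->num_buffers--;

  return buffer;
}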
Code Example #7
static void * comp_proc(void * para)
{
    int i, j;
    int fd;
    int channel;
    int n_buffers = 0;
    unsigned int length, offset;
    int index, num;
    struct buffer_av buf_av;
    int result;
    struct pollfd pfd;
    unsigned int buf_index;
    unsigned int buf_num;
    char * packet_addr;
    struct packet_header pkt_header;
    Query_Buf_Res * buffers = NULL;				// holds the mmap'ed buffers that receive the captured video data
    Spct_Data comp_data;

    int got_video[CHS_PER_CARD];
    int got_audio[CHS_PER_CARD];
    struct timeval v_time_stamp[CHS_PER_CARD];
    struct timeval a_time_stamp[CHS_PER_CARD];
    unsigned int last_vtstamp[CHS_PER_CARD];
    unsigned int last_atstamp[CHS_PER_CARD];
    unsigned int duration;


    DebugPrintf("thread compress\n");

    Ip_Cam_Device * ipcam = (Ip_Cam_Device *) para;

    memset(&buf_av, 0, sizeof(buf_av));

    memset(&got_video, 0, sizeof(got_video));
    memset(&got_audio, 0, sizeof(got_audio));
    memset(&last_vtstamp, 0, sizeof(last_vtstamp));
    memset(&last_atstamp, 0, sizeof(last_atstamp));


    fd = ipcam->comp_fd;
    action_fd = fd;
    n_buffers = request_buffer(fd);

    buffers = calloc(n_buffers, sizeof(* buffers));
    if(!buffers)
            EXIT("Out of memory.");

    for(i = 0; i < n_buffers; ++i)
    {
        if(query_buffer(fd, i, &length, &offset) != 0)
                EXIT("VIDIOC_QUERYBUF");
        buffers[i].length = length;
        buffers[i].start = mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, offset);
        DebugPrintf("buffers[%d].start = 0x%x\n",i, buffers[i].start);
        if(MAP_FAILED == buffers[i].start)
                EXIT("mmap");
    }

    for(index = 0; index < n_buffers; index++)
            if(queue_buffer(fd, index) != 0)
                    EXIT("VIDIOC_QBUF");

    if(streaming_on(fd) != 0)
            EXIT("VIDIOC_STREAMON");
    DebugPrintf("card stream on================================\n");
    pfd.fd = fd;
    pfd.events = POLLIN;

    int comp_proc_count = 0;

    //set_action(fd, catch_sen); // fd threshold time
    /* loop while the ipcam is in the running state */
    while(ipcam->status == CAM_STATUS_RUN)
    {
        comp_proc_count++;
        /* print a status line periodically */
        if(comp_proc_count == 3000)
        {
            comp_proc_count = 0;
            PrintScreen("\n----- comp_proc thread running -----\n");
        }

        //IsSetaction(fd);
        /* poll the capture descriptor, blocking for up to 15 s */
        result = poll(&pfd, 1, 15000);

        /* poll() failed */
        if(result < 0)
        {
            DebugPrintf("polling error ==================\n");
            continue;
        }
        /* no readable descriptor within the timeout */
        if(result == 0)
        {
            DebugPrintf("polling timed out --------------\n");
            exit(1);
        }

        dequeue_buffer(fd, &buf_av);
        buf_index = buf_av.buf_index;
        buf_num = buf_av.length;
        for(i = 0; i < CHS_PER_CARD; i++)
        {
            index = buf_index & 0xff;
            num = buf_num & 0xff;

            buf_index >>= 8;
            buf_num >>= 8;
            if(index != 0xff)
            {
                /*DATA PACKET*/
                channel = i;

                for(j = 0, packet_addr = buffers[index].start; j < num; j++)
                {
                    /* parse one data packet; motion handling is hooked in here */
                    parse_packet(&packet_addr, &pkt_header);

                    //pkt_header.motion_addr, 4 * 9
                    /* motion was detected */
                    if(pkt_header.motion_flag)
                    {
#if RELEASE_MODE
#else
                        PrintScreen("\n-------------receive a motion------------\n\n");
#endif
                        catchonemotion = 1;
                        is_action = 1;
                    }

                    if (pkt_header.videolost_flag)
                    {
                        //DebugPrintf("video lost\n");
                    }

                    if(pkt_header.audio_flag)
                    {//length
                        if(!got_audio[channel])
                        {
                            got_audio[channel] = 1;
                            //a_time_stamp[channel] = buf_av.timestamp;
                            a_time_stamp[channel].tv_usec = ((pkt_header.audio_timestamp % 32768) * 1000ULL * 1000ULL) >> 15;
                            a_time_stamp[channel].tv_sec = (pkt_header.audio_timestamp >> 15) + (a_time_stamp[channel].tv_usec / 1000000);
                            a_time_stamp[channel].tv_usec %= 1000000;
                        }
                        else
                        {
                            duration = pkt_header.audio_timestamp - last_atstamp[channel];
                            a_time_stamp[channel].tv_usec += ((duration % 32768) * 1000ULL * 1000ULL) >> 15;
                            a_time_stamp[channel].tv_sec += (duration >> 15) + (a_time_stamp[channel].tv_usec / 1000000);
                            a_time_stamp[channel].tv_usec %= 1000000;

                        }
                        last_atstamp[channel] = pkt_header.audio_timestamp;

                        //DebugPrintf("audio frame\n");
                        comp_data.channel = channel;
                        comp_data.type = DATA_AUDIO;
                        comp_data.flags = 0;
                        comp_data.timestamp = a_time_stamp[channel];
                        comp_data.size = pkt_header.audio_length;
                        comp_data.data = pkt_header.audio_addr;
                        ipcam->fun(ipcam->datahandler, &comp_data);
                    }

                    if(pkt_header.video_flag)
                    {
                        if(!got_video[channel])
                        {
                            got_video[channel] = 1;
                            //v_time_stamp[channel] = buf_av.timestamp;
                            v_time_stamp[channel].tv_usec = ((pkt_header.video_timestamp % 32768) * 1000ULL * 1000ULL) >> 15;
                            v_time_stamp[channel].tv_sec = (pkt_header.video_timestamp >> 15) + (v_time_stamp[channel].tv_usec / 1000000);
                            v_time_stamp[channel].tv_usec %= 1000000;
                        }
                        else
                        {
                            duration = pkt_header.video_timestamp - last_vtstamp[channel];
                            v_time_stamp[channel].tv_usec += ((duration % 32768) * 1000ULL * 1000ULL) >> 15;
                            v_time_stamp[channel].tv_sec += (duration >> 15) + (v_time_stamp[channel].tv_usec / 1000000);
                            v_time_stamp[channel].tv_usec %= 1000000;
                        }
                        last_vtstamp[channel] = pkt_header.video_timestamp;

                        comp_data.channel = channel;
                        comp_data.type = DATA_VIDEO;
                        comp_data.flags = pkt_header.video_type;
                        comp_data.timestamp = v_time_stamp[channel];
                        comp_data.size = pkt_header.video_length;
                        comp_data.data = pkt_header.video_addr;
                        ipcam->fun(ipcam->datahandler, &comp_data);
                    }
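(Listing #7 breaks off here in the source.) The repeated timestamp arithmetic above converts a 32768 Hz tick counter (2^15 ticks per second, hence the >> 15 shifts) into a struct timeval. Factored out as a standalone sketch of the same math:

#include <sys/time.h>

/* Convert 1/32768-second ticks to a timeval: whole seconds come from the
 * high bits, and the fractional ticks are scaled to microseconds. */
static struct timeval ticks_to_timeval(unsigned int ticks)
{
    struct timeval tv;
    tv.tv_usec = (long) (((ticks % 32768u) * 1000ULL * 1000ULL) >> 15);
    tv.tv_sec  = (long) (ticks >> 15) + tv.tv_usec / 1000000;
    tv.tv_usec %= 1000000;
    return tv;
}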
Code Example #8
File: main.c Project: gerpayt/camshot
int main(int argc, char **argv)
{
    int shmid;
    int i,j;
    char in_str[16] = "";   /* initialized: the command loop below tests in_str[0] before the first read */

	process_arguments(argc, argv);

    /* open the camera device */
	if( (camera_fd = open(psz_video_dev, O_RDWR)) < 0 )
	{
        char error_buf[256];
        sprintf(error_buf, "open() %s", psz_video_dev);
		perror(error_buf);
		exit(-1);
	}

    get_caps();
    get_format();

	if( b_verbose ) printf("Device opened.\n");

	if( b_verbose )
	{
		printf("Video device:\t\t%s\n", psz_video_dev);
		print_caps();
        print_format();
		printf("Ouput directory:\t%s\n", psz_output_dir);
		printf("Image format:\t\t%s\n",str_formats[e_outfmt]);
		printf("\n");
		printf("Opening device %s\n", psz_video_dev);
        if( b_named_filename )
        {
            printf("Ouput filename:\t%s\n", psz_output_filename);
        }
        else if( b_named_pipe )
        {
            printf("Using named pipe %s\n", psz_named_pipe);
        }
        if( b_shared_mem )
            printf("Using shared memory. key = %i\n", shared_mem_key);
	}

    if( b_printinfo )
    {
        printf("Device info:\n");
        print_caps();
        print_format();
        close(camera_fd);
        exit(EXIT_SUCCESS);    
    }

    (void)signal(SIGINT, exit_program);

    if( b_shared_mem && b_named_pipe )
    {
        printf("WARNING: shared memory and named pipe can't be used together. Use more instances of camshot. Defaulting to named pipe.\n");
        b_shared_mem = 0;
    }

    if( b_named_pipe )
    {
        int ret_val = mkfifo(psz_named_pipe, 0666);

        if ((ret_val == -1) && (errno != EEXIST)) {
            perror("Error creating the named pipe");
            exit(EXIT_FAILURE);
        }
        
    }

    if( req_width && req_height )
    {
        if( b_verbose )
            printf("Trying to set resolution to %ux%u.\n", req_width, req_height);

        if( set_width_height(req_width,req_height) == -1 )
            printf("Unable to set the desired resolution.\n");
        else
            if( b_verbose )
                printf("Resolution set to %ux%u\n", req_width, req_height);
    } else {
        get_format();
        req_width = camera_format.fmt.pix.width;
        req_height = camera_format.fmt.pix.height;
    }

    if( b_shared_mem )
    {
        if((shmid = shmget(shared_mem_key, req_width*req_height*3, IPC_CREAT | 0666)) < 0) {
            perror("Error getting shared memory id");
            exit(EXIT_FAILURE);
        }

        if((p_shm = (uint8_t *)shmat(shmid, NULL, 0)) == (void *) -1) {
            perror("Error getting shared memory ptr");
            exit(EXIT_FAILURE);
        }       

        shm_sem = semget((key_t)shared_mem_key, 1, IPC_CREAT | 0666);

        sem_set(&shm_sem);

        if( b_verbose )
            printf("Shared memory ID: %i\nSemaphore ID: %i\n", shmid, shm_sem);
    }

	total_buffers = req_mmap_buffers(2);

	/* start the capture */
	streaming_on();

    /* let the camera self-adjust by 'ignoring' 200 complete buffer queues */
    printf("Letting the camera automatically adjust the picture:");
    
    for(i=0; i<AUTO_ADJUST_TURNS; i++)
    {
        for(j=0; j<total_buffers; j++)
        {
            int ready_buf = dequeue_buffer();
            /* don't queue the last buffers */
            if( i<AUTO_ADJUST_TURNS-1 )
                queue_buffer(ready_buf);
        }

        printf(".");
        fflush(stdout);
    }

    printf("Done.\n");

    if( b_shared_mem || b_named_pipe )
    {
        pthread_create(&stream_thread, NULL, &stream_func, NULL);
        while(1)
        {
            pthread_join(stream_thread, NULL);
        }
    }
    else
    {
        pthread_create(&capture_thread, NULL, &capture_func, NULL);
    }

    if( b_named_filename )
    {
        usleep(200000);
        pthread_mutex_lock(&cond_mutex);
        pthread_cond_signal(&condition);
        pthread_mutex_unlock(&cond_mutex);
        usleep(200000);
        exit_program(SIGINT);
        fflush(stdout);
        return 0;
    }

    while( in_str[0] != 'q' )
    {
        
        printf("Command (h for help): ");
        fflush(stdout);

        if( fgets(in_str, 16, stdin) == NULL )
        {
            printf("Got NULL! Try again.\n");
            continue;
        }

        switch(in_str[0])
        {
            case 'x':
                pthread_mutex_lock(&cond_mutex);
                pthread_cond_signal(&condition);
                pthread_mutex_unlock(&cond_mutex);
                break;
            case 'h':
                printf("\nCommands:\n");
                printf("\tx\tCapture a picture from camera.\n");
                printf("\th\tPrints this help.\n");
                printf("\tq\tQuits the program.\n");
                printf("\n");
                break;
            case 'q':
            case '\n':
                break;
            default:
                fprintf(stderr, "Unknown command %c\n", in_str[0]);
                break;
        }
    }

	/* Clean up */
    exit_program(SIGINT);
	return 0;
}
Code Example #9
File: main.c Project: gerpayt/camshot
void *stream_func(void *ptr)
{
    unsigned char *rgb_buffer;
    int ready_buf;
    char cur_name[64];
    int i;

    if( b_shared_mem )
    {
        rgb_buffer = p_shm;
    }
    else
    {
        rgb_buffer = (unsigned char *)malloc(req_width*req_height*3);
    }

    for(i=0; i<total_buffers; i++)
    {
        queue_buffer(i);
    }

    for(;;)
    {
        /* get the idx of ready buffer */
        for(i=0; i<total_buffers; i++)
        {
            /* Check if the thread should stop. */
            if( stream_finish ) return NULL;

		    ready_buf = dequeue_buffer();

            if( b_verbose )
            {
                printf("Buffer %d ready. Length: %uB\n", ready_buf, image_buffers[ready_buf].length);
            }

            switch( check_pixelformat() )
            {
                case V4L2_PIX_FMT_YUYV:
                    /* convert data to rgb */
                    if( b_shared_mem )
                        sem_down(&shm_sem);
                    if( convert_yuv_to_rgb_buffer(
                                        (unsigned char *)(image_buffers[ready_buf].start), 
                                        rgb_buffer, req_width, req_height) == 0 )
                    {
                        if( b_verbose )
                        {
                            printf("\tConverted to rgb.\n");
                        }
                    }
                    if( b_shared_mem )
                        sem_up(&shm_sem);
                    break;
                default:
                    print_pixelformat(stderr);
                    fprintf(stderr,"\n");
                    return NULL;
            }

            /* make the image */

            /* create the file name */
            if( b_named_pipe )
                sprintf(cur_name, "%s", psz_named_pipe);

            switch( e_outfmt )
            {
                case FORMAT_BMP:
                    if( b_shared_mem )
                    {
                        printf("Unsupported!\n");
                        break;
                    }
                    make_bmp(rgb_buffer, 
                             cur_name, 
                             req_width, 
                             req_height);
                    break;
                case FORMAT_RGB:
                    if( b_shared_mem )
                    {
                        /* The buffer is already rgb :) */
                        break;
                    }
                    make_rgb(rgb_buffer, 
                             cur_name, 
                             req_width, 
                             req_height);
                    break;
                default:
                    fprintf(stderr, "Not supported format requested!\n");
                    break;
            }   
            
            queue_buffer(ready_buf);
        }        
    }

    return NULL;
}
Code Example #10
File: main.c Project: gerpayt/camshot
void *capture_func(void *ptr)
{
    unsigned char *rgb_buffer;
    int ready_buf;
    char cur_name[64];
    int i;
    struct timeval timestamp;

    if( b_shared_mem )
    {
        rgb_buffer = p_shm;
    }
    else
    {
        rgb_buffer = (unsigned char *)malloc(req_width*req_height*3);
    }

    for(;;)
    {
        /* Wait for the start condition */
        pthread_mutex_lock(&cond_mutex);
        pthread_cond_wait(&condition, &cond_mutex);
        pthread_mutex_unlock(&cond_mutex);

        /* queue one buffer and 'refresh it' */
        /* @todo Change 2 to some #define */
        for(i=0; i<2; i++)
        {
            queue_buffer(i);
        }
        for(i=0; i<2 - 1; i++)
        {
            dequeue_buffer();
        }

        /* get the idx of ready buffer */
		ready_buf = dequeue_buffer();

        if( b_verbose )
        {
            printf("Buffer %d ready. Length: %uB\n", ready_buf, image_buffers[ready_buf].length);
        }

        switch( check_pixelformat() )
        {
            case V4L2_PIX_FMT_YUYV:
                /* convert data to rgb */
                if( convert_yuv_to_rgb_buffer(
                                    (unsigned char *)(image_buffers[ready_buf].start), 
                                    rgb_buffer, req_width, req_height) == 0 )
                {
                    if( b_verbose )
                    {
                        printf("\tConverted to rgb.\n");
                    }
                }
                break;
            default:
                print_pixelformat(stderr);
                fprintf(stderr,"\n");
                return NULL;
        }

        timestamp = query_buffer(0);

        /* make the image */

        /* create the file name */
        if( b_named_filename )
        {
            sprintf(cur_name, "%s%s", psz_output_dir, psz_output_filename);
        }
        else if( !b_named_pipe )
            sprintf(cur_name, "%scamshot_%lu.bmp", psz_output_dir, timestamp.tv_sec);
        else
            sprintf(cur_name, "%s", psz_named_pipe);

        switch( e_outfmt )
        {
            case FORMAT_BMP:
                make_bmp(rgb_buffer, 
                         cur_name, 
                         req_width, 
                         req_height);
                break;
            case FORMAT_RGB:
                make_rgb(rgb_buffer, 
                         cur_name, 
                         req_width, 
                         req_height);
                break;
            default:
                fprintf(stderr, "Not supported format requested!\n");
                break;
        }   
    }

    return NULL;
}
Code Example #11
void MjpegCamera::discard_frame( ) {
    size_t bytesused; /* dummy here */
    int bufno = dequeue_buffer(bytesused);
    queue_buffer(bufno);
}
Code Example #12
void MjpegCamera::get_frame_to(MjpegFrame *target) {
    size_t bytesused;
    int bufno = dequeue_buffer(bytesused);
    target->assign(buffers[bufno].start, bytesused);
    queue_buffer(bufno);
}
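Taken together, the three MjpegCamera methods cover the usual consumption patterns: get_frame() returns an owning copy, get_frame_to() refills a caller-owned frame, and discard_frame() drains the queue without copying. A short usage sketch (cam is an already-initialized MjpegCamera; its setup is not shown in these examples):

	MjpegFrame *f = cam.get_frame( );  /* owning copy: the caller must delete it */
	/* ... use f ... */
	delete f;

	cam.discard_frame( );              /* skip one frame without copying */

get_frame_to() exists to avoid the per-frame allocation in get_frame() when the caller can reuse a single MjpegFrame object.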