Example #1
void queue_raw_data(TCPsocket sock, const char* buf, int len)
{
	buffer* queued_buf = new buffer(sock);
	assert(*buf == 31);
	make_network_buffer(buf, len, queued_buf->raw_buffer);
	queue_buffer(sock, queued_buf);
}
Example #2
void init_device()
{
	set_format();
	request_buffer();
	query_buf_and_mmap();
	queue_buffer();
}
Example #3
MjpegFrame *MjpegCamera::get_frame( ) {
	size_t bytesused;
	int bufno = dequeue_buffer(bytesused);
	MjpegFrame *ret = new MjpegFrame(buffers[bufno].start, bytesused);
	queue_buffer(bufno);
	return ret;
}
Example #4
void v4l2Capture::queue_all(void)
{
  //    fprintf(stderr, "v4l2Capture::queue_all()\n");
    for (;;)
    {
	if (queue - waiton >= reqbufs.count)
	    return;
	if (0 != queue_buffer())
	    return;
    }
}
Example #5
size_t queue_data(TCPsocket sock,const config& buf, const std::string& packet_type)
{
	DBG_NW << "queuing data...\n";

	buffer* queued_buf = new buffer(sock);
	output_to_buffer(sock, buf, queued_buf->stream);
	const size_t size = queued_buf->stream.str().size();

	network::add_bandwidth_out(packet_type, size);
	queue_buffer(sock, queued_buf);
	return size;
}
Example #6
static int cli_client_send(struct cli_client_t *tcln, const void *_buf, int size)
{
	struct tcp_client_t *cln = container_of(tcln, typeof(*cln), cli_client);
	int n, k;
	struct buffer_t *b;
	const uint8_t *buf = (const uint8_t *)_buf;

	if (cln->disconnect)
		return -1;

	if (cln->xmit_buf) {
		b = _malloc(sizeof(*b) + size);
		b->size = size;
		memcpy(b->buf, buf, size);
		queue_buffer(cln, b);
		return 0;
	}

	for (n = 0; n < size; n += k) {
		k = write(cln->hnd.fd, buf + n, size - n);
		if (k < 0) {
			if (errno == EAGAIN) {
				b = _malloc(sizeof(*b) + size - n);
				b->size = size - n;
				memcpy(b->buf, buf + n, size - n);
				queue_buffer(cln, b);

				triton_md_enable_handler(&cln->hnd, MD_MODE_WRITE);
				break;
			}
			if (errno != EPIPE)
				log_error("cli: write: %s\n", strerror(errno));
			//disconnect(cln);
			cln->disconnect = 1;
			return -1;
		}
	}
	return 0;
}
Example #7
XCamReturn
V4l2Device::start ()
{
    XCamReturn ret = XCAM_RETURN_NO_ERROR;
    // request buffer first
    ret = request_buffer ();
    XCAM_FAIL_RETURN (
        ERROR, ret == XCAM_RETURN_NO_ERROR, ret,
        "device(%s) start failed", XCAM_STR (_name));

    //alloc buffers
    ret = init_buffer_pool ();
    XCAM_FAIL_RETURN (
        ERROR, ret == XCAM_RETURN_NO_ERROR, ret,
        "device(%s) start failed", XCAM_STR (_name));

    //queue all buffers
    for (uint32_t i = 0; i < _buf_count; ++i) {
        SmartPtr<V4l2Buffer> &buf = _buf_pool [i];
        XCAM_ASSERT (buf.ptr());
        XCAM_ASSERT (buf->get_buf().index == i);
        ret = queue_buffer (buf);
        if (ret != XCAM_RETURN_NO_ERROR) {
            XCAM_LOG_ERROR (
                "device(%s) start failed on queue index:%d",
                XCAM_STR (_name), i);
            stop ();
            return ret;
        }
    }

    // stream on
    if (io_control (VIDIOC_STREAMON, &_capture_buf_type) < 0) {
        XCAM_LOG_ERROR (
            "device(%s) start failed on VIDIOC_STREAMON",
            XCAM_STR (_name));
        stop ();
        return XCAM_RETURN_ERROR_IOCTL;
    }
    _active = true;
    XCAM_LOG_INFO ("device(%s) started successfully", XCAM_STR (_name));
    return XCAM_RETURN_NO_ERROR;
}
Example #8
void V4L2UpscaledInputAdapter::run_thread( ) {
    int bufno;
    size_t bufsize;
    RawFrame *out;

    for (;;) {
        bufno = dequeue_buffer(bufsize);
        
        out = new RawFrame(1920, 1080, RawFrame::CbYCrY8422);
        if (out_pipe.can_put( )) {
            do_upscale(out, (uint8_t *) buffers[bufno].start); 
            out_pipe.put(out);
        } else {
            fprintf(stderr, "V4L2 in: dropping input frame on floor\n");
            delete out;
        }

        queue_buffer(bufno);

    }
}
Example #9
int main(int argc, char **argv)
{
    int shmid;
    int i,j;
    char in_str[16] = "";

	process_arguments(argc, argv);

    /* open the camera device */
	if( (camera_fd = open(psz_video_dev, O_RDWR)) < 0 )
	{
        char error_buf[256];
        sprintf(error_buf, "open() %s", psz_video_dev);
		perror(error_buf);
		exit(-1);
	}

    get_caps();
    get_format();

	if( b_verbose ) printf("Device opened.\n");

	if( b_verbose )
	{
		printf("Video device:\t\t%s\n", psz_video_dev);
		print_caps();
        print_format();
		printf("Ouput directory:\t%s\n", psz_output_dir);
		printf("Image format:\t\t%s\n",str_formats[e_outfmt]);
		printf("\n");
		printf("Opening device %s\n", psz_video_dev);
        if( b_named_filename )
        {
            printf("Ouput filename:\t%s\n", psz_output_filename);
        }
        else if( b_named_pipe )
        {
            printf("Using named pipe %s\n", psz_named_pipe);
        }
        if( b_shared_mem )
            printf("Using shared memory. key = %i\n", shared_mem_key);
	}

    if( b_printinfo )
    {
        printf("Device info:\n");
        print_caps();
        print_format();
        close(camera_fd);
        exit(EXIT_SUCCESS);    
    }

    (void)signal(SIGINT, exit_program);

    if( b_shared_mem && b_named_pipe )
    {
        printf("WARNING: shared memory and named pipe can't be used together. Use more instances of camshot. Defaulting to named pipe.\n");
        b_shared_mem = 0;
    }

    if( b_named_pipe )
    {
        int ret_val = mkfifo(psz_named_pipe, 0666);

        if ((ret_val == -1) && (errno != EEXIST)) {
            perror("Error creating the named pipe");
            exit(EXIT_FAILURE);
        }
        
    }

    if( req_width && req_height )
    {
        if( b_verbose )
            printf("Trying to set resolution to %ux%u.\n", req_width, req_height);

        if( set_width_height(req_width,req_height) == -1 )
            printf("Unable to set the desired resolution.\n");
        else
            if( b_verbose )
                printf("Resolution set to %ux%u\n", req_width, req_height);
    } else {
        get_format();
        req_width = camera_format.fmt.pix.width;
        req_height = camera_format.fmt.pix.height;
    }

    if( b_shared_mem )
    {
        if((shmid = shmget(shared_mem_key, req_width*req_height*3, IPC_CREAT | 0666)) < 0) {
            perror("Error getting shared memory id");
            exit(EXIT_FAILURE);
        }

        if((p_shm = (uint8_t *)shmat(shmid, NULL, 0)) == (void *) -1) {
            perror("Error getting shared memory ptr");
            exit(EXIT_FAILURE);
        }       

        shm_sem = semget((key_t)shared_mem_key, 1, IPC_CREAT | 0666);

        sem_set(&shm_sem);

        if( b_verbose )
            printf("Shared memory ID: %i\nSemaphore ID: %i\n", shmid, shm_sem);
    }

	total_buffers = req_mmap_buffers(2);

	/* start the capture */
	streaming_on();

    /* let the camera self adjust by 'ignoring' 200 complete buffer queues */
    printf("Letting the camera automaticaly adjust the picture:");
    
    for(i=0; i<AUTO_ADJUST_TURNS; i++)
    {
        for(j=0; j<total_buffers; j++)
        {
            int ready_buf = dequeue_buffer();
            /* don't queue the last buffers */
            if( i<AUTO_ADJUST_TURNS-1 )
                queue_buffer(ready_buf);
        }

        printf(".");
        fflush(stdout);
    }

    printf("Done.\n");

    if( b_shared_mem || b_named_pipe )
    {
        pthread_create(&stream_thread, NULL, &stream_func, NULL);
        while(1)
        {
            pthread_join(stream_thread, NULL);
        }
    }
    else
    {
        pthread_create(&capture_thread, NULL, &capture_func, NULL);
    }

    if( b_named_filename )
    {
        usleep(200000);
        pthread_mutex_lock(&cond_mutex);
        pthread_cond_signal(&condition);
        pthread_mutex_unlock(&cond_mutex);
        usleep(200000);
        exit_program(SIGINT);
        fflush(stdout);
        return 0;
    }

    while( in_str[0] != 'q' )
    {
        
        printf("Command (h for help): ");
        fflush(stdout);

        if( fgets(in_str, 16, stdin) == NULL )
        {
            printf("Got NULL! Try again.\n");
            continue;
        }

        switch(in_str[0])
        {
            case 'x':
                pthread_mutex_lock(&cond_mutex);
                pthread_cond_signal(&condition);
                pthread_mutex_unlock(&cond_mutex);
                break;
            case 'h':
                printf("\nCommands:\n");
                printf("\tx\tCapture a picture from camera.\n");
                printf("\th\tPrints this help.\n");
                printf("\tq\tQuits the program.\n");
                printf("\n");
                break;
            case 'q':
            case '\n':
                break;
            default:
                fprintf(stderr, "Unknown command %c\n", in_str[0]);
                break;
        }
    }

	/* Clean up */
    exit_program(SIGINT);
	return 0;
}
Example #10
void MjpegCamera::get_frame_to(MjpegFrame *target) {
    size_t bytesused;
    int bufno = dequeue_buffer(bytesused);
    target->assign(buffers[bufno].start, bytesused);
    queue_buffer(bufno);
}
Example #11
void queue_file(TCPsocket sock, const std::string& filename)
{
 	buffer* queued_buf = new buffer(sock);
 	queued_buf->config_error = filename;
 	queue_buffer(sock, queued_buf);
}
Example #12
void MjpegCamera::discard_frame( ) {
    size_t bytesused; /* dummy here */
    int bufno = dequeue_buffer(bytesused);
    queue_buffer(bufno);
}
Example #13
static void * comp_proc(void * para)
{
    int i, j;
    int fd;
    int channel;
    int n_buffers = 0;
    unsigned int length, offset;
    int index, num;
    struct buffer_av buf_av;
    int result;
    struct pollfd pfd;
    unsigned int buf_index;
    unsigned int buf_num;
    char * packet_addr;
    struct packet_header pkt_header;
    Query_Buf_Res * buffers = NULL;				// holds the captured video data
    Spct_Data comp_data;

    int got_video[CHS_PER_CARD];
    int got_audio[CHS_PER_CARD];
    struct timeval v_time_stamp[CHS_PER_CARD];
    struct timeval a_time_stamp[CHS_PER_CARD];
    unsigned int last_vtstamp[CHS_PER_CARD];
    unsigned int last_atstamp[CHS_PER_CARD];
    unsigned int duration;


    DebugPrintf("thread compress\n");

    Ip_Cam_Device * ipcam = (Ip_Cam_Device *) para;

    memset(&buf_av, 0, sizeof(buf_av));

    memset(&got_video, 0, sizeof(got_video));
    memset(&got_audio, 0, sizeof(got_audio));
    memset(&last_vtstamp, 0, sizeof(last_vtstamp));
    memset(&last_atstamp, 0, sizeof(last_atstamp));


    fd = ipcam->comp_fd;
    action_fd = fd;
    n_buffers = request_buffer(fd);

    buffers = calloc(n_buffers, sizeof(* buffers));
    if(!buffers)
            EXIT("Out of memory.");

    for(i = 0; i < n_buffers; ++i)
    {
        if(query_buffer(fd, i, &length, &offset) != 0)
                EXIT("VIDIOC_QUERYBUF");
        buffers[i].length = length;
        buffers[i].start = mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, offset);
        DebugPrintf("buffers[%d].start = 0x%x\n",i, buffers[i].start);
        if(MAP_FAILED == buffers[i].start)
                EXIT("mmap");
    }

    for(index = 0; index < n_buffers; index++)
            if(queue_buffer(fd, index) != 0)
                    EXIT("VIDIOC_QBUF");

    if(streaming_on(fd) != 0)
            EXIT("VIDIOC_STREAMON");
    DebugPrintf("card stream on================================\n");
    pfd.fd = fd;
    pfd.events = POLLIN;

    int comp_proc_count = 0;

    //set_action(fd, catch_sen); // fd threshold time
    /* run while the ipcam is in the running state */
    while(ipcam->status == CAM_STATUS_RUN)
    {
        comp_proc_count++;
        /* periodically print a status message */
        if(comp_proc_count == 3000)
        {
            comp_proc_count = 0;
            PrintScreen("\n----- comp_proc thread running -----\n");
        }

        //IsSetaction(fd);
        /* poll blocks for up to 15 seconds */
        result = poll(&pfd, 1, 15000);

        /* the poll call failed */
        if(result < 0)
            DebugPrintf("polling error ==================\n");
        /* no readable descriptor within the timeout */
        if(result == 0)
        {
            DebugPrintf("polling timed out --------------\n");
            exit(1);
        }

        if(result < 0)
            continue;
        if(result == 0)
            continue;

        dequeue_buffer(fd, &buf_av);
        buf_index = buf_av.buf_index;
        buf_num = buf_av.length;
        for(i = 0; i < CHS_PER_CARD; i++)
        {
            index = buf_index & 0xff;
            num = buf_num & 0xff;

            buf_index >>= 8;
            buf_num >>= 8;
            if(index != 0xff)
            {
                /*DATA PACKET*/
                channel = i;

                for(j = 0, packet_addr = buffers[index].start; j < num; j++)
                {
                    /* parse the packet; motion handling is done here */
                    parse_packet(&packet_addr, &pkt_header);

                    //pkt_header.motion_addr, 4 * 9
                    /* motion detected */
                    if(pkt_header.motion_flag)
                    {
#if RELEASE_MODE
#else
                        PrintScreen("\n-------------receive a motion------------\n\n");
#endif
                        catchonemotion = 1;
                        is_action = 1;
                    }

                    if (pkt_header.videolost_flag)
                    {
                        //DebugPrintf("video lost\n");
                    }

                    if(pkt_header.audio_flag)
                    {//length
                        if(!got_audio[channel])
                        {
                            got_audio[channel] = 1;
                            //a_time_stamp[channel] = buf_av.timestamp;
                            a_time_stamp[channel].tv_usec = ((pkt_header.audio_timestamp % 32768) * 1000ULL * 1000ULL) >> 15;
                            a_time_stamp[channel].tv_sec = (pkt_header.audio_timestamp >> 15) + (a_time_stamp[channel].tv_usec / 1000000);
                            a_time_stamp[channel].tv_usec %= 1000000;
                        }
                        else
                        {
                            duration = pkt_header.audio_timestamp - last_atstamp[channel];
                            a_time_stamp[channel].tv_usec += ((duration % 32768) * 1000ULL * 1000ULL) >> 15;
                            a_time_stamp[channel].tv_sec += (duration >> 15) + (a_time_stamp[channel].tv_usec / 1000000);
                            a_time_stamp[channel].tv_usec %= 1000000;

                        }
                        last_atstamp[channel] = pkt_header.audio_timestamp;

                        //DebugPrintf("audio frame\n");
                        comp_data.channel = channel;
                        comp_data.type = DATA_AUDIO;
                        comp_data.flags = 0;
                        comp_data.timestamp = a_time_stamp[channel];
                        comp_data.size = pkt_header.audio_length;
                        comp_data.data = pkt_header.audio_addr;
                        ipcam->fun(ipcam->datahandler, &comp_data);
                    }

                    if(pkt_header.video_flag)
                    {
                        if(!got_video[channel])
                        {
                            got_video[channel] = 1;
                            //v_time_stamp[channel] = buf_av.timestamp;
                            v_time_stamp[channel].tv_usec = ((pkt_header.video_timestamp % 32768) * 1000ULL * 1000ULL) >> 15;
                            v_time_stamp[channel].tv_sec = (pkt_header.video_timestamp >> 15) + (v_time_stamp[channel].tv_usec / 1000000);
                            v_time_stamp[channel].tv_usec %= 1000000;
                        }
                        else
                        {
                            duration = pkt_header.video_timestamp - last_vtstamp[channel];
                            v_time_stamp[channel].tv_usec += ((duration % 32768) * 1000ULL * 1000ULL) >> 15;
                            v_time_stamp[channel].tv_sec += (duration >> 15) + (v_time_stamp[channel].tv_usec / 1000000);
                            v_time_stamp[channel].tv_usec %= 1000000;
                        }
                        last_vtstamp[channel] = pkt_header.video_timestamp;

                        comp_data.channel = channel;
                        comp_data.type = DATA_VIDEO;
                        comp_data.flags = pkt_header.video_type;
                        comp_data.timestamp = v_time_stamp[channel];
                        comp_data.size = pkt_header.video_length;
                        comp_data.data = pkt_header.video_addr;
                        ipcam->fun(ipcam->datahandler, &comp_data);
                    }
Example #14
static void
mainloop(void)
{
  unsigned int count;
  int buf_idx[MAX_DEVICES];

  count = 100;

  while (count-- > 0) {
    for (;;) {
      fd_set fds;
      struct timeval tv;
      int r;

      int i;
      for (i = 0; i < n_devices; ++i)
      {
        FD_ZERO(&fds);
        FD_SET(devices[i].fd, &fds);

        /* Timeout. */
        tv.tv_sec = 2;
        tv.tv_usec = 0;

        r = select(devices[i].fd + 1, &fds, NULL, NULL, &tv);

        if (-1 == r) {
          if (EINTR == errno)
            continue;

          errno_exit("select");
        }

        if (0 == r) {
          fprintf(stderr, "select timeout\n");
          exit (EXIT_FAILURE);
        }
        //gettimeofday(&tv, NULL);
        //printf("buffer ready %s %ld.%03ld\n", devices[i].dev_name, tv.tv_sec, tv.tv_usec/1000);
      }

      struct thr_data tdata[MAX_DEVICES];
      for (i = 0; i < n_devices; ++i)
      {
        //buf_idx[i] = read_frame(&devices[i]);
        buf_idx[i] = -1;
        tdata[i].dev = &devices[i];
        pthread_create(&(tdata[i].thr), NULL, read_frame_thr, &tdata[i]);
      }

      for (i = 0; i < n_devices; ++i)
      {
        pthread_join(tdata[i].thr, NULL);
        buf_idx[i] = tdata[i].rval;
      }

      for (i = 0; i < n_devices; ++i)
      {
        if (buf_idx[i] >= 0)
        {
          queue_buffer(&devices[i], buf_idx[i]);
        }
      }

      /* EAGAIN - continue select loop. */
    }
  }
}
Example #15
int main (int argc,char *argv[])
{
  int videofd1 = open("/dev/video0",O_RDWR);
  int videofd2 = open("/dev/video0",O_RDWR);
  IDirectFBSurface     *SurfaceHandle;
  IDirectFBSurface     *SurfaceHandle2;
  void *ptr,*ptr2;
  int pitch,pitch2;
  int colour_palette[256];
  int colour_palette2_real[256]; /* random... */
  int *colour_palette2 = colour_palette2_real;
  int is_lut8;

  is_lut8 = (argc > 1 && !strcmp (argv[1], "both"));

  memset(&colour_palette,0x0,4*256);
  //memset(&colour_palette[1],0xff,255*4);

  colour_palette[0] = 0xff000000; // black
  colour_palette[1] = 0xffff0000; // red
  colour_palette[2] = 0xff00ff00; // green
  colour_palette[3] = 0xff0000ff; // blue
  colour_palette[4] = 0xffffffff; // white
  colour_palette[5] = 0x80808000; // half-transp yellow
  colour_palette[6] = 0x00000000; // transp black

  if (videofd1 < 0)
    perror("Couldn't open video device 1\n");

  if (videofd2 < 0)
    perror("Couldn't open video device 2\n");
  
  v4l2_list_outputs (videofd1);

  fb_make_transparent();

  init_dfb(&SurfaceHandle,is_lut8);
  init_dfb(&SurfaceHandle2,1);

  v4l2_set_output_by_name (videofd1, "RGB1");
  v4l2_set_output_by_name (videofd2, "RGB2");
  init_v4l(videofd1,0,0,1280,720,is_lut8);
  init_v4l(videofd2,0,0,1280,720,1);

  printf("%s:%d\n",__FUNCTION__,__LINE__);


//  memcpy (colour_palette, colour_palette2, sizeof (colour_palette));
  colour_palette2 = colour_palette;

  {
    int coords = 60;
    int size   = 100;

    // clear
    if (!is_lut8) SurfaceHandle->Clear (SurfaceHandle, 0x00, 0x00, 0x00, 0x00);
    else {
      SurfaceHandle->SetColorIndex (SurfaceHandle, 0x6);
      SurfaceHandle->FillRectangle (SurfaceHandle, 0, 0, 600, 600);
    }
    // White
    if (is_lut8) SurfaceHandle->SetColorIndex (SurfaceHandle, 0x4);
    else SurfaceHandle->SetColor (SurfaceHandle, 0xff, 0xff, 0xff, 0xff);
    SurfaceHandle->FillRectangle (SurfaceHandle, coords, coords, size, size);
    coords += size;
    // Red
    if (is_lut8) SurfaceHandle->SetColorIndex (SurfaceHandle, 0x1);
    else SurfaceHandle->SetColor (SurfaceHandle, 0xff, 0x00, 0x00, 0xff);
    SurfaceHandle->FillRectangle (SurfaceHandle, coords, coords, size, size);
    coords += size;
    // Green
    if (is_lut8) SurfaceHandle->SetColorIndex (SurfaceHandle, 0x2);
    else SurfaceHandle->SetColor (SurfaceHandle, 0x00, 0xff, 0x00, 0xff);
    SurfaceHandle->FillRectangle (SurfaceHandle, coords, coords, size, size);
    coords += size;
    // Blue
    if (is_lut8) SurfaceHandle->SetColorIndex (SurfaceHandle, 0x3);
    else SurfaceHandle->SetColor (SurfaceHandle, 0x00, 0x00, 0xff, 0xff);
    SurfaceHandle->FillRectangle (SurfaceHandle, coords, coords, size, size);
    coords += size;
    // half transp yellow
    if (is_lut8) SurfaceHandle->SetColorIndex (SurfaceHandle, 0x5);
    else SurfaceHandle->SetColor (SurfaceHandle, 0x80, 0x80, 0x00, 0x80);
    SurfaceHandle->FillRectangle (SurfaceHandle, coords, coords, size, size);
    coords += size;
    // transp
    if (is_lut8) SurfaceHandle->SetColorIndex (SurfaceHandle, 0x6);
    else SurfaceHandle->SetColor (SurfaceHandle, 0x00, 0x00, 0x00, 0x00);
    SurfaceHandle->FillRectangle (SurfaceHandle, coords, coords, size, size);
    coords += size;
  }

  {
    int xcoords = 60 + (100 * 6), ycoords = 60;
    int size   = 100;

    // clear
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x6);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, 220, 220, 440, 440);
    // transp
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x6);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, xcoords, ycoords, size, size);
    xcoords -= size; ycoords += size;
    // half transp yellow
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x5);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, xcoords, ycoords, size, size);
    xcoords -= size; ycoords += size;
    // Blue
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x3);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, xcoords, ycoords, size, size);
    xcoords -= size; ycoords += size;
    // Green
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x2);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, xcoords, ycoords, size, size);
    xcoords -= size; ycoords += size;
    // Red
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x1);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, xcoords, ycoords, size, size);
    xcoords -= size; ycoords += size;
    // White
    SurfaceHandle2->SetColorIndex (SurfaceHandle2, 0x4);
    SurfaceHandle2->FillRectangle (SurfaceHandle2, xcoords, ycoords, size, size);
    xcoords -= size; ycoords += size;
  }
    
  DFBCHECK (SurfaceHandle->Lock (SurfaceHandle, DSLF_READ, &ptr, &pitch));
  DFBCHECK (SurfaceHandle2->Lock (SurfaceHandle2, DSLF_READ, &ptr2, &pitch2));

  printf("%s:%d\n",__FUNCTION__,__LINE__);

  zorder(videofd2,V4L2_CID_STM_Z_ORDER_RGB2,1);
  zorder(videofd1,V4L2_CID_STM_Z_ORDER_RGB1,2);

  queue_buffer( videofd1, ptr, (is_lut8 ? colour_palette : 0), 0ULL);

  stream_on( videofd1 );

  getchar();
  queue_buffer( videofd2, ptr2, colour_palette2, 0ULL); 

  printf("%s:%d\n",__FUNCTION__,__LINE__);

  stream_on( videofd2 );

  printf("%s:%d\n",__FUNCTION__,__LINE__);


  getchar();

  zorder(videofd2,V4L2_CID_STM_Z_ORDER_RGB2,2);
  zorder(videofd1,V4L2_CID_STM_Z_ORDER_RGB1,1);

  fprintf(stderr,"Press Return/Enter to quit\n");
  getchar();

  // This seems to dequeue all buffers too, but that's what the spec says
  // so this is ok.
  stream_off( videofd1 );
  stream_off( videofd2 );

  deinit_dfb();
  close(videofd1);  

  return 0;
}
Example #16
void V4L2UpscaledInputAdapter::queue_all_buffers( ) {
    int i;
    for (i = 0; i < n_buffers; i++) {
        queue_buffer(i);
    }
}
Example #17
void MjpegCamera::queue_all_buffers( ) {
	int i;
	for (i = 0; i < n_buffers; i++) {
		queue_buffer(i);
	}
}
Example #18
void *stream_func(void *ptr)
{
    unsigned char *rgb_buffer;
    int ready_buf;
    char cur_name[64];
    int i;

    if( b_shared_mem )
    {
        rgb_buffer = p_shm;
    }
    else
    {
        rgb_buffer = (unsigned char *)malloc(req_width*req_height*3);
    }

    for(i=0; i<total_buffers; i++)
    {
        queue_buffer(i);
    }

    for(;;)
    {
        /* get the idx of ready buffer */
        for(i=0; i<total_buffers; i++)
        {
            /* Check if the thread should stop. */
            if( stream_finish ) return NULL;

		    ready_buf = dequeue_buffer();

            if( b_verbose )
            {
                printf("Buffer %d ready. Length: %uB\n", ready_buf, image_buffers[ready_buf].length);
            }

            switch( check_pixelformat() )
            {
                case V4L2_PIX_FMT_YUYV:
                    /* convert data to rgb */
                    if( b_shared_mem )
                        sem_down(&shm_sem);
                    if( convert_yuv_to_rgb_buffer(
                                        (unsigned char *)(image_buffers[ready_buf].start), 
                                        rgb_buffer, req_width, req_height) == 0 )
                    {
                        if( b_verbose )
                        {
                            printf("\tConverted to rgb.\n");
                        }
                    }
                    if( b_shared_mem )
                        sem_up(&shm_sem);
                    break;
                default:
                    print_pixelformat(stderr);
                    fprintf(stderr,"\n");
                    return NULL;
            }

            /* make the image */

            /* create the file name */
            if( b_named_pipe )
                sprintf(cur_name, "%s", psz_named_pipe);

            switch( e_outfmt )
            {
                case FORMAT_BMP:
                    if( b_shared_mem )
                    {
                        printf("Unsupported!\n");
                        break;
                    }
                    make_bmp(rgb_buffer, 
                             cur_name, 
                             req_width, 
                             req_height);
                    break;
                case FORMAT_RGB:
                    if( b_shared_mem )
                    {
                        /* The buffer is already rgb :) */
                        break;
                    }
                    make_rgb(rgb_buffer, 
                             cur_name, 
                             req_width, 
                             req_height);
                    break;
                default:
                    fprintf(stderr, "Not supported format requested!\n");
                    break;
            }   
            
            queue_buffer(ready_buf);
        }        
    }

    return NULL;
}
Example #19
void *capture_func(void *ptr)
{
    unsigned char *rgb_buffer;
    int ready_buf;
    char cur_name[64];
    int i;
    struct timeval timestamp;

    if( b_shared_mem )
    {
        rgb_buffer = p_shm;
    }
    else
    {
        rgb_buffer = (unsigned char *)malloc(req_width*req_height*3);
    }

    for(;;)
    {
        /* Wait for the start condition */
        pthread_mutex_lock(&cond_mutex);
        pthread_cond_wait(&condition, &cond_mutex);
        pthread_mutex_unlock(&cond_mutex);

        /* queue one buffer and 'refresh it' */
        /* @todo Change 2 to some #define */
        for(i=0; i<2; i++)
        {
            queue_buffer(i);
        }
        for(i=0; i<2 - 1; i++)
        {
            dequeue_buffer();
        }

        /* get the idx of ready buffer */
		ready_buf = dequeue_buffer();

        if( b_verbose )
        {
            printf("Buffer %d ready. Length: %uB\n", ready_buf, image_buffers[ready_buf].length);
        }

        switch( check_pixelformat() )
        {
            case V4L2_PIX_FMT_YUYV:
                /* convert data to rgb */
                if( convert_yuv_to_rgb_buffer(
                                    (unsigned char *)(image_buffers[ready_buf].start), 
                                    rgb_buffer, req_width, req_height) == 0 )
                {
                    if( b_verbose )
                    {
                        printf("\tConverted to rgb.\n");
                    }
                }
                break;
            default:
                print_pixelformat(stderr);
                fprintf(stderr,"\n");
                return NULL;
        }

        timestamp = query_buffer(0);

        /* make the image */

        /* create the file name */
        if( b_named_filename )
        {
            sprintf(cur_name, "%s%s", psz_output_dir, psz_output_filename);
        }
        else if( !b_named_pipe )
            sprintf(cur_name, "%scamshot_%lu.bmp", psz_output_dir, timestamp.tv_sec);
        else
            sprintf(cur_name, "%s", psz_named_pipe);

        switch( e_outfmt )
        {
            case FORMAT_BMP:
                make_bmp(rgb_buffer, 
                         cur_name, 
                         req_width, 
                         req_height);
                break;
            case FORMAT_RGB:
                make_rgb(rgb_buffer, 
                         cur_name, 
                         req_width, 
                         req_height);
                break;
            default:
                fprintf(stderr, "Not supported format requested!\n");
                break;
        }   
    }

    return NULL;
}
Example #20
static void* audio_start(void *aux)
{
	audio_fifo_t *af = aux;
    audio_fifo_data_t *afd;
	unsigned int frame = 0;
	ALCdevice *device = NULL;
	ALCcontext *context = NULL;
	ALuint buffers[NUM_BUFFERS];
	ALuint source;
	ALint processed;
	ALenum error;
	ALint rate;
	ALint channels;
	device = alcOpenDevice(NULL); /* Use the default device */
	if (!device) error_exit("failed to open device");
	context = alcCreateContext(device, NULL);
	alcMakeContextCurrent(context);
	alListenerf(AL_GAIN, 1.0f);
	alDistanceModel(AL_NONE);
	alGenBuffers((ALsizei)NUM_BUFFERS, buffers);
	alGenSources(1, &source);

	/* First prebuffer some audio */
	queue_buffer(source, af, buffers[0]);
	queue_buffer(source, af, buffers[1]);
	queue_buffer(source, af, buffers[2]);
	for (;;) {
      
		alSourcePlay(source);
		for (;;) {
			/* Wait for some audio to play */
			do {
				alGetSourcei(source, AL_BUFFERS_PROCESSED, &processed);
				usleep(100);
			} while (!processed);
			
			/* Remove old audio from the queue.. */
			alSourceUnqueueBuffers(source, 1, &buffers[frame % 3]);
			
			/* and queue some more audio */
			afd = audio_get(af);
			alGetBufferi(buffers[frame % 3], AL_FREQUENCY, &rate);
			alGetBufferi(buffers[frame % 3], AL_CHANNELS, &channels);
			if (afd->rate != rate || afd->channels != channels) {
				printf("rate or channel count changed, resetting\n");
				break; /* afd is reused after the loop to requeue this packet */
			}
			alBufferData(buffers[frame % 3], 
						 afd->channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, 
						 afd->samples, 
						 afd->nsamples * afd->channels * sizeof(short), 
						 afd->rate);
                        free(afd);
			alSourceQueueBuffers(source, 1, &buffers[frame % 3]);
			
			if ((error = alcGetError(device)) != AL_NO_ERROR) {
				printf("openal al error: %d\n", error);
				exit(1);
			}
			frame++;
		}
		/* Format or rate changed, so we need to reset all buffers */
		alSourcei(source, AL_BUFFER, 0);
		alSourceStop(source);

		/* Make sure we don't lose the audio packet that caused the change */
		alBufferData(buffers[0], 
					 afd->channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16, 
					 afd->samples, 
					 afd->nsamples * afd->channels * sizeof(short), 
					 afd->rate);

		alSourceQueueBuffers(source, 1, &buffers[0]);
		queue_buffer(source, af, buffers[1]);
		queue_buffer(source, af, buffers[2]);
		frame = 0;
	}
}
Example #21
static void* audio_start(void *aux)
{
    audio_fifo_t *af = aux;
    audio_fifo_data_t *afd;
    ALCdevice *device = NULL;
    ALCcontext *context = NULL;
    ALuint buffers[NUM_BUFFERS];
    ALint processed;
    ALenum error;
    ALint rate;
    ALint channels;
    
    device = alcOpenDevice(NULL); /* Use the default device */
    if (!device) error_exit("failed to open device");
    context = alcCreateContext(device, NULL);
    alcMakeContextCurrent(context);
    alListenerf(AL_GAIN, 1.0f);
    alDistanceModel(AL_NONE);
    alGenBuffers((ALsizei)NUM_BUFFERS, buffers);
    alGenSources(1, &source);

    /* First prebuffer some audio */
    queue_buffer(source, af, buffers[0]);
    queue_buffer(source, af, buffers[1]);
    queue_buffer(source, af, buffers[2]);
    for (;;) {

        alSourcePlay(source);
        for (;;) {
            /* Wait for some audio to play */
            alGetSourcei(source, AL_BUFFERS_PROCESSED, &processed);
            if (processed <= 0)
            {
                usleep(200);
                continue;
            }

            /* Remove old audio from the queue.. */
            ALuint buffer;
            
            alSourceUnqueueBuffers(source, 1, &buffer);

            /* and queue some more audio */
            afd = audio_get(af);

            alGetBufferi(buffer, AL_FREQUENCY, &rate);
            alGetBufferi(buffer, AL_CHANNELS, &channels);
            if (afd->rate != rate || afd->channels != channels) 
            {
                log_debug("openal","audio_start","rate or channel count changed, resetting");
                break;
            }

            alBufferData(buffer,
                afd->channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16,
                afd->samples,
                afd->nsamples * afd->channels * sizeof(int16_t),
                afd->rate);

            free(afd);

            ALenum error = alGetError();
            if (error != AL_NO_ERROR)
            {
                log_error("openal","audio_start","Error buffering: %s", alGetString(error));
                return NULL;
            }

            alSourceQueueBuffers(source, 1, &buffer);

            error = alGetError();
            if (error != AL_NO_ERROR)
            {
                log_error("openal","audio_start","Error queueing buffer: %s", alGetString(error));
                return NULL;
            }


            alGetSourcei(source, AL_SOURCE_STATE, &processed);
            if (processed != AL_PLAYING)
            {
                // Resume playing
                alSourcePlay(source);
            }

            if ((error = alcGetError(device)) != AL_NO_ERROR) {
                log_error("openal","audio_start","Error queing buffering: %s", alGetString(error));
                exit(1);
            }
            
        }

        /* Format or rate changed, so we need to reset all buffers */
        alSourcei(source, AL_BUFFER, 0);
        alSourceStop(source);

        /* Make sure we don't lose the audio packet that caused the change */
        alBufferData(buffers[0],
                     afd->channels == 1 ? AL_FORMAT_MONO16 : AL_FORMAT_STEREO16,
                     afd->samples,
                     afd->nsamples * afd->channels * sizeof(short),
                     afd->rate);

        free(afd);

        alSourceQueueBuffers(source, 1, &buffers[0]);
        queue_buffer(source, af, buffers[1]);
        queue_buffer(source, af, buffers[2]);
      
    }
}