Пример #1
0
/*
 * Repeatedly try to open one or both Chameleon cameras, then run the
 * capture loop and release the handles.
 *
 * cam selects which camera(s) to open: -1 = both, 0 = camera 1 only,
 * 1 = camera 2 only.
 *
 * NOTE(review): the retry loop exits as soon as EITHER handle is
 * non-NULL, while the sleep fires whenever either is still NULL — for
 * cam == -1 this can leave the loop with only one camera open; confirm
 * that is intended.
 */
static void twin_capture(const char *basename, float framerate, int cam, bool eight_bit_mode, bool testonly)
{
	struct chameleon_camera *c1=NULL, *c2=NULL;
	do {
		/* Release handles left over from a previous, partially
		 * failed attempt before retrying. */
		if (c1) chameleon_camera_free(c1);
		if (c2) chameleon_camera_free(c2);
		if (cam == -1 || cam == 0) {
			c1 = open_camera(true, eight_bit_mode);
		}
		if (cam == -1 || cam == 1) {
			c2 = open_camera(false, eight_bit_mode);
		}
		printf("Got camera c1=%p c2=%p\n", c1, c2);
		if (!c1 || !c2) sleep(1);	/* back off before retrying */
	} while (c1 == NULL && c2 == NULL);

	/* SIG_IGN on SIGCHLD auto-reaps child processes (presumably forked
	 * elsewhere, e.g. by the capture loop) — TODO confirm. */
	signal(SIGCHLD, SIG_IGN);

	capture_loop(c1, c2, framerate, basename, testonly, eight_bit_mode);
	if (c1) {
		chameleon_camera_free(c1);
	}
	if (c2) {
		chameleon_camera_free(c2);
	}
}
Пример #2
0
/*
 * testquickcam entry point.
 *
 * With no arguments, prints usage and exits with failure.  Otherwise
 * opens the camera device named by argv[1] at 324x248, dumps its info,
 * runs the read loop, and cleans up.
 *
 * Fixes over the previous version:
 *  - the process now exits 0 on success instead of always exiting 1;
 *  - "resulution" typo in the usage text corrected.
 */
int main(int argc, char *argv[])
{
    struct video_config * camera;
    int status = EXIT_FAILURE;      /* pessimistic until open_camera succeeds */

    if(argc == 1)
    {
        printf(" *** Usage ***\n");
        printf("./testquickcam DEVICE [ -r | -m | -l ] [ -0 | -1 | -2 | -3 | -4 | -5 ]\n\n");
        printf(" -r reads one frame via read() from the camera\n");
        printf(" -m reads one frame via mmap() from the camera\n");
        printf(" -l read() loop...good for debugging gain etc \n");
        printf(" -0-5 set resolution\n");
        exit(EXIT_FAILURE);
    }

    camera = init_video_config(324,248);
    if(open_camera(camera,argv[1]) == 1)    /* 1 == success in this API */
    {
        get_camera_info(camera);
        print_camera_info(camera);
        read_loop(camera);
        close_camera(camera);
        status = EXIT_SUCCESS;
    }
    delete_video_config(camera);
    exit(status);
}
Пример #3
0
/*
 * Bring up the V4L2 capture pipeline for cam in order: open the device,
 * configure it, start streaming, then prepare the encoder and the output
 * file.  Error handling (if any) lives inside the helpers; this wrapper
 * performs none itself.
 */
void v4l2_init(struct camera *cam) {
	open_camera(cam);
	init_camera(cam);
	start_capturing(cam);
	init_encoder(cam);
	init_file();
}
Пример #4
0
/*
 * Open and mmap the V4L2 device nodes for the channels in
 * [ctx->begin, ctx->end]; channel ch maps to /dev/video(deviceid + ch).
 *
 * Returns K4W2_SUCCESS when every requested channel was opened and
 * mapped, otherwise the error code of the first failing step (or
 * K4W2_ERROR if the channel range is empty).  flags is currently unused
 * here.
 *
 * NOTE(review): on partial failure the channels opened before the
 * failing one are left open — confirm the caller (e.g. a close hook)
 * releases them.
 */
static int
k4w2_v4l2_open(k4w2_t ctx, unsigned int deviceid, unsigned int flags)
{
    k4w2_v4l2 * v4l2 = (k4w2_v4l2 *)ctx;
    CHANNEL ch;
    int res = K4W2_ERROR;

    /* Mark every channel "not open" before any step can fail. */
    for (ch = COLOR_CH; ch <= DEPTH_CH; ++ch) {
	v4l2->cam[ch].fd = -1;
    }

    for (ch = ctx->begin; ch <= ctx->end; ++ch) {

	char devfile[FILENAME_MAX];
	snprintf(devfile, sizeof(devfile), "/dev/video%d",
		 deviceid + ch);

	res = open_camera(&v4l2->cam[ch], devfile);
	if (K4W2_SUCCESS != res) {
	    VERBOSE("open_camera(%s) failed", devfile);
	    break;
	}

	/* 8 = number of buffers to map — TODO confirm against mmap_camera */
	res = mmap_camera(&v4l2->cam[ch], 8);
	if (K4W2_SUCCESS != res) {
	    VERBOSE("mmap_camera(%d) failed", ch);
	    break;
	}
    }

    return res;
}
Пример #5
0
/*
 * Open the camera device, initialize it, and start video capture, in
 * that order; always reports success.  Helpers are expected to handle
 * their own errors.
 */
bool v4l2_init(Camera* cam)
{
	open_camera(cam); 
	init_camera(cam);
	start_video_capturing(cam);	
	return true ;
Пример #6
0
/*
 * Bring the camera pipeline up: open the device, read and apply its
 * parameters, mmap the capture buffers, and start video streaming.
 * Always reports success; the helpers are assumed to handle their own
 * errors internally.
 *
 * Fix: the function previously fell off the end without returning a
 * value, which is undefined behavior for a non-void function.
 */
bool camera::cam_start()
{
    open_camera();        // open the camera device
    get_camea_data();     // read current camera parameters
    set_camea_data();     // apply camera parameters
    RequestBuffers();     // memory-map the capture buffers
    camea_video_start();  // start video streaming
    return true;          // fix: missing return (UB in a bool function)
}
Пример #7
0
/*
 * Program entry point: initialize GTK/GDK threading, semaphores, and the
 * camera; spawn the preview/grab threads (plus an optional GPS telemetry
 * thread); run the GTK main loop; then shut everything down.
 *
 * Passing "GPS_ON" as argv[1] enables the GPS telemetry thread.
 */
int main(int argc, char *argv[])
{
	/* static: zero-initialized and outlives the worker threads */
	static struct Camera camera;
	pthread_t draw_thread, grab_thread, tele_thread;

	init_camera(&camera);

	if(argc >= 2 && strcmp(argv[1], "GPS_ON") == 0)  camera.gpson = TRUE;

	//init threads
	g_thread_init(NULL);
	gdk_threads_init();
	//init gtk
	gtk_init(&argc, &argv);
	//Init gdk_rgb
	gdk_rgb_init();
	//Init semaphore: sem_draw starts at 0, sem_grab at 1 —
	//presumably grab runs first and signals draw; TODO confirm in the
	//thread bodies.
	sem_init(&sem_draw, 0, 0);
	sem_init(&sem_grab, 0, 1);

	open_camera(&camera);
	set_camera_info(&camera);
	get_camera_info(&camera);

//	print_camera_info(&camera);

	create_window(&camera);

	if(camera.gpson) {
		camera_antennaimg_change(&camera, "0");
		pthread_create(&tele_thread, NULL, (void*)&camera_gps_tel, &camera);
	}
	pthread_create(&draw_thread, NULL, (void*)&preview_camera, &camera);
	pthread_create(&grab_thread, NULL, (void*)&grab_image_camera, &camera);

	gdk_threads_enter();
	gtk_main();
	gdk_threads_leave();

	/* Ask the worker threads to exit, then wait for them. */
	camera.quit = 1;
	if(camera.gpson) pthread_join(tele_thread, NULL);
	pthread_join(grab_thread, NULL);
	pthread_join(draw_thread, NULL);
	/* NOTE(review): these posts happen AFTER the joins, so they cannot
	 * unblock the workers; if a worker can block on a semaphore while
	 * quitting, the posts likely need to precede the joins — verify. */
	sem_post(&sem_draw);	
	sem_post(&sem_grab);
	sem_destroy(&sem_draw);
	sem_destroy(&sem_grab);

	close_camera(&camera);

	return 0;
}
Пример #8
0
/*
 * Block until a camera becomes available (printing a dot per retry),
 * reset it, run the capture loop, and close the camera.
 * Always returns 0.
 */
static int run_capture(const char *basename, float delay, bool testonly)
{
	dc1394camera_t *cam;

	printf("Waiting for camera\n");
	for (;;) {
		cam = open_camera();
		if (cam != NULL)
			break;
		printf(".");
		fflush(stdout);
		sleep(1);
	}

	dc1394_camera_reset(cam);

	capture_loop(cam, basename, delay, testonly);
	close_camera(cam);

	return 0;
}
Пример #9
0
/*
 * Python binding: chameleon.open(colour, depth, brightness) -> handle.
 *
 * Finds the first free slot in the global cameras[] table, opens a
 * Chameleon camera with the given settings, and returns the slot index
 * as the Python-level handle.  Raises ChameleonError when no slot is
 * free or the device cannot be opened.
 */
static PyObject *
chameleon_open(PyObject *self, PyObject *args)
{
	int colour = 0;
	unsigned short depth = 0;
	unsigned short brightness;
	int sts = -1;	/* slot index on success; -1 means open failed */
	PyObject *colour_obj;
	
	if (!PyArg_ParseTuple(args, "OHH", &colour_obj, &depth, &brightness))
		return NULL;

	colour = PyObject_IsTrue(colour_obj);

         // try to make the capture thread realtime
        struct sched_param p;
        memset(&p, 0, sizeof(p));
        /* NOTE(review): this assigns the policy constant SCHED_FIFO as the
         * priority value; a real priority (e.g. from
         * sched_get_priority_max(SCHED_FIFO)) was probably intended, and
         * the sched_setscheduler result is ignored — confirm. */
        p.sched_priority = SCHED_FIFO;
        sched_setscheduler(0, SCHED_FIFO, &p);
        
	int i = 0;
	for (i = 0; i < NUM_CAMERA_HANDLES; ++i) {
		if (cameras[i] == NULL) {
			struct chameleon_camera *cam = open_camera(colour, depth, brightness);
			if (cam != NULL) {
				cameras[i] = cam;
				sts = i;
				break;
			} else {
				/* First free slot found but the device failed
				 * to open: stop searching (sts stays -1). */
				break;
			}
		}
	}
	if (i == NUM_CAMERA_HANDLES) {
		PyErr_SetString(ChameleonError, "No camera handles available");
		return NULL;
	}
	if (sts < 0) {
		PyErr_SetString(ChameleonError, "Failed to open device");
		return NULL;
	}
	return PyLong_FromLong(sts);
}
Пример #10
0
/*
 * Python binding: flea.open(colour, depth, brightness[, height, width])
 * -> handle.
 *
 * Takes the first free slot in the global cameras[] table, opens a Flea
 * camera at the requested resolution (default 2448x2048), and returns the
 * slot index as the Python-level handle.  Raises FleaError when no slot
 * is free or the device cannot be opened.  The colour/depth arguments are
 * accepted only for chameleon-interface compatibility.
 */
static PyObject *
flea_open(PyObject *self, PyObject *args)
{
	fleaCamera *cam = NULL;
	int slot, handle = -1;
	unsigned short depth = 0;
	unsigned short brightness;
	PyObject *colour_obj;
	unsigned int height = 2048, width = 2448;

	if (!PyArg_ParseTuple(args, "OHH|II", &colour_obj, &depth,
			      &brightness, &height, &width)) {
		return NULL;
	}

	for (slot = 0; slot < NUM_CAMERA_HANDLES; ++slot) {
		if (cameras[slot] != NULL)
			continue;
		/* First free slot: one open attempt, then stop searching
		 * either way (handle stays -1 on failure). */
		cam = open_camera(brightness, height, width);
		if (cam != NULL) {
			cam->height = height;
			cam->width = width;
			cameras[slot] = cam;
			handle = slot;
		}
		break;
	}
	if (slot == NUM_CAMERA_HANDLES) {
		PyErr_SetString(FleaError, "No camera handles available");
		return NULL;
	}
	if (handle < 0) {
		PyErr_SetString(FleaError, "Failed to open device");
		return NULL;
	}

	return PyLong_FromLong(handle);
}
Пример #11
0
/*
 * Per-camera recorder thread.
 *
 * Reads packets from the camera's input context, runs motion detection on
 * every cam->analize_frames-th decoded video frame, writes all video
 * packets to the output file (rotated hourly), saves a screenshot at most
 * once a minute, and fans frames/packets out to attached screen consumers.
 *
 * ptr is a struct camera *; always returns NULL.
 *
 * Fix: the "wait for first keyframe" skip path previously continued
 * without releasing the packet, leaking every packet read before the
 * first keyframe; it now frees the packet before continuing.
 */
void *recorder_thread(void *ptr) {
  struct camera *cam = (struct camera *)ptr;
  struct motion_detection md;
  AVPacket packet;
  AVFrame *frame;
  int got_frame, ret;
  unsigned int cnt = 0;            /* frame counter for motion-analysis stride */
  time_t first_activity = 0;       /* start of the current motion event, 0 = none */
  time_t last_activity = 0;

  if(open_camera(cam) < 0)
    return NULL;
  if(open_output(cam) < 0)
    return NULL;

  av_dump_format(cam->context, 0, cam->context->filename, 0);
  av_dump_format(cam->output_context, 0, cam->output_context->filename, 1);

  md.cam = cam;

  /* Grayscale working images for motion detection. */
  md.prev = cvCreateImage(cvSize(cam->codec->width, cam->codec->height), IPL_DEPTH_8U, 1);
  md.cur  = cvCreateImage(cvSize(cam->codec->width, cam->codec->height), IPL_DEPTH_8U, 1);
  md.silh = cvCreateImage(cvSize(cam->codec->width, cam->codec->height), IPL_DEPTH_8U, 1);
  cvZero(md.prev);
  cvZero(md.cur);
  cvZero(md.silh);

  md.img_convert_ctx = sws_getContext(
    cam->codec->width, cam->codec->height, cam->codec->pix_fmt,
    cam->codec->width, cam->codec->height, PIX_FMT_GRAY8,
    SWS_BICUBIC, NULL, NULL, NULL);
  md.buffer = (uint8_t*)av_malloc(3 * cam->codec->width * cam->codec->height);

  int got_key_frame = 0, first_detection = 1;
  frame = avcodec_alloc_frame();
  if(!frame) {
    av_err_msg("avcodec_alloc_frame", 0);
    return NULL;
  }

  while(1) {
    cam->last_io = time(NULL);

    if((ret = av_read_frame(cam->context, &packet)) < 0) {
      if(ret == AVERROR_EOF) break;
      else av_err_msg("av_read_frame", ret);
    }

    if(packet.stream_index == cam->video_stream_index) {
      // start on keyframe
      if(!got_key_frame && !(packet.flags & AV_PKT_FLAG_KEY)) {
        av_free_packet(&packet);  /* fix: packet was leaked on this path */
        continue;
      }
      got_key_frame = 1;

      avcodec_get_frame_defaults(frame);
      got_frame = 0;

      /* Decode for motion analysis only every analize_frames-th packet. */
      cnt = (cnt + 1) % cam->analize_frames;
      if(cnt == 0) {
        if((ret = avcodec_decode_video2(cam->codec, frame, &got_frame, &packet)) < 0)
          av_err_msg("avcodec_decode_video2", ret);

        if(got_frame) {
          if(detect_motion(&md, frame)) {
            if(first_activity == 0) first_activity = time(NULL);
            last_activity = time(NULL);
          } else {
            /* Motion ended: record the event once quiet for motion_delay
             * seconds, except the very first detection after startup. */
            if(first_activity > 0 && time(NULL) - last_activity > cam->motion_delay) {
              if(!first_detection)
                db_create_event(cam->id, first_activity, last_activity);
              else
                first_detection = 0;
              first_activity = 0;
            }
          }
        }

        /* At most one screenshot a minute, and only on a keyframe. */
        if(time(NULL) - cam->last_screenshot > 60 && (packet.flags & AV_PKT_FLAG_KEY)) {
          char fname[128];
          snprintf(fname, sizeof(fname), "%s/%s/screenshot.png", store_dir, cam->name);
          cvSaveImage(fname, md.cur, 0);
          cam->last_screenshot = time(NULL);
        }
      }

      packet.stream_index = cam->output_stream->id;
      if((ret = av_write_frame(cam->output_context, &packet)) < 0)
        av_err_msg("av_write_frame", ret);

      /* Fan out to live viewers: single-template screens get the raw
       * packet over RTP; multi-template screens get a decoded frame. */
      pthread_mutex_lock(&cam->consumers_lock);
      for(l1 *p = cam->cam_consumers_list; p != NULL; p = p->next) {
        struct cam_consumer *consumer = (struct cam_consumer *)p->value;
        if(!consumer->screen->active)
          continue;

        if(consumer->screen->tmpl_size == 1) {
          packet.stream_index = 0;
          if((ret = av_write_frame(consumer->screen->rtp_context, &packet)) < 0)
            av_err_msg("av_write_frame", ret);
        } else {
          if(!got_frame) {
            if((ret = avcodec_decode_video2(cam->codec, frame, &got_frame, &packet)) < 0) {
              av_err_msg("avcodec_decode_video2", ret);
              break;
            }
          }
          if(got_frame)
            copy_frame_to_consumer(frame, cam->codec->height, consumer);
        }
      }
      pthread_mutex_unlock(&cam->consumers_lock);
    }

    av_free_packet(&packet);
    
    if(!cam->active) {
      break;
    }

    /* Rotate the output file hourly; require a fresh keyframe after. */
    if(time(NULL) - cam->file_started_at > 60 * 60) {
      db_update_videofile(cam);
      close_output(cam);
      open_output(cam);
      got_key_frame = 0;
    }
  }

  db_update_videofile(cam);
  close_output(cam);

  if((ret = avcodec_close(cam->codec)) < 0)
    av_err_msg("avcodec_close", ret);
  avformat_close_input(&cam->context);
  av_free(frame);

  cvReleaseImage(&md.prev);
  cvReleaseImage(&md.cur);
  cvReleaseImage(&md.silh);
  av_free(md.buffer);
  sws_freeContext(md.img_convert_ctx);

  return NULL;
}
Пример #12
0
/*
 * Mini HTTP camera server entry point.
 *
 * Parses -h (image height), -w (image width), and -p (listen port)
 * options, opens the listening socket and the camera, then serves frames
 * to clients in a loop: "bmp", "jpg", and "yuyv" GET parameters select
 * the image format, "exit" shuts the server down.
 *
 * Returns 0 on a requested exit, 1 when socket or camera setup fails.
 */
int main(int argc, char **argv) {
  int c = -1;
  int option_index;
  int img_height = DEFAULT_HEIGHT; 
  int img_width  = DEFAULT_WIDTH;
  int listen_port = LISTEN_PORT;
  
  for (;;) {
    c = getopt_long(argc, argv, short_options, long_options, &option_index);
    if (c == -1) {
      break;
    }
    
    /* NOTE(review): atoi() silently yields 0 on malformed input and
     * there is no range check — consider strtol with validation. */
    switch(c) {
      case 'h': {
	if (optarg != NULL) {
	  img_height = atoi(optarg);
	}
	break;
      };
      case 'w': {
	if (optarg != NULL) {
	  img_width = atoi(optarg);
	}
	break;
      };
      case 'p': {
	if (optarg != NULL) {
	  listen_port = atoi(optarg);
	}
	break;
      };
    };
  }
  
  if (open_socket(LISTEN_ADDR, listen_port) != 0) {
    printf("ERROR: cannot open socket\n");
    return 1;
  }
  
  if (open_camera() != 0) {
    printf("Camera file error, exiting...\n");
    return 1;
  }
  
  if (init_camera(img_height, img_width) != 0) {
    printf("Camera error, exiting...\n");
    return 1;
  } else {
    printf("INFO: image size is %dx%d;\n\n", img_width, img_height);
  }

  /* Outer loop: one client connection at a time, forever. */
  while (1) {
    while (1) {
      if (wait_for_connect() == 0) { break; }
    }
    
    /* Inner loop: read requests on the accepted socket until one frame
     * is served (break) or an exit is requested (goto). */
    while (1) {
      if (read_current_socket() == 0) {
	if (check_get_params("exit") == 1) {
	  goto APP_EXIT;
	} else if (check_get_params("bmp") == 1) {
	  if (check_get_params(GETPARAM_SEND_HTTP_HEADERS)) {
	    send(get_current_socket(), http_headers, strlen(http_headers), 0);
	    send(get_current_socket(), header_ct_bmp, strlen(header_ct_bmp), 0);
	  }
	  read_camera(IMAGE_TYPE_BMP, get_current_socket(), check_get_params(GETPARAM_COLOR_IMAGE));
	  close_current_socket();
	  break;
	} else if (check_get_params("jpg") == 1) { 
	  //print_time(0);
	  if (check_get_params(GETPARAM_SEND_HTTP_HEADERS)) {
	    send(get_current_socket(), http_headers, strlen(http_headers), 0);
	    send(get_current_socket(), header_ct_jpeg, strlen(header_ct_jpeg), 0);
	  }
	  read_camera(IMAGE_TYPE_JPG, get_current_socket(), check_get_params(GETPARAM_COLOR_IMAGE));
	  close_current_socket();
	  //print_time(1);
	  break;
	} else if (check_get_params("yuyv") == 1) { 		// GET:yuyv
	  read_camera(IMAGE_TYPE_YUYV, get_current_socket(), 0);
	  close_current_socket();
	  break;
	}
      }
    }
  }
  
  APP_EXIT:
  printf("Exiting...\n");
  close_camera();
  close_current_socket();
  close_main_socket();
  
  return 0;
}