Пример #1
1
/*
 * Video-recording main: bring up camera -> encoder -> preview -> output,
 * record frames until SIGINT flips `recording`, then tear everything
 * down via a classic goto cleanup ladder.
 *
 * The error labels are ordered so that each failure point falls through
 * exactly the close() calls matching the init calls that succeeded
 * before it — do not reorder labels or init calls independently.
 *
 * Returns 0 unconditionally (even on init failure — callers cannot
 * distinguish success; NOTE(review): consider a nonzero exit on error).
 */
int main()
{
	int i;
	struct picture_t pic;
	struct encoded_pic_t encoded_pic;
	errno = 0;
	if(!camera_init(&pic))
		goto error_cam;
	if(!encoder_init(&pic)){
		fprintf(stderr,"failed to initialize encoder\n");
		goto error_encoder;
	}
	if(!preview_init(&pic))
		goto error_preview;
	if(!output_init(&pic))
		goto error_output;
	/* Header encode/write failures share error_output: output_init()
	 * already succeeded, so output_close() must run. */
	if(!encoder_encode_headers(&encoded_pic))
		goto error_output;
	if(!output_write_headers(&encoded_pic))
		goto error_output;
	if(!camera_on())
		goto error_cam_on;
	if(signal(SIGINT, stop_recording) == SIG_ERR){
		fprintf(stderr,"signal() failed\n");
		goto error_signal;
	}
	printf("Press ctrl-c to stop recording...\n");
	recording = 1;
	/* Capture loop: stop on SIGINT (recording cleared) or on any
	 * frame-level failure. */
	for(i=0; recording; i++){
		if(!camera_get_frame(&pic))
			break;
		gen_osd_info();
		osd_print(&pic, osd_string);
		/* Preview only every 8th frame to keep the loop cheap. */
		if((i&7)==0) // i%8==0
			preview_display(&pic);
		if(!encoder_encode_frame(&pic, &encoded_pic))
			break;
		applog_flush();
		if(!output_write_frame(&encoded_pic))
			break;
	}
	printf("\nrecorded %d frames\n", i);

	/* Cleanup ladder: entering at label N runs every close below it,
	 * i.e. the reverse of the successful init sequence. */
error_signal:
	camera_off();
error_cam_on:
	output_close();
error_output:
	preview_close();
error_preview:
	encoder_close();
error_encoder:
	camera_close();
error_cam:
	return 0;
}
Пример #2
0
/*
 * Builds the teapot demo scene: the teapot mesh in shiny white, one
 * point light, a blue background, a fixed camera, and a blue back wall
 * made of two triangles.
 */
void scene_loadTeapotDemo(Scene *scene) {
    /* Slightly reflective white material for the teapot mesh. */
    Material teapotMat = material_make(COLOR_WHITE, 0.2, 0, 20);
    scene_loadMesh(scene, "teapot.txt", teapotMat);

    scene->backgroundColor = COLORS1_BLUE;

    Light keyLight = light_make(vec3_make(10, 10, -20), 1.0);
    scene_AddLight(scene, &keyLight);

    scene->ambientCoefficient = 0.6;

    /* Camera above and behind the origin, tilted slightly downward. */
    camera_init(&scene->camera, vec3_make(0, 5, -10), vec3_make(0, -0.4, 1), 800.0,
                scene->camera.width, scene->camera.height);

    /* Eight corner vertices of a large slab; only the z = 5 face is
     * actually turned into geometry below. */
    Material wallMat = material_make(COLORS1_BLUE, 0, 0, 200);
    Vector3 corner[8];
    corner[0] = vec3_make(-500, -500, -30);
    corner[1] = vec3_make(-500, 500, -30);
    corner[2] = vec3_make(500, 500, -30);
    corner[3] = vec3_make(500, -500, -30);

    corner[4] = vec3_make(-500, -500, 5);
    corner[5] = vec3_make(-500, 500, 5);
    corner[6] = vec3_make(500, 500, 5);
    corner[7] = vec3_make(500, -500, 5);

    /* Back wall: two triangles spanning the z = 5 quad. */
    Surface backWall[2];
    backWall[0] = surface_initTriangle(corner[6], corner[5], corner[4], wallMat);
    backWall[1] = surface_initTriangle(corner[7], corner[6], corner[4], wallMat);
    scene_addSurfaceRange(scene, backWall, 2);
}
Пример #3
0
/*
 * Window-reshape handler: rebuilds a hand-rolled perspective projection
 * matrix (column-major) from the field-of-view setting (dFov) and draw
 * depth (dDrawdepth), loads it as the GL projection, and hands the same
 * matrix to the camera module.
 */
void lt_hack_reshape (xstuff_t * XStuff)
{
	/* mat[0] = cot(fov/2); mat[5] (the y scale) is a placeholder here
	 * and is patched in below once the window aspect is known. */
	float mat[16] = { cos ((float)dFov * 0.5f * DEG2RAD) / sin ((float)dFov * 0.5f * DEG2RAD),
		0.0f, 0.0f, 0.0f,
		0.0f, 0.0, 0.0f, 0.0f,
		0.0f, 0.0f, -1.0f - 0.02f / (float)dDrawdepth, -1.0f,
		0.0f, 0.0f, -(0.02f + 0.0002f / (float)dDrawdepth), 0.0f
	};

        height = XStuff->windowHeight;
        width = XStuff->windowWidth;

	glViewport (0, 0, (GLint) XStuff->windowWidth, (GLint) XStuff->windowHeight);

	glFrontFace (GL_CCW);
	glEnable (GL_CULL_FACE);
	glClearColor (0.0f, 0.0f, 0.0f, 1.0f);
	glMatrixMode (GL_PROJECTION);
	glLoadIdentity ();

	/* NOTE(review): y scale = x scale * (width/height); the textbook
	 * form multiplies by height/width instead — confirm this inverted
	 * ratio is the intended look for this hack. */
	mat[5] = mat[0] * ((float)XStuff->windowWidth / (float)XStuff->windowHeight);

	glLoadMatrixf (mat);
	camera_init (&theCamera, mat, (float)dDrawdepth);
	glMatrixMode (GL_MODELVIEW);
	glLoadIdentity ();
}
Пример #4
0
/*
 * Initialises an empty scene: dynamic surface and light arrays, a
 * default origin camera looking down +z, and default ambient/background
 * settings.
 */
void scene_init(Scene *scene, size_t width, size_t height) {
    /* Growable containers for geometry and lights. */
    array_init(&scene->lights, INIT_CAPACITY, sizeof(Light));
    array_init(&scene->surfaces, INIT_CAPACITY, sizeof(Surface));

    /* Camera at the origin facing +z with a 430.0 focal length. */
    camera_init(&scene->camera, VEC3_ZERO, vec3_make(0, 0, 1), 430.0, width, height);

    scene->backgroundColor = COLOR_WHITE;
    scene->ambientCoefficient = 0.6;
}
Пример #5
0
/*
 * Module entry point: consult the "camera/miniisp" hardware-config key
 * and initialise either the mini-ISP camera path (key present and set
 * to 1) or the regular camera path (key absent or any other value).
 */
static int __init camera_module_init(void)
{
    u32 val;
    u32 ret = 0;

    if (get_hw_config_int("camera/miniisp", &val, NULL) == true && val == 1)
        ret = mini_isp_camera_init();
    else
        ret = camera_init();

    return ret;
}
Пример #6
0
/*
 * Builds the snowman demo scene: a three-sphere snowman plus two extra
 * spheres, a five-walled room of triangles, one point light, and a
 * fixed camera inside the room.
 */
void scene_loadSnowmanDemo(Scene *scene) {
    Surface spheres[5];
    Surface triangles[12];
    Vector3 vs[12];
    Light lights[4];
    scene->ambientCoefficient = 0.7;
    /* Camera placed at the front wall (z = -70) looking into the room. */
    camera_init(&scene->camera, vec3_make(0, 35, -70), vec3_make(0, 0, 1), 500.0,
                scene->camera.width, scene->camera.height);
    lights[0] = light_make(vec3_make(80, 20, -30), 1.0);
    lights[1] = light_make(vec3_make(60, 65, 30), 1.0);
    lights[2] = light_make(vec3_make(65, 60, 30), 1.0);
    lights[3] = light_make(vec3_make(65, 65, 30), 1.0);
    /* spheres[0,3,4]: snowman body/torso/head; [1,2]: loose color balls. */
    spheres[1] = surface_initSphere(vec3_make(45, -20, 70), 20,
                                   material_make(COLOR_GREEN, 0.5, 0, 20.2));
    spheres[2] = surface_initSphere(vec3_make(-35, -25, 80), 15,
                                   material_make(COLOR_BLUE, 0.5, 0, 100));
    spheres[0] = surface_initSphere(vec3_make(10, -10, 110), 30,
                                   material_make(COLOR_WHITE, 0.2, 0, 40.0));
    spheres[3] = surface_initSphere(vec3_make(10, 30, 110), 25,
                                   material_make(COLOR_WHITE, 0.2, 0, 40));
    spheres[4] = surface_initSphere(vec3_make(10, 60, 110), 20,
                                   material_make(COLOR_WHITE, 0.2, 0, 40));
    Material sideWallMaterial1 = material_make(COLORS1_BLUE, 0.0, 0, 400);
    Material sideWallMaterial2 = material_make(COLORS1_BLUE, 0.0, 0, 400);
    Material ceilingMaterial = material_make(COLORS1_BLUE, 0.3, 0, 400);
    Material floorMaterial = material_make(COLOR_WHITE, 0.3, 0, 40);
    /* vs[0..7]: room corners; vs[8..11]: front-wall corners at z=-70. */
    vs[0] = vec3_make(-75, -40, 0);
    vs[1] = vec3_make(-75, -40, 150);
    vs[2] = vec3_make(75, -40, 0);
    vs[3] = vec3_make(75, -40, 150);
    vs[4] = vec3_make(-75, 110, 0);
    vs[5] = vec3_make(-75, 110, 150);
    vs[6] = vec3_make(75, 110, 0);
    vs[7] = vec3_make(75, 110, 150);
    vs[8] = vec3_make(-75, -40, -70);
    vs[9] = vec3_make(75, -40, -70);
    vs[10] = vec3_make(75, 110, -70);
    vs[11] = vec3_make(-75, 110, -70);
    //Floor
    triangles[0] = surface_initTriangle(vs[2], vs[1], vs[0], floorMaterial);
    triangles[1] = surface_initTriangle(vs[2], vs[3], vs[1], floorMaterial);
    //Left wall
    triangles[2] = surface_initTriangle(vs[0], vs[1], vs[4], sideWallMaterial1);
    triangles[3] = surface_initTriangle(vs[1], vs[5], vs[4], sideWallMaterial1);
    //Right wall
    triangles[4] = surface_initTriangle(vs[6], vs[3], vs[2], sideWallMaterial1);
    triangles[5] = surface_initTriangle(vs[6], vs[7], vs[3], sideWallMaterial1);
    //Ceiling
    triangles[6] = surface_initTriangle(vs[4], vs[5], vs[6], ceilingMaterial);
    triangles[7] = surface_initTriangle(vs[5], vs[7], vs[6], ceilingMaterial);
    //Back
    triangles[8] = surface_initTriangle(vs[3], vs[7], vs[5], sideWallMaterial2);
    triangles[9] = surface_initTriangle(vs[5], vs[1], vs[3], sideWallMaterial2);
    triangles[10] = surface_initTriangle(vs[8], vs[9], vs[10], sideWallMaterial1);
    triangles[11] = surface_initTriangle(vs[8], vs[10], vs[11], sideWallMaterial1);
    scene_addSurfaceRange(scene, spheres, 5);
    /* NOTE(review): only 10 of the 12 triangles are added — the front
     * wall (triangles[10..11], behind the camera) is excluded; confirm
     * that is intentional. */
    scene_addSurfaceRange(scene, triangles, 10);
    /* NOTE(review): four lights are built above but only lights[0] is
     * registered here — confirm the count of 1 is intentional. */
    scene_AddLightRange(scene, lights, 1);
}
Пример #7
0
/*
 * Board-level camera bring-up; compiles to a no-op on boards without a
 * camera (CONFIG_ARA_BRIDGE_HAVE_CAMERA unset).
 */
static void board_camera_init(void)
{
#ifdef CONFIG_ARA_BRIDGE_HAVE_CAMERA
    /* NOTE(review): 1 s pause before init — presumably lets the sensor
     * power rail settle; confirm against board documentation. */
    sleep(1);

    camera_init();
#endif
}
Пример #8
0
/*
 * Powers up the camera rig: the bcm2835 GPIO library, the three
 * spherical-axis steppers (rho/theta/phi) composed into the sphere
 * driver, the shutter trigger, and the camera body itself.
 *
 * error() is expected not to return when the bcm2835 library is
 * missing.
 *
 * Fix: the function is declared int but previously fell off the end
 * without a return statement — undefined behavior whenever a caller
 * uses the result.  It now returns 0 on success.
 * NOTE(review): confirm callers treat 0 as success (some codebases use
 * nonzero-as-success with camera_on()).
 */
int camera_on(){
    if(!w_init())error("bcm2835 missing");
    sphere_init(stepper_init(RHO_SLEEP,RHO_STEP,RHO_DIR,RHO_M0,RHO_M1,RHO_MODE,RHO_STEPS,"ρ"),
                stepper_init(THETA_SLEEP,THETA_STEP,THETA_DIR,THETA_M0,THETA_M1,THETA_MODE,THETA_STEPS,"θ"),
                stepper_init(PHI_SLEEP,PHI_STEP,PHI_DIR,PHI_M0,PHI_M1,PHI_MODE,PHI_STEPS,"φ"),
                "ο");
    trigger_init("μ");
    camera_init("δ");
    return 0;
}
/*
 * Registers the Olympus board's camera devices: board-specific camera
 * init, then the camera I2C devices on bus 2 (the i2c3 board-info
 * table).
 *
 * Fix: ARRAY_SIZE() yields a size_t; printing it with %d is a format
 * mismatch on LP64 kernels — use %zu.
 */
void __init olympus_cameras_init(void)
{
	camera_init();

	printk("bus 2: %zu devices\n", ARRAY_SIZE(olympus_i2c3_board_info));

	i2c_register_board_info(2, olympus_i2c3_board_info,
				ARRAY_SIZE(olympus_i2c3_board_info));
}
Пример #10
0
/*
 * TAP-style smoke test: bring up the video, camera and tilemap
 * subsystems, declare a plan of 4 checks, run the basic output check,
 * and leave a TODO placeholder for the file-reading tests.
 * (Reaching the end of main without a return yields 0 under C99+.)
 */
int main(void)
{
    video_init();
    camera_init();
    tilemap_init();
    plan(4);
    test_basic_output();
    /* todo()/end_todo bracket tests that are expected to fail for now. */
    todo();
    note("exercise file reading");
    end_todo;
    done_testing();
}
Пример #11
0
/**
 * Sets the temperature of the camera CCD to set_temp.
 *
 * Opens the camera, issues FLISetTemperature, then closes it again.
 * NOTE(review): CALLFLIAPI is assumed to report FLI API failures
 * itself — confirm whether it returns or aborts on error.
 *
 * @param set_temp integer, the temperature to set the CCD to
 * @return Zero on success, non-zero on failure.
 */
int camera_set_temp(int set_temp) {
  int err;

  err = camera_init();
  if (err) return err;

  CALLFLIAPI(FLISetTemperature(fli->active_camera, set_temp), "FLISetTemperature");

  err = camera_fini();
  if (err) return err;

  return 0;
}
Пример #12
0
/* Camera initial setup */
/*
 * Configures the OV9655 sensor over I2C (address 0x30) for QVGA
 * capture, points the capture DMA at the two frame buffers, starts
 * streaming, and resets camera-related globals.
 * The i2cwrite length is the register-table entry count, hence the
 * >>1 on the byte size of each (reg, value) pair table.
 */
void camera_setup () {
  imgWidth = 320;
  imgHeight = 256;
  /* "##IMJ5" — image header tag prepended to outgoing JPEG frames. */
  strcpy1(imgHead, "##IMJ5");
  i2cwrite(0x30, ov9655_setup, sizeof(ov9655_setup)>>1);
  i2cwrite(0x30, ov9655_qvga, sizeof(ov9655_qvga)>>1);
  camera_init((unsigned char *)DMA_BUF1, (unsigned char *)DMA_BUF2, imgWidth, imgHeight);
  camera_start();

	/* Initialise camera-related globals */
	framecount = 0;
	overlay_flag = 1;
	quality = 3; // Default JPEG quality.
}
Пример #13
0
/* Switches between visualization modes.
 *
 * During the splash screen the request is only queued; otherwise the
 * geometry and camera are rebuilt for the new mode and an initial
 * camera pan is scheduled.  first_init distinguishes the filesystem's
 * very first appearance (FSV_NONE) from subsequent mode switches. */
void
fsv_set_mode( FsvMode mode )
{
	boolean first_init = FALSE;

	switch (globals.fsv_mode) {
		case FSV_SPLASH:
		/* Queue desired mode */
		initial_fsv_mode = mode;
		return;

		case FSV_NONE:
		/* Filesystem's first appearance */
		first_init = TRUE;
		break;

		default:
		/* Set initial mode for next time */
		initial_fsv_mode = mode;
		break;
	}

	/* Generate appropriate visualization geometry */
	geometry_init( mode );

	/* Set up initial camera state */
	camera_init( mode, first_init );

	globals.fsv_mode = mode;

	/* Ensure that About presentation is not up */
	about( ABOUT_END );

	/* Render one frame before performing the initial camera pan.
	 * There are two separate reasons for doing this: */
	if (first_init) {
		/* 1. Practical limitations make the first frame take an
		 * unusually long time to render, so this avoids a really
		 * unpleasant camera jump */
		schedule_event( initial_camera_pan, "new_fs", 1 );
	}
	else {
		/* 2. In order to do a camera pan, the geometry needs to
		 * be defined. We just called geometry_init( ), but if the
		 * camera's going to a non-root node, it may very well not
		 * have been laid out yet (but it will be when drawn) */
		schedule_event( initial_camera_pan, "", 1 );
	}
}
Пример #14
0
/*
 * V4L2 capture + H.264 encode demo: open /dev/video0 at 640x480,
 * start capturing, and encode frames to test.h264 forever.
 *
 * Fixes:
 *  - the fopen-failure path previously returned -1 while leaking the
 *    running capture pipeline, the h264 buffer and *cam; it now tears
 *    everything down in reverse init order.
 *  - "wa+" is not a standard fopen mode string (glibc happened to
 *    treat it as "w+"); use the portable "w+" explicitly.
 *
 * Note: the capture loop never exits (the break on read failure is
 * commented out), so the cleanup after it is unreachable in normal
 * operation; it is kept for when the loop gains an exit condition.
 */
int main(int argc, char **argv)
{
	struct camera *cam = NULL;

	cam = (struct camera *)malloc(sizeof(struct camera));
	if (!cam) {
		printf("malloc camera failure!\n");
		exit(1);
	}

	memset(cam, 0, sizeof(struct camera));
	cam->device_name = "/dev/video0";
	cam->buffers = NULL;
	cam->width = 640;
	cam->height = 480;
	cam->display_depth = 5;		/* RGB24 */
	cam->h264_file_name = "test.h264";

	camera_open(cam);
	camera_init(cam);
	camera_capturing_start(cam);
	h264_compress_init(&cam->en, cam->width, cam->height);
	/* Worst-case encoded-frame buffer: one full RGB frame. */
	cam->h264_buf = (uint8_t *) malloc(sizeof(uint8_t) * cam->width * cam->height * 3);
	if ((cam->h264_fp = fopen(cam->h264_file_name, "w+")) == NULL) {
		printf("open file error!\n");
		/* Release everything initialised above instead of leaking. */
		free(cam->h264_buf);
		h264_compress_uninit(&cam->en);
		camera_capturing_stop(cam);
		camera_uninit(cam);
		camera_close(cam);
		free(cam);
		return -1;
	}

	while (1) {
		if (read_and_encode_frame(cam) < 0) {
			fprintf(stderr, "read_fram fail in thread\n");
			//break;
		}
	}

	printf("-----------end program------------");
	if (cam->h264_fp != NULL)
		fclose(cam->h264_fp);
	h264_compress_uninit(&cam->en);
	free(cam->h264_buf);

	camera_capturing_stop(cam);
	camera_uninit(cam);
	camera_close(cam);

	free(cam);

	return 0;
}
Пример #15
0
/**
 * Prints the current temperature of the camera CCD, base and the cooling power.
 *
 * Opens the camera, reads the two temperature sensors and the cooler
 * power via the FLI API, prints them to stdout, then closes the
 * camera.
 *
 * @return Zero on success, non-zero on failure.
 */
int camera_get_temp() {
  int err;
  double ccd_temp, base_temp, cooler_power;

  err = camera_init();
  if (err) return err;

  CALLFLIAPI(FLIReadTemperature(fli->active_camera, FLI_TEMPERATURE_CCD, &ccd_temp), "FLIReadTemperature");
  CALLFLIAPI(FLIReadTemperature(fli->active_camera, FLI_TEMPERATURE_BASE, &base_temp), "FLIReadTemperature");
  CALLFLIAPI(FLIGetCoolerPower(fli->active_camera, &cooler_power), "FLIGetCoolerPower");

  fprintf(stdout, "camera CCD  temperature: %f°C\n", ccd_temp);
  fprintf(stdout, "camera base temperature: %f°C\n", base_temp);
  fprintf(stdout, "camera cooler power: %f%%\n", cooler_power);

  err = camera_fini();
  if (err) return err;

  return 0;
}
Пример #16
0
/*
 * Fills in sane defaults for any unset fields of *dev (device node,
 * frame size, buffer count, callbacks), then initialises and starts
 * the camera.
 *
 * Fix: the default-name copy used strncpy with strlen+2, writing one
 * byte past the string's terminator for no reason — a buffer overflow
 * if dev_name is sized exactly to fit.  Copy strlen+1 bytes (the text
 * plus its NUL).  The empty-name test is expressed directly instead of
 * the always-suspicious `strlen(...) <= 0` on an unsigned value.
 *
 * Returns camera_start()'s result on success, -1 if camera_init fails.
 */
int  camera_info_init(struct camera * dev) {

    /* Default device node if none was configured. */
    if(dev->dev_name[0] == '\0')
        strncpy(dev->dev_name,"/dev/video100",strlen("/dev/video100")+1);
    if(dev->v4l_info.wide <= 0 )
        dev->v4l_info.wide = 320;
    if(dev->v4l_info.high <=0 )
        dev->v4l_info.high =240;
    if(dev->v4l_info.v4l_num<=0)
        dev->v4l_info.v4l_num =4;

    /* Default callbacks. */
    if(dev->exit==NULL)
        dev->exit   =&camera_exit;

    if(dev->get_rgb16==NULL)
        dev->get_rgb16 = &get_rgb16;

    if(camera_init(dev) <0)
        return -1;
    return camera_start(dev) ;
}
Пример #17
0
/**
 * Prints information about the camera: firmware/hardware revisions,
 * serial, model, library version, array/visible areas and pixel sizes.
 *
 * Fixes:
 *  - the three 128-byte strings were heap-allocated without checking
 *    malloc's result and were never freed (leaked on every call and on
 *    every early-return path); fixed-size stack buffers are used
 *    instead — the sizes are compile-time constants.
 *  - the CALLFLIAPI label for FLIGetArrayArea mistakenly said
 *    "FLIGetLibVersion" (copy-paste), which would mislabel any error
 *    report from that call.
 *
 * @return Zero on success, non-zero on failure.
 */
int camera_info() {
  long fwrev, hwrev;
  char serial[128];
  char model[128];
  char lib_version[128];
  int err;
  long total_ul_x, total_ul_y, total_lr_x, total_lr_y;
  long visible_ul_x, visible_ul_y, visible_lr_x, visible_lr_y;
  double pixel_size_x, pixel_size_y;

  err = camera_init();
  if (err) return err;

  CALLFLIAPI(FLIGetFWRevision(fli->active_camera, &fwrev), "FLIGetFWRevision");
  CALLFLIAPI(FLIGetHWRevision(fli->active_camera, &hwrev), "FLIGetHWRevision");
  CALLFLIAPI(FLIGetSerialString(fli->active_camera, serial, 128), "FLIGetSerialString");
  CALLFLIAPI(FLIGetModel(fli->active_camera, model, 128), "FLIGetModel");
  CALLFLIAPI(FLIGetLibVersion(lib_version, 128), "FLIGetLibVersion");
  CALLFLIAPI(FLIGetArrayArea(fli->active_camera, &total_ul_x, &total_ul_y, &total_lr_x, &total_lr_y), "FLIGetArrayArea");
  CALLFLIAPI(FLIGetVisibleArea(fli->active_camera, &visible_ul_x, &visible_ul_y, &visible_lr_x, &visible_lr_y), "FLIGetVisibleArea");
  CALLFLIAPI(FLIGetPixelSize(fli->active_camera, &pixel_size_x, &pixel_size_y), "FLIGetPixelSize");

  fprintf(stdout, "camera FW revision: 0x%04x\n", (int) fwrev);
  fprintf(stdout, "camera HW revision: 0x%04x\n", (int) hwrev);
  fprintf(stdout, "camera serial number: %s\n", serial);
  fprintf(stdout, "camera model: %s\n", model);
  fprintf(stdout, "camera lib version: %s\n", lib_version);
  fprintf(stdout, "camera total area: %ld, %ld, %ld, %ld\n", total_ul_x, total_ul_y, total_lr_x, total_lr_y);
  fprintf(stdout, "camera visible area: %ld, %ld, %ld, %ld\n", visible_ul_x, visible_ul_y, visible_lr_x, visible_lr_y);
  fprintf(stdout, "camera pixel sizes: %fm, %fm\n", pixel_size_x, pixel_size_y);

  err = camera_fini();
  if (err) return err;

  return 0;
}
Пример #18
0
/*
 * Opens the camera, applies the requested gain, allocates a QR decoder
 * sized to the camera frame, and runs the MJPEG scan loop.
 *
 * Returns 0 on success, -1 on failure.
 *
 * Fix: the two allocation-failure paths printed an error and then
 * returned 0 — indistinguishable from success for the caller.  They
 * now return -1.
 * NOTE(review): main_loop()'s return value is still discarded, as in
 * the original — confirm whether it should be propagated.
 */
static int run_scanner(void)
{
	struct quirc *qr;
	struct camera cam;
	struct mjpeg_decoder mj;
	int ret;

	if (camera_init(&cam, camera_path, video_width, video_height) < 0)
		return -1;

	camera_set_gain(&cam, gain_request);

	qr = quirc_new();
	if (!qr) {
		perror("couldn't allocate QR decoder");
		goto fail_qr;
	}

	if (quirc_resize(qr, cam.width, cam.height) < 0) {
		perror("couldn't allocate QR buffer");
		goto fail_qr_resize;
	}

	mjpeg_init(&mj);
	ret = main_loop(&cam, qr, &mj);
	(void)ret;
	mjpeg_free(&mj);

	quirc_destroy(qr);
	camera_free(&cam);

	return 0;

fail_qr_resize:
	quirc_destroy(qr);
fail_qr:
	camera_free(&cam);

	return -1;
}
Пример #19
0
/**
 * Turns on (1) or off (0) the fan on the given camera
 *
 * Fix: the argument is validated *before* camera_init(); the original
 * initialised the camera and then returned -1 for a bad status without
 * the matching camera_fini(), leaving the camera handle open.  The
 * redundant nested status checks are also collapsed.
 *
 * @param status 1 to turn the fan on, 0 to turn it off.
 * @return Zero on success, non-zero on failure (-1 for a bad status).
 */
int camera_set_fan(int status) {
  int err;

  if (status != 0 && status != 1)
    return -1;

  err = camera_init();
  if (err) return err;

  if (status == 1) {
    /* Turn on the fan */
    CALLFLIAPI(FLISetFanSpeed(fli->active_camera, FLI_FAN_SPEED_ON), "FLISetFanSpeed");
  } else {
    /* Turn off the fan */
    CALLFLIAPI(FLISetFanSpeed(fli->active_camera, FLI_FAN_SPEED_OFF), "FLISetFanSpeed");
  }

  err = camera_fini();
  if (err) return err;

  return 0;
}
Пример #20
0
/**
 * Opens (1) or closes (0) the shutter of the camera.
 *
 * Fix: the argument is validated *before* camera_init(); the original
 * initialised the camera and then returned -1 for a bad status without
 * the matching camera_fini(), leaving the camera handle open.  The
 * redundant nested status checks are also collapsed.
 *
 * @param status integer, 0 or 1.
 * @return Zero on success, non-zero on failure (-1 for a bad status).
 */
int camera_control_shutter(int status) {
  int err;

  if (status != 0 && status != 1)
    return -1;

  err = camera_init();
  if (err) return err;

  if (status == 1) {
    /* Open the shutter */
    CALLFLIAPI(FLIControlShutter(fli->active_camera, FLI_SHUTTER_OPEN), "FLIControlShutter");
  } else {
    /* Close the shutter */
    CALLFLIAPI(FLIControlShutter(fli->active_camera, FLI_SHUTTER_CLOSE), "FLIControlShutter");
  }

  err = camera_fini();
  if (err) return err;

  return 0;
}
Пример #21
0
/*
 * Rebuilds the custom perspective projection matrix (column-major)
 * from dFov and dDrawdepth using the global width/height, installs it
 * as the GL projection, and hands the same matrix to the camera module.
 */
void shape ()
{
	/* mat[0] = cot(fov/2); mat[5] (the y scale) is a placeholder and
	 * is patched in below from the aspect ratio. */
	float mat[16] = { cos ((float)dFov * 0.5f * DEG2RAD) / sin ((float)dFov * 0.5f * DEG2RAD),
		0.0f, 0.0f, 0.0f,
		0.0f, 0.0, 0.0f, 0.0f,
		0.0f, 0.0f, -1.0f - 0.02f / (float)dDrawdepth, -1.0f,
		0.0f, 0.0f, -(0.02f + 0.0002f / (float)dDrawdepth), 0.0f
	};

	glViewport (0, 0, width, height);

	glFrontFace (GL_CCW);
	glEnable (GL_CULL_FACE);
	glClearColor (0.0f, 0.0f, 0.0f, 1.0f);
	glMatrixMode (GL_PROJECTION);
	glLoadIdentity ();

	/* NOTE(review): y scale = x scale * (width/height); the textbook
	 * form uses height/width — confirm this ratio is intended. */
	mat[5] = mat[0] * ((float)width/(float) height);

	glLoadMatrixf (mat);
	camera_init (&theCamera, mat, (float)dDrawdepth);
	glMatrixMode (GL_MODELVIEW);
	glLoadIdentity ();
}
Пример #22
0
/*
 * One-time initialisation of the base render system: GL state, the
 * main camera, the default scalar-field function, shaders and data
 * buffers, default lighting/material parameters.
 *
 * Returns 1 on success, 0 on failure (OpenGL not initialised or shader
 * init failed).  IF_FAILED0(!init) bails out if already initialised.
 */
int render_init(void)
{
	IF_FAILED0(!init);

	if(!init_opengl) {
		ERROR_MSG("OpenGL not initialised\n");
		return 0;
	}

	log_init();
	
	TRACE_MSG("init base render system\n");
	
	TRACE_MSG("init noise\n");
	noise_init();
	
	glViewport(0, 0, window_width, window_height);
	glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
	glEnable(GL_DEPTH_TEST);
	glClearDepth(1.0f);
	
	glDepthFunc(GL_LESS);
	glEnable(GL_DEPTH_CLAMP);
	
	DEBUG_MSG("OpenGL version: %s\n", glGetString(GL_VERSION));
	DEBUG_MSG("GLSL version: %s\n", (char*) glGetString(GL_SHADING_LANGUAGE_VERSION));
	
	// set up the main camera
	TRACE_MSG("init main camera\n");
	camera_init(&camera, vec3f(0.0f, 0.0f, 0.5f), vec3f(0.0f, 1.0f, 0.0f), vec3f(0.0f, 0.0f, -1.0f));
	camera_set_limit(&camera, -95.0f, 95.0f, -360.0f, 360.0f);
	camera_set_fov(&camera, camera_fov);
	camera_set_move_velocity(&camera, camera_move_speed);
	camera_set_rotate_velocity(&camera, camera_rotate_speed);
	camera_set_aspect(&camera, (float) window_width / (float) window_height);
	camera_set_zplanes(&camera, 0.001f, 1000.0f);
	camera_update(&camera, 0.0);
	
	isolevel = 30.0f; 
	isolevel_animate = 0;
	
	// set the default isosurface function ("d = y;")
	parser_create(&parser);
	const char *default_func = "d = y;";
	str_function = (char*) malloc(sizeof(char) * (strlen(default_func) + 1));
	strcpy(str_function, default_func);
	
	// configure and create the scalar field
	render_set_volume_size(vec3ui(128, 128, 128), 1);
	render_set_grid_size(vec3ui(64, 64, 64));
	render_update_volume_tex();
	
	CHECK_GL_ERRORS();
	
	// initialise the shader
	if(!init_shader())
		return 0;
	
	// initialise the data buffers
	init_buffers();
	
	// set the default parameters
	new_light_position = light_position = vec3f(1.0f, 1.0f, 1.0f);
	rot_axis = vec3f(0.0f, 1.0f, 0.0f);
	num_threads = 1;
	rot_angle = 0.0f;
	light_animate = 0;
	light_rot_angle = 0.0f;
	mat4(rotmat, 1.0f);
	mat4(modelmat, 1.0f);
	material_front_color = vec3f(0.5f, 0.5f, 0.5f);
	material_back_color = vec3f(0.5f, 0.0f, 0.0f);
	light_color = vec3f(1.0f, 1.0f, 1.0f);
	light_spec_color = vec3f(1.0f, 1.0f, 1.0f);
	
	input_set_wheel_limits(20, 150);
	
	init = 1;
	
	// polygonise the scalar field (marching cubes)
	render_update_mc();
	
	// update the renderer
	render_update(0.0f);
	
	return 1;
}
Пример #23
0
/*
 * GPS photo-logger main loop.
 *
 * Brings up the serial GPS, LCD, camera and SD card, then loops
 * forever: wait for a GPS fix, track displacement from a waypoint,
 * and — while the log toggle switch is on — photograph and log each
 * position.  Never returns in normal operation; SD-card init failure
 * halts in an infinite loop.
 */
int main (int argc, char* argv[])
{
	struct gps_location gl1 , gl2;
	struct gps_displacement gd;
	struct fatwrite_t fout;
	char logging_state = 0;
	char flag_reset = 0;

	gps_init_serial();
	lcd_init();
	/* Camera is woken per-photo later; keep it asleep between shots. */
	camera_init();
	camera_sleep();
	lcd_printf("sd card:\nconnecting");
	char rt = mmc_init();
	if (rt) {
		lcd_printf("sd card: error\n");
		while (1) ;
	}
	
	init_partition(0);
	init_logtoggle();
	lcd_printf("GPS ...");

	gps_disable_unwanted();

	char in[64];
	int i, img_counter = 0;
	char c = 0;
	/* Spinner glyphs cycled on the LCD while waiting for a fix. */
	char loading_map[] = {'-', '\\', '|', '/'};
	const char * fpic;

	while (1) {

		// wait until valid location ('A' = active/valid NMEA status)
		do {
			receive_str(in);
			gps_log_data(in , &gl1);
			lcd_printf("GPS Fixing %c\n", loading_map[(c++)&0x3]);
		} while (gl1.status != 'A');
	
		// got fix
		lcd_printf("Acquired Fix");

		// compute displacement
		while (1) {
			// read in gps data
			receive_str(in);
			if (flag_reset) {
				// reset waypoint
				gps_log_data(in, &gl1);
				flag_reset = 0;
			}
			i = gps_log_data(in , &gl2);
			
			// end log
			if (logging_state && !CHECK_LOGTOGGLE()) {
					logging_state = 0;
					lcd_printf("log: finishing..\n");
					log_end(&fout);
			}
		
			// check if we have a fix
			if (gl2.status != 'A') {
				lcd_printf("Lost GPS Fix %c\n", loading_map[(c++)&0x3]);
				continue;
			}
		
			// compute and display gps data
			// (1.15: knots -> mph conversion, rounded — TODO confirm)
			gps_calc_disp(&gl1, &gl2, &gd);
			lcd_printf("I: %d\xb2 F: %d\xb2\nMg: %dm Sp: %d",
				(int)gd.initial_bearing,
				(int)gd.final_bearing,
				(int)gd.magnitude,
				(int)(1.15*gl2.sog + 0.5));
			
			// start / update logging
			if (logging_state) {
				// add to log: wake camera, shoot, sleep again
				fpic = gps_gen_name(img_counter++);
				camera_init();
				camera_takephoto(fpic, &fout);
				camera_sleep();
				log_add(&fout, &gl2, &gd, fpic);
			} else if (CHECK_LOGTOGGLE()) {
				// start logging
				logging_state = 1;
				flag_reset = 1;
				img_counter = 0;
				log_start(&fout);
			}
		}

	}
	
	return 0;
}
Пример #24
0
/*
 * Initialize the variables
 *
 * One-time scene setup: GL state (depth, culling, fog, sphere-mapped
 * texgen, two lights), texture and model loading (exits the process
 * with a message box on failure), random placement of the buildings,
 * cursor hiding, and the frame-timer baseline.
 */
void init(void)
{
    GLfloat color[4] = { 0.75, 0.75, 0.75, 1.0 };
    glClearColor(0.75, 0.75, 0.75, 1.0);
    glEnable(GL_DEPTH_TEST);                            // Depth testing
    glEnable(GL_CULL_FACE);                             // One-sided faces

    glEnable(GL_FOG);                                   // Fog
    glFogi(GL_FOG_MODE, GL_EXP2);
    glHint(GL_FOG_HINT, GL_NICEST);
    glFogf(GL_FOG_DENSITY, 0.025f);
    glFogfv(GL_FOG_COLOR, color);

    // Enable texture & sphere mapping
    glTexGeni(GL_S, GL_TEXTURE_GEN_MODE, GL_SPHERE_MAP);
    glTexGeni(GL_T, GL_TEXTURE_GEN_MODE, GL_SPHERE_MAP);

    // Enable lighting
    glEnable(GL_LIGHTING);
    glEnable(GL_LIGHT0);
    glLightfv(GL_LIGHT0, GL_POSITION, LIGHT0_POSITION);
    glLightfv(GL_LIGHT0, GL_AMBIENT , LIGHT0_AMBIENT);
    glLightfv(GL_LIGHT0, GL_DIFFUSE , LIGHT0_DIFFUSE);
    glLightfv(GL_LIGHT0, GL_SPECULAR, LIGHT0_SPECULAR);
    glEnable(GL_LIGHT1);
    glLightfv(GL_LIGHT1, GL_POSITION, LIGHT1_POSITION);
    glLightfv(GL_LIGHT1, GL_AMBIENT , LIGHT1_AMBIENT);
    glLightfv(GL_LIGHT1, GL_DIFFUSE , LIGHT1_DIFFUSE);
    glLightfv(GL_LIGHT1, GL_SPECULAR, LIGHT1_SPECULAR);

    // Initialize texture
    if( LoadTextures() )
    {
        MessageBox(NULL, "An error occured when loading the textures.", "Initialization Error", MB_ICONERROR);
        exit(1);
    }

    // Load models
    camera_init();
    if(
        player_init() ||
        LoadModelA3D(OBJECT_LANDSCAPE, "../models/landscape.a3d") ||
        LoadModelA3D(OBJECT_JEEP     , "../models/jeep.a3d") ||
        LoadModelA3D(OBJECT_DOCK     , "../models/dock.a3d") ||
        LoadModelA3D(OBJECT_LANDING  , "../models/landing.a3d") ||
        LoadModelA3D(OBJECT_BARRACK  , "../models/barrack.a3d"))
    {
        MessageBox(NULL, "An error occured when loading the models.", "Initialization Error", MB_ICONERROR);
        exit(1);
    }

    // Initialize models (buildings) at fixed positions, random headings
    srand(GetTickCount());
    g_pModels[0].call_identifier = OBJECT_LANDING;
    g_pModels[0].position[0]     = -50;
    g_pModels[0].position[1]     = -50;
    g_pModels[0].rotation        = rand() % 360;
    g_pModels[1].call_identifier = OBJECT_DOCK;
    g_pModels[1].position[0]     = -20;
    g_pModels[1].position[1]     = -60;
    g_pModels[1].rotation        = rand() % 360;
    g_pModels[2].call_identifier = OBJECT_BARRACK;
    g_pModels[2].position[0]     = -35;
    g_pModels[2].position[1]     = -20;
    g_pModels[2].rotation        = rand() % 360;
    g_pModels[3].call_identifier = OBJECT_BARRACK;
    g_pModels[3].position[0]     = -15;
    g_pModels[3].position[1]     = -40;
    g_pModels[3].rotation        = rand() % 360;
    g_pModels[4].call_identifier = OBJECT_DOCK;
    g_pModels[4].position[0]     = -35;
    g_pModels[4].position[1]     = -60;
    g_pModels[4].rotation        = rand() % 360;
    g_pModelJeep.call_identifier = OBJECT_JEEP;
    g_pModelJeep.position[0]     = 20.0f;
    g_pModelJeep.position[1]     =  0;
    g_pModelJeep.rotation        =  0;

    // Hide mouse cursor
    ShowCursor(FALSE);
    SetCursorPos(g_iWindowCenterX, g_iWindowCenterY);

    // Store current time
    g_dLastTime = Wallclock();
}
Пример #25
0
/*
 * Lane-departure warning demo: capture frames from the camera
 * (ADV7180, YUV420 QVGA), run the car-lane analysis on each frame,
 * print a drift indicator, and mirror the processed frame to TV-out
 * via a /dev/mem mapping.  Parameters come from /sdcard/car.config
 * with built-in fallbacks.  Loops forever; the final return is
 * unreachable.
 */
int main(int argc, char* argv[])
{
	int ret;
	char fn[32] = "/sdcard/car.config";

	if(atexit(exit_handler) != 0) {
		exit(EXIT_FAILURE);
	}

	if(signal(SIGINT, signal_init_handler) == SIG_ERR) {
		exit(EXIT_FAILURE);
	}

	/* Program the ADV7180 video decoder over I2C. */
	i2c_download_adv7180_init();
	

	int width = 320;
	int height = 240;


	config.width = width;
	config.height = height;
	config.format = V4L2_PIX_FMT_YUV420;
	//config.format = V4L2_PIX_FMT_YUV422P;


	tvout_open(&config);

	/* Map the TV-out framebuffer physical address into this process
	 * and clear it to mid-gray (0x80 in both luma and chroma planes). */
	fd_mem  = open( "/dev/mem", O_RDWR ); 
//	paddr_mem_tv = mmap( 0, size_mem, PROT_READ | PROT_WRITE, MAP_SHARED, fd_mem, addr_phy_tv );
//	memset(paddr_mem_tv, 0x80, size_mem);
	size_mem = width * height * 2;
	paddr_mem_tv = mmap( 0, size_mem, PROT_READ | PROT_WRITE, MAP_SHARED, fd_mem, addr_phy_tv );
	memset(paddr_mem_tv, 0x80, size_mem);


	camera.format.width = width;                            /* set output frame width */
	camera.format.height = height;                          /* set output frame height */
	camera.format.pixelformat = V4L2_PIX_FMT_YUV420;
	//camera.format.pixelformat = V4L2_PIX_FMT_YUYV;

	ret = camera_init(&camera);
	if(0 > ret) {
		perror("open camera device error");
		exit(EXIT_FAILURE);
	}

	unsigned char *paddr_camera;
	long	size_camera;

	struct timeval start,end;
	long timeuse = 0;
	long time_max = 0;
	long time_temp = 0;
	int i = 0;

	static struct car_parm_setting car_set;

	/* Load tunables from the config file; fall back to defaults. */
	if(initlib(fn) >= 0) {
		if (isname("PARAM","carbody_width"))
			car_set.carbody_width = atoi(getvalue("carbody_width"));
		if (isname("PARAM","camera_high_degree"))
			car_set.camera_high_degree = atoi(getvalue("camera_high_degree"));
		tidyup();
	}
	else {
		car_set.carbody_width = 150;
		car_set.camera_high_degree = 50;
	}

	car_set.init_flag = 1;
	car_set.img_width = width;
	car_set.img_height = height;

	printf("/*......Car Setting......*/\n");
	printf("  carbody width = %d cm \n", car_set.carbody_width);
	printf("  camera high degree = %d cm \n", car_set.camera_high_degree);
	printf("  image width = %d pixel \n", car_set.img_width);
	printf("  image height = %d pixel \n", car_set.img_height);
	printf("/*.......................*/\n");


	/* Main capture/analyse/display loop with per-frame timing. */
	while (1) {

//		paddr_camera = camera_get_one_frame_noneblock(&camera, &size_camera);
//
		gettimeofday( &start, NULL );

		paddr_camera = camera_get_one_frame(&camera, &size_camera);
		car_set.pIn_addr = paddr_camera;
		car_set.gps_speed = 70;
		car_set.light_signal = LIGHT_SIGNAL_OFF;

		/* Run lane analysis; car_event reports the drift direction. */
		paddr_camera = set_car_parm(&car_set);
		switch(car_set.car_event) {
			case WARNING_DRIVING_SAFE:
				printf("=\n");
				break;
			case WARNING_DRIVING_LEFT:
				printf("<\n");
				break;
			case WARNING_DRIVING_RIGHT:
				printf(">\n");
				break;
		}

		//car_event = car_set.car_event;
		paddr_camera = car_set.pOut_addr;

		/* Copy only the luma plane (width*height bytes) to TV-out. */
		memcpy(paddr_mem_tv, paddr_camera, width*height); 
		camera_get_one_frame_complete(&camera);

		config.addr_phy = addr_phy_tv;
		tvout_exe(&config);


		gettimeofday( &end, NULL );
		time_temp = 1000000 * ( end.tv_sec - start.tv_sec )+ (end.tv_usec - start.tv_usec);
		if (time_temp > time_max) {
			time_max = time_temp;
		}
		timeuse +=time_temp;
		DEBUG("time used: % 7ld us\n", time_temp);


		/* Report average frame rate roughly every 10 seconds. */
		++i;
		if ( timeuse > 10*1000000) {
			printf("get %d frames spent % 7ld msec  average frame rate = %ld\n", i, timeuse, i*1000000/timeuse);
			i = 0;
			timeuse = 0;
		}
	}

	return 0;
}
Пример #26
0
/*
 * r_init
 * Perform any one-time GL state changes: reset game flags, seed the
 * target position, load textures and the level model, pick the score
 * digit texture, and initialise the camera and projection.
 *
 * Fixes: the 10-way switch that loaded "score0.tga".."score9.tga" with
 * ten identical calls is replaced by patching the digit into the file
 * name (out-of-range scores load nothing, matching the old switch);
 * the unused local myGLTexture is removed.
 */
static void r_init()
{
	lost = 0;
	won = 0;
	hit = 0;
	/* Random target offset in [-10, 10] on each axis. */
	randx = rand()%21-10;
	randz = rand()%21-10;
	size = size/2;

	int myTexWidth, myTexHeight, myTexBPP;

	glEnable(GL_DEPTH_TEST);
//	glEnable(GL_CULL_FACE);

	//NEW TEXTURE STUFF
	glEnable(GL_TEXTURE_2D);
	glEnable(GL_BLEND);
	glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
	//You might want to play with changing the modes
	//glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);

//	renderer_img_loadTGA("buttons.tga",
//			&textureButtons, &myTexWidth, &myTexHeight, &myTexBPP);
	renderer_img_loadTGA("brick.tga",
			&textureBrick, &myTexWidth, &myTexHeight, &myTexBPP);
	renderer_img_loadTGA("Starfield.tga",
			&textureStars, &myTexWidth, &myTexHeight, &myTexBPP);
	renderer_img_loadTGA("lava01.tga",
			&textureLava, &myTexWidth, &myTexHeight, &myTexBPP);
//	renderer_model_loadASE("submarine.ASE", efalse);
	renderer_model_loadASE("volcano.ASE", efalse);

	/* Load the score digit texture "score<points>.tga". */
	if (points >= 0 && points <= 9) {
		char scoreName[] = "score0.tga";
		scoreName[5] = (char)('0' + points);
		renderer_img_loadTGA(scoreName,
				&textureScore, &myTexWidth, &myTexHeight, &myTexBPP);
	}


	camera_init();

	r_setupProjection();
}
Пример #27
0
/*
 * One-time device bring-up followed by state selection.
 *
 * On the first call, initialises every compiled-in subsystem (kill
 * switch, GPS, sonar, compass, camera, car); any failure records its
 * error string and routes the state machine to ERROR_STATE.  On every
 * call thereafter, transitions to PAUSE_STATE while the kill switch is
 * asserted, otherwise to NAVIGATION_STATE.
 *
 * Fixes:
 *  - the compass failure branch formatted its message with
 *    SONAR_ERROR_STR (copy-paste bug) — now COMPASS_ERROR_STR;
 *  - every error string is passed through a "%s" format so a '%' in an
 *    error message can no longer be misread as a conversion specifier
 *    (non-literal format strings are a classic snprintf hazard).
 */
void init_state() {
    static int first = 1;
    int retval;

    if(first) {

        if(debug)
            spawn_debug_thread();

#ifdef USE_KILL_SWITCH
        //initialize kill switch
        retval = switch_init();
        if(retval != SWITCH_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", SWITCH_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        kill_switch_initialized = 1;
#endif

#ifdef USE_GPS
        // initialize gps
        retval = gps_init();
        if(retval != GPS_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", GPS_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        gps_initialized = 1;
#endif

#ifdef USE_SONAR
        // initialize sonar sensors
        retval = sonar_init();
        if(retval != SONAR_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", SONAR_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        sonar_initialized = 1;
#endif

#ifdef USE_COMPASS
        //initialize compass
        retval = compass_init();
        if(retval != COMPASS_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", COMPASS_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        compass_initialized = 1;
#endif

#ifdef USE_CAMERA
        //initialize camera
        retval = camera_init();
        if(retval != CAMERA_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", CAMERA_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        retval = camera_start_tracking();
        if(retval != CAMERA_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", CAMERA_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        camera_initialized = 1;
#endif
#ifdef USE_CAR
        retval = init_car();
        if(retval != CAR_NO_ERROR) {
            snprintf(state_data.error_str, sizeof(state_data.error_str), "%s", CAR_ERROR_STR(retval));
            next_state = ERROR_STATE;
            return;
        }
        car_initialized = 1;
#endif

        spawn_device_threads();
        first = 0;
    }

    if (kill_switch_asserted)
        next_state = PAUSE_STATE;
    else
        next_state = NAVIGATION_STATE;
    //next_state = TRACK_STATE;
}
Пример #28
0
/* Renderer bootstrap: brings up the scene graph, the camera and the render
 * list, in that order.  NOTE(review): order presumably matters (render list
 * references scene data) — confirm before reordering. */
void init(void){
	scene_init();//meshNode-list inited
	camera_init();//camera inited
	renderlist_init();//renderlist created, namely mflist[], mfptrlist[] inited
}
/*------Done in a subroutine to keep main routine stack usage small--------*/
/* Contiki/AVR platform boot: watchdog, clock, USART (or camera interface),
 * rtimers, process subsystem, radio stack, autostart processes, optional
 * coffee file system content and a boot announcement. */
void
initialize(void)
{
#ifdef BUZZER
  buzz_id();
#endif

  /* Watchdog first so a hang during the rest of boot still resets the MCU */
  watchdog_init();
  watchdog_start();

  clock_init();

  /* Report the cause of the last MCU reset, then clear the flag register */
  PRINTD("\n\nChecking MCUSR...\n");
  if(MCUSR & (1<<PORF )) PRINTD("Power-on reset.\n");
  if(MCUSR & (1<<EXTRF)) PRINTD("External reset!\n");
  if(MCUSR & (1<<BORF )) PRINTD("Brownout reset!\n");
  if(MCUSR & (1<<WDRF )) PRINTD("Watchdog reset!\n");
  if(MCUSR & (1<<JTRF )) PRINTD("JTAG reset!\n");
  MCUSR = 0;

  PRINTD("CLOCK_SECOND %d\n",CLOCK_SECOND);
  PRINTD("RTIMER_ARCH_SECOND %lu\n",RTIMER_ARCH_SECOND);
  PRINTD("F_CPU %lu\n",F_CPU);

#if STACKMONITOR
  /* Simple stack pointer highwater monitor. Checks for magic numbers in the main
   * loop. In conjunction with PERIODICPRINTS, never-used stack will be printed
   * every STACKMONITOR seconds.
   */
{
extern uint16_t __bss_end;
uint16_t p=(uint16_t)&__bss_end;
    do {
      *(uint16_t *)p = 0x4242;
      p+=10;
    } while (p<SP-10); //don't overwrite our own stack
}
#endif

/* Calibrate internal mcu clock against external 32768Hz watch crystal */
#define CONF_CALIBRATE_OSCCAL 0
#if CONF_CALIBRATE_OSCCAL
/* NOTE(review): the ';' after the prototype below makes the braces a bare
 * compound statement inside initialize(), not a function definition.  This
 * section is dead code while CONF_CALIBRATE_OSCCAL is 0 — verify it still
 * compiles as intended before enabling. */
void calibrate_rc_osc_32k();
{
extern uint8_t osccal_calibrated;
uint8_t i;
  PRINTD("\nBefore calibration OSCCAL=%x\n",OSCCAL);
  for (i=0;i<10;i++) {
    calibrate_rc_osc_32k();
    PRINTD("Calibrated=%x\n",osccal_calibrated);
//#include <util/delay_basic.h>
//#define delay_us( us )   ( _delay_loop_2(1+(us*F_CPU)/4000000UL) )
//   delay_us(50000);
 }
   clock_init();
}
#endif 

  PRINTA("\n*******Booting %s*******\n",CONTIKI_VERSION_STRING);

  leds_init();
  leds_on(LEDS_RED);

  /* Initialize USART */
#ifdef CAMERA_INTERFACE
  camera_init();
#else
  init_usart();
#endif

/* rtimers needed for radio cycling */
  rtimer_init();

 /* Initialize process subsystem */
  process_init();
 /* etimers must be started before ctimer_init */
  process_start(&etimer_process, NULL);

#if RF2XXBB
  ds2401_init();
  node_id_restore();

  /* Get a random seed for the 802.15.4 packet sequence number.
   * Some layers will ignore duplicates found in a history (e.g. Contikimac)
   * causing the initial packets to be ignored after a short-cycle restart.
   */
  random_init(rng_get_uint8());

  ctimer_init();

  init_net();
#else /* !RF2XXBB */
/* Original RF230 combined mac/radio driver */
/* mac process must be started before tcpip process! */
  process_start(&mac_process, NULL);
  process_start(&tcpip_process, NULL);
#endif /* RF2XXBB */

  /* Autostart other processes */
  autostart_start(autostart_processes);

  /*---If using coffee file system create initial web content if necessary---*/
#if COFFEE_FILES
  int fa = cfs_open( "/index.html", CFS_READ);
  if (fa<0) {     //Make some default web content
    PRINTA("No index.html file found, creating upload.html!\n");
    PRINTA("Formatting FLASH file system for coffee...");
    cfs_coffee_format();
    PRINTA("Done!\n");
    fa = cfs_open( "/index.html", CFS_WRITE);
    int r = cfs_write(fa, &"It works!", 9);
    if (r<0) PRINTA("Can''t create /index.html!\n");
    cfs_close(fa);
//  fa = cfs_open("upload.html"), CFW_WRITE);
// <html><body><form action="upload.html" enctype="multipart/form-data" method="post"><input name="userfile" type="file" size="50" /><input value="Upload" type="submit" /></form></body></html>
  }
#endif /* COFFEE_FILES */

/* Add addresses for testing */
#if 0
{
  uip_ip6addr_t ipaddr;
  uip_ip6addr(&ipaddr, 0xaaaa, 0, 0, 0, 0, 0, 0, 0);
  uip_ds6_addr_add(&ipaddr, 0, ADDR_AUTOCONF);
//  uip_ds6_prefix_add(&ipaddr,64,0);
}
#endif

/*--------------------------Announce the configuration---------------------*/
#if ANNOUNCE_BOOT
{
#if AVR_WEBSERVER
  uint8_t i;
  char buf1[40],buf[40];
  unsigned int size;

  /* List all IPv6 addresses currently configured on the interface */
  for (i=0;i<UIP_DS6_ADDR_NB;i++) {
	if (uip_ds6_if.addr_list[i].isused) {
	   httpd_cgi_sprint_ip6(uip_ds6_if.addr_list[i].ipaddr,buf);
       PRINTA("IPv6 Address: %s\n",buf);
	}
  }
   /* Read server/domain names from EEPROM with interrupts disabled */
   cli();
   eeprom_read_block (buf1,eemem_server_name, sizeof(eemem_server_name));
   eeprom_read_block (buf,eemem_domain_name, sizeof(eemem_domain_name));
   sei();
   buf1[sizeof(eemem_server_name)]=0;
   PRINTA("%s",buf1);
   buf[sizeof(eemem_domain_name)]=0;
   size=httpd_fs_get_size();
#ifndef COFFEE_FILES
   PRINTA(".%s online with fixed %u byte web content\n",buf,size);
#elif COFFEE_FILES==1
   PRINTA(".%s online with static %u byte EEPROM file system\n",buf,size);
#elif COFFEE_FILES==2
   PRINTA(".%s online with dynamic %u KB EEPROM file system\n",buf,size>>10);
#elif COFFEE_FILES==3
   PRINTA(".%s online with static %u byte program memory file system\n",buf,size);
#elif COFFEE_FILES==4
   PRINTA(".%s online with dynamic %u KB program memory file system\n",buf,size>>10);
#endif /* COFFEE_FILES */

#else
   PRINTA("Online\n");
#endif /* AVR_WEBSERVER */

#endif /* ANNOUNCE_BOOT */
}
}
Пример #30
0
/**
  * @brief  Main function.
  *
  * Optical-flow sensor firmware entry point: brings up LEDs, FPU, timers,
  * USB, MAVLink, camera, gyro, USART, I2C and sonar, then runs the endless
  * acquisition loop — read two recent frames, compute optical flow
  * (block-matching or KLT), accumulate results and publish them over I2C
  * and MAVLink.  Never returns.
  */
int main(void)
{
	__enable_irq();
	snapshot_buffer = BuildCameraImageBuffer(snapshot_buffer_mem);

	/* load settings and parameters */
	global_data_reset_param_defaults();
	global_data_reset();

	/* init led */
	LEDInit(LED_ACT);
	LEDInit(LED_COM);
	LEDInit(LED_ERR);
	LEDOff(LED_ACT);
	LEDOff(LED_COM);
	LEDOff(LED_ERR);

	/* enable FPU on Cortex-M4F core */
	SCB_CPACR |= ((3UL << 10 * 2) | (3UL << 11 * 2)); /* set CP10 Full Access and set CP11 Full Access */

	/* init timers */
	timer_init();

	/* init usb */
	USBD_Init(	&USB_OTG_dev,
				USB_OTG_FS_CORE_ID,
				&USR_desc,
				&USBD_CDC_cb,
				&USR_cb);

	/* init mavlink */
	communication_init();

	/* initialize camera: */
	img_stream_param.size.x = FLOW_IMAGE_SIZE;
	img_stream_param.size.y = FLOW_IMAGE_SIZE;
	img_stream_param.binning = 4;
	{
		/* five ring buffers for the continuous image stream */
		camera_image_buffer buffers[5] = {
			BuildCameraImageBuffer(image_buffer_8bit_1),
			BuildCameraImageBuffer(image_buffer_8bit_2),
			BuildCameraImageBuffer(image_buffer_8bit_3),
			BuildCameraImageBuffer(image_buffer_8bit_4),
			BuildCameraImageBuffer(image_buffer_8bit_5)
		};
		camera_init(&cam_ctx, mt9v034_get_sensor_interface(), dcmi_get_transport_interface(), 
					mt9v034_get_clks_per_row(64, 4) * 1, mt9v034_get_clks_per_row(64, 4) * 64, 2.0,
					&img_stream_param, buffers, 5);
	}

	/* gyro config */
	gyro_config();

	/* usart config*/
	usart_init();

    /* i2c config*/
    i2c_init();

	/* sonar config*/
	float sonar_distance_filtered = 0.0f; // distance in meter
	float sonar_distance_raw = 0.0f; // distance in meter
	bool distance_valid = false;
	sonar_config();

	/* reset/start timers */
	timer_register(sonar_update_fn, SONAR_POLL_MS);
	timer_register(system_state_send_fn, SYSTEM_STATE_MS);
	timer_register(system_receive_fn, SYSTEM_STATE_MS / 2);
	timer_register(send_params_fn, PARAMS_MS);
	timer_register(send_video_fn, global_data.param[PARAM_VIDEO_RATE]);
	timer_register(take_snapshot_fn, 500);
	//timer_register(switch_params_fn, 2000);

	/* variables */
	uint32_t counter = 0;

	result_accumulator_ctx mavlink_accumulator;

	result_accumulator_init(&mavlink_accumulator);
	
	/* frame-rate bookkeeping for the TIMING debug message */
	uint32_t fps_timing_start = get_boot_time_us();
	uint16_t fps_counter = 0;
	uint16_t fps_skipped_counter = 0;
	
	uint32_t last_frame_index = 0;
	
	/* main loop */
	while (1)
	{
		/* check timers */
		timer_check();
		
		/* snapshot finished? acknowledge and forward it over MAVLink */
		if (snap_capture_done) {
			snap_capture_done = false;
			camera_snapshot_acknowledge(&cam_ctx);
			snap_ready = true;
			if (snap_capture_success) {
				/* send the snapshot! */
				LEDToggle(LED_COM);
				mavlink_send_image(&snapshot_buffer);
			}
		}

		/* calculate focal_length in pixel */
		const float focal_length_px = (global_data.param[PARAM_FOCAL_LENGTH_MM]) / (4.0f * 0.006f); //original focal length: 12mm pixelsize: 6um, binning 4 enabled

		/* new gyroscope data */
		float x_rate_sensor, y_rate_sensor, z_rate_sensor;
		int16_t gyro_temp;
		gyro_read(&x_rate_sensor, &y_rate_sensor, &z_rate_sensor,&gyro_temp);

		/* gyroscope coordinate transformation to flow sensor coordinates */
		float x_rate =   y_rate_sensor; // change x and y rates
		float y_rate = - x_rate_sensor;
		float z_rate =   z_rate_sensor; // z is correct

		/* get sonar data */
		distance_valid = sonar_read(&sonar_distance_filtered, &sonar_distance_raw);
		/* reset to zero for invalid distances */
		if (!distance_valid) {
			sonar_distance_filtered = 0.0f;
			sonar_distance_raw = 0.0f;
		}

		bool use_klt = global_data.param[PARAM_ALGORITHM_CHOICE] != 0;

		uint32_t start_computations = 0;
		
		/* get recent images */
		camera_image_buffer *frames[2];
		camera_img_stream_get_buffers(&cam_ctx, frames, 2, true);
		
		start_computations = get_boot_time_us();
		
		/* count frames the algorithm never saw (dropped between iterations) */
		int frame_delta = ((int32_t)frames[0]->frame_number - (int32_t)last_frame_index);
		last_frame_index = frames[0]->frame_number;
		fps_skipped_counter += frame_delta - 1;
		
		flow_klt_image *klt_images[2] = {NULL, NULL};
		{
			/* make sure that the new images get the correct treatment */
			/* this algorithm will still work if both images are new */
			int i;
			bool used_klt_image[2] = {false, false};
			for (i = 0; i < 2; ++i) {
				/* meta holds the frame number of the last preprocessing pass */
				if (frames[i]->frame_number != frames[i]->meta) {
					// the image is new. apply pre-processing:
					/* filter the new image */
					if (global_data.param[PARAM_ALGORITHM_IMAGE_FILTER]) {
						filter_image(frames[i]->buffer, frames[i]->param.p.size.x);
					}
					/* update meta data to mark it as an up-to date image: */
					frames[i]->meta = frames[i]->frame_number;
				} else {
					// the image has the preprocessing already applied.
					if (use_klt) {
						int j;
						/* find the klt image that matches: */
						for (j = 0; j < 2; ++j) {
							if (flow_klt_images[j].meta == frames[i]->frame_number) {
								used_klt_image[j] = true;
								klt_images[i] = &flow_klt_images[j];
							}
						}
					}
				}
			}
			if (use_klt) {
				/* only for KLT: */
				/* preprocess the images if they are not yet preprocessed */
				for (i = 0; i < 2; ++i) {
					if (klt_images[i] == NULL) {
						// need processing. find unused KLT image:
						int j;
						for (j = 0; j < 2; ++j) {
							if (!used_klt_image[j]) {
								used_klt_image[j] = true;
								klt_images[i] = &flow_klt_images[j];
								break;
							}
						}
						klt_preprocess_image(frames[i]->buffer, klt_images[i]);
					}
				}
			}
		}
		
		/* time between the two frames, in seconds */
		float frame_dt = (frames[0]->timestamp - frames[1]->timestamp) * 0.000001f;

		/* compute gyro rate in pixels and change to image coordinates */
		float x_rate_px = - y_rate * (focal_length_px * frame_dt);
		float y_rate_px =   x_rate * (focal_length_px * frame_dt);
		float z_rate_fr = - z_rate * frame_dt;

		/* compute optical flow in pixels */
		flow_raw_result flow_rslt[32];
		uint16_t flow_rslt_count = 0;
		if (!use_klt) {
			flow_rslt_count = compute_flow(frames[1]->buffer, frames[0]->buffer, x_rate_px, y_rate_px, z_rate_fr, flow_rslt, 32);
		} else {
			flow_rslt_count =  compute_klt(klt_images[1], klt_images[0], x_rate_px, y_rate_px, z_rate_fr, flow_rslt, 32);
		}

		/* calculate flow value from the raw results */
		float pixel_flow_x;
		float pixel_flow_y;
		float outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_RATIO];
		float min_outlier_threshold = 0;
		if(global_data.param[PARAM_ALGORITHM_CHOICE] == 0)
		{
			min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_BLOCK];
		}else
		{
			min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_KLT];
		}
		uint8_t qual = flow_extract_result(flow_rslt, flow_rslt_count, &pixel_flow_x, &pixel_flow_y, 
							outlier_threshold,  min_outlier_threshold);

		/* create flow image if needed (previous_image is not needed anymore)
		 * -> can be used for debugging purpose
		 */
		previous_image = frames[1];
		if (global_data.param[PARAM_USB_SEND_VIDEO])
		{
			/* overlay flow vectors onto the previous image: mark each feature
			 * origin with 255 and its (2x-scaled) destination with 200 */
			uint16_t frame_size = global_data.param[PARAM_IMAGE_WIDTH];
			uint8_t *prev_img = previous_image->buffer;
			for (int i = 0; i < flow_rslt_count; i++) {
				if (flow_rslt[i].quality > 0) {
					prev_img[flow_rslt[i].at_y * frame_size + flow_rslt[i].at_x] = 255;
					int ofs = (int)floor(flow_rslt[i].at_y + flow_rslt[i].y * 2 + 0.5f) * frame_size + (int)floor(flow_rslt[i].at_x + flow_rslt[i].x * 2 + 0.5f);
					if (ofs >= 0 && ofs < frame_size * frame_size) {
						prev_img[ofs] = 200;
					}
				}
			}
		}

		/* return the image buffers */
		camera_img_stream_return_buffers(&cam_ctx, frames, 2);
		
		/* decide which distance to use */
		float ground_distance = 0.0f;

		if(global_data.param[PARAM_SONAR_FILTERED])
		{
			ground_distance = sonar_distance_filtered;
		}
		else
		{
			ground_distance = sonar_distance_raw;
		}

		/* update I2C transmit buffer */
		update_TX_buffer(frame_dt, 
						 x_rate, y_rate, z_rate, gyro_temp, 
						 qual, pixel_flow_x, pixel_flow_y, 1.0f / focal_length_px, 
						 distance_valid, ground_distance, get_time_delta_us(get_sonar_measure_time()));

		/* accumulate the results */
		result_accumulator_feed(&mavlink_accumulator, frame_dt, 
								x_rate, y_rate, z_rate, gyro_temp, 
								qual, pixel_flow_x, pixel_flow_y, 1.0f / focal_length_px, 
								distance_valid, ground_distance, get_time_delta_us(get_sonar_measure_time()));

		uint32_t computaiton_time_us = get_time_delta_us(start_computations);

		counter++;
		fps_counter++;

        /* serial mavlink  + usb mavlink output throttled */
		if (counter % (uint32_t)global_data.param[PARAM_FLOW_SERIAL_THROTTLE_FACTOR] == 0)//throttling factor
		{
			float fps = 0;
			float fps_skip = 0;
			if (fps_counter + fps_skipped_counter > 100) {
				uint32_t dt = get_time_delta_us(fps_timing_start);
				fps_timing_start += dt;
				fps = (float)fps_counter / ((float)dt * 1e-6f);
				fps_skip = (float)fps_skipped_counter / ((float)dt * 1e-6f);
				fps_counter = 0;
				fps_skipped_counter = 0;

				mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "TIMING", get_boot_time_us(), computaiton_time_us, fps, fps_skip);
			}
			/* NOTE(review): frames[0] is dereferenced here although the
			 * buffers were already returned via
			 * camera_img_stream_return_buffers() above — this may read
			 * metadata of a buffer being refilled; confirm the ownership
			 * semantics of the camera driver. */
			mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "EXPOSURE", get_boot_time_us(), 
					frames[0]->param.exposure, frames[0]->param.analog_gain, cam_ctx.last_brightness);
			
			/* calculate the output values */
			result_accumulator_output_flow output_flow;
			result_accumulator_output_flow_rad output_flow_rad;
			int min_valid_ratio = global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO];
			result_accumulator_calculate_output_flow(&mavlink_accumulator, min_valid_ratio, &output_flow);
			result_accumulator_calculate_output_flow_rad(&mavlink_accumulator, min_valid_ratio, &output_flow_rad);

			// send flow
			mavlink_msg_optical_flow_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
					output_flow.flow_x, output_flow.flow_y,
					output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, 
					output_flow.quality, output_flow.ground_distance);

			mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
					output_flow_rad.integration_time, 
					output_flow_rad.integrated_x, output_flow_rad.integrated_y,
					output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro,
					output_flow_rad.temperature, output_flow_rad.quality,
					output_flow_rad.time_delta_distance_us,output_flow_rad.ground_distance);

			if (global_data.param[PARAM_USB_SEND_FLOW] && (output_flow.quality > 0 || global_data.param[PARAM_USB_SEND_QUAL_0]))
			{
				mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
						output_flow.flow_x, output_flow.flow_y,
						output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, 
						output_flow.quality, output_flow.ground_distance);

				mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
						output_flow_rad.integration_time, 
						output_flow_rad.integrated_x, output_flow_rad.integrated_y,
						output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro,
						output_flow_rad.temperature, output_flow_rad.quality,
						output_flow_rad.time_delta_distance_us,output_flow_rad.ground_distance);
			}

			if(global_data.param[PARAM_USB_SEND_GYRO])
			{
				mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "GYRO", get_boot_time_us(), x_rate, y_rate, z_rate);
			}

			result_accumulator_reset(&mavlink_accumulator);
		}

		/* forward flow from other sensors */
		if (counter % 2)
		{
			communication_receive_forward();
		}
	}
}