Example #1
// TODO: why doesn't the above function write these structs directly?
void result_accumulator_fill_i2c_data(result_accumulator_ctx *ctx, i2c_frame* f, i2c_integral_frame* f_integral) {
	result_accumulator_output_flow output_flow;
	result_accumulator_output_flow_i2c output_i2c;
	int min_valid_ratio = global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO];
	result_accumulator_calculate_output_flow(ctx, min_valid_ratio, &output_flow);
	result_accumulator_calculate_output_flow_i2c(ctx, min_valid_ratio, &output_i2c);

	/* write the i2c_frame */
	f->frame_count = ctx->frame_count;
	f->pixel_flow_x_sum = output_flow.flow_x;
	f->pixel_flow_y_sum = output_flow.flow_y;
	f->flow_comp_m_x = floor(output_flow.flow_comp_m_x * 1000.0f + 0.5f); // x1000, rounded to nearest integer
	f->flow_comp_m_y = floor(output_flow.flow_comp_m_y * 1000.0f + 0.5f); // x1000, rounded to nearest integer
	f->qual = output_flow.quality;
	f->ground_distance = output_i2c.ground_distance;

	f->gyro_x_rate = output_i2c.gyro_x;
	f->gyro_y_rate = output_i2c.gyro_y;
	f->gyro_z_rate = output_i2c.gyro_z;
	f->gyro_range = getGyroRange();

	/* report the age of the last distance measurement in milliseconds, saturated at 255 */
	if (output_i2c.time_delta_distance_us < 255 * 1000) {
		f->distance_timestamp = output_i2c.time_delta_distance_us / 1000; // convert to ms
	} else {
		f->distance_timestamp = 255;
	}

	/* write the i2c_integral_frame */
	f_integral->frame_count_since_last_readout = output_i2c.valid_frames;
	f_integral->gyro_x_rate_integral = output_i2c.integrated_gyro_x;     // mrad*10
	f_integral->gyro_y_rate_integral = output_i2c.integrated_gyro_y;     // mrad*10
	f_integral->gyro_z_rate_integral = output_i2c.integrated_gyro_z;     // mrad*10
	f_integral->pixel_flow_x_integral = output_i2c.rad_flow_x;           // mrad*10
	f_integral->pixel_flow_y_integral = output_i2c.rad_flow_y;           // mrad*10
	f_integral->integration_timespan = output_i2c.integration_time;      // microseconds
	f_integral->ground_distance = output_i2c.ground_distance;            // millimeters
	f_integral->distance_timestamp = output_i2c.time_delta_distance_us;  // microseconds
	f_integral->qual = output_i2c.quality;                               // 0-255 linear quality measure, 0 = bad, 255 = best
	f_integral->gyro_temperature = output_i2c.temperature;               // centi-degrees Celsius (deg C * 100)
}
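
/*
 * Usage sketch (not part of the original source): one way a caller might use
 * result_accumulator_fill_i2c_data() to populate the two I2C reply frames.
 * The function and struct names below appear in the example above; the
 * surrounding context (the function name and what happens with the frames
 * afterwards) is hypothetical and only illustrates the calling convention.
 */
static void example_prepare_i2c_reply(result_accumulator_ctx *acc)
{
	i2c_frame frame;                   /* instantaneous flow frame */
	i2c_integral_frame frame_integral; /* integrated-since-last-readout frame */

	/* fill both frames from the accumulated results */
	result_accumulator_fill_i2c_data(acc, &frame, &frame_integral);

	/* ...copy the filled frames into the I2C transmit buffer here... */
}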
Example #2
File: main.c Project: leitwert/Flow
/**
  * @brief  Main function.
  */
int main(void)
{
	__enable_irq();
	snapshot_buffer = BuildCameraImageBuffer(snapshot_buffer_mem);

	/* load settings and parameters */
	global_data_reset_param_defaults();
	global_data_reset();

	/* init led */
	LEDInit(LED_ACT);
	LEDInit(LED_COM);
	LEDInit(LED_ERR);
	LEDOff(LED_ACT);
	LEDOff(LED_COM);
	LEDOff(LED_ERR);

	/* enable FPU on Cortex-M4F core */
	SCB_CPACR |= ((3UL << 10 * 2) | (3UL << 11 * 2)); /* set CP10 Full Access and set CP11 Full Access */

	/* init timers */
	timer_init();

	/* init usb */
	USBD_Init(	&USB_OTG_dev,
				USB_OTG_FS_CORE_ID,
				&USR_desc,
				&USBD_CDC_cb,
				&USR_cb);

	/* init mavlink */
	communication_init();

	/* initialize camera: */
	img_stream_param.size.x = FLOW_IMAGE_SIZE;
	img_stream_param.size.y = FLOW_IMAGE_SIZE;
	img_stream_param.binning = 4;
	{
		camera_image_buffer buffers[5] = {
			BuildCameraImageBuffer(image_buffer_8bit_1),
			BuildCameraImageBuffer(image_buffer_8bit_2),
			BuildCameraImageBuffer(image_buffer_8bit_3),
			BuildCameraImageBuffer(image_buffer_8bit_4),
			BuildCameraImageBuffer(image_buffer_8bit_5)
		};
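		/* several image buffers are handed to the camera driver, presumably so new frames
		 * can be captured while older ones are still being processed */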
		camera_init(&cam_ctx, mt9v034_get_sensor_interface(), dcmi_get_transport_interface(), 
					mt9v034_get_clks_per_row(64, 4) * 1, mt9v034_get_clks_per_row(64, 4) * 64, 2.0,
					&img_stream_param, buffers, 5);
	}

	/* gyro config */
	gyro_config();

	/* usart config*/
	usart_init();

    /* i2c config*/
    i2c_init();

	/* sonar config*/
	float sonar_distance_filtered = 0.0f; // distance in meters
	float sonar_distance_raw = 0.0f; // distance in meters
	bool distance_valid = false;
	sonar_config();

	/* reset/start timers */
	timer_register(sonar_update_fn, SONAR_POLL_MS);
	timer_register(system_state_send_fn, SYSTEM_STATE_MS);
	timer_register(system_receive_fn, SYSTEM_STATE_MS / 2);
	timer_register(send_params_fn, PARAMS_MS);
	timer_register(send_video_fn, global_data.param[PARAM_VIDEO_RATE]);
	timer_register(take_snapshot_fn, 500);
	//timer_register(switch_params_fn, 2000);

	/* variables */
	uint32_t counter = 0;

	result_accumulator_ctx mavlink_accumulator;

	result_accumulator_init(&mavlink_accumulator);
	
	uint32_t fps_timing_start = get_boot_time_us();
	uint16_t fps_counter = 0;         /* frames processed since the last FPS report */
	uint16_t fps_skipped_counter = 0; /* camera frames skipped (not processed) since the last report */

	uint32_t last_frame_index = 0;    /* frame counter value of the most recently processed camera frame */
	
	/* main loop */
	while (1)
	{
		/* check timers */
		timer_check();
		
		if (snap_capture_done) {
			snap_capture_done = false;
			camera_snapshot_acknowledge(&cam_ctx);
			snap_ready = true;
			if (snap_capture_success) {
				/* send the snapshot! */
				LEDToggle(LED_COM);
				mavlink_send_image(&snapshot_buffer);
			}
		}

		/* calculate focal_length in pixel */
		const float focal_length_px = (global_data.param[PARAM_FOCAL_LENGTH_MM]) / (4.0f * 0.006f); // original focal length: 12 mm, pixel size: 6 um, binning 4 enabled

		/* new gyroscope data */
		float x_rate_sensor, y_rate_sensor, z_rate_sensor;
		int16_t gyro_temp;
		gyro_read(&x_rate_sensor, &y_rate_sensor, &z_rate_sensor, &gyro_temp);

		/* gyroscope coordinate transformation to flow sensor coordinates */
		float x_rate =   y_rate_sensor; // swap x and y: sensor y becomes flow x
		float y_rate = - x_rate_sensor; // sensor x becomes flow -y
		float z_rate =   z_rate_sensor; // z axis is unchanged

		/* get sonar data */
		distance_valid = sonar_read(&sonar_distance_filtered, &sonar_distance_raw);
		/* reset to zero for invalid distances */
		if (!distance_valid) {
			sonar_distance_filtered = 0.0f;
			sonar_distance_raw = 0.0f;
		}

		bool use_klt = global_data.param[PARAM_ALGORITHM_CHOICE] != 0;

		uint32_t start_computations = 0;
		
		/* get recent images */
		camera_image_buffer *frames[2];
		camera_img_stream_get_buffers(&cam_ctx, frames, 2, true);
		
		start_computations = get_boot_time_us();
		
		int frame_delta = ((int32_t)frames[0]->frame_number - (int32_t)last_frame_index);
		last_frame_index = frames[0]->frame_number;
		fps_skipped_counter += frame_delta - 1;
		
		flow_klt_image *klt_images[2] = {NULL, NULL};
		{
			/* make sure that the new images get the correct treatment */
			/* this algorithm will still work if both images are new */
			int i;
			bool used_klt_image[2] = {false, false};
			for (i = 0; i < 2; ++i) {
				if (frames[i]->frame_number != frames[i]->meta) {
					// the image is new. apply pre-processing:
					/* filter the new image */
					if (global_data.param[PARAM_ALGORITHM_IMAGE_FILTER]) {
						filter_image(frames[i]->buffer, frames[i]->param.p.size.x);
					}
					/* update meta data to mark it as an up-to date image: */
					frames[i]->meta = frames[i]->frame_number;
				} else {
					// the image has the preprocessing already applied.
					if (use_klt) {
						int j;
						/* find the klt image that matches: */
						for (j = 0; j < 2; ++j) {
							if (flow_klt_images[j].meta == frames[i]->frame_number) {
								used_klt_image[j] = true;
								klt_images[i] = &flow_klt_images[j];
							}
						}
					}
				}
			}
			if (use_klt) {
				/* only for KLT: */
				/* preprocess the images if they are not yet preprocessed */
				for (i = 0; i < 2; ++i) {
					if (klt_images[i] == NULL) {
						// need processing. find unused KLT image:
						int j;
						for (j = 0; j < 2; ++j) {
							if (!used_klt_image[j]) {
								used_klt_image[j] = true;
								klt_images[i] = &flow_klt_images[j];
								break;
							}
						}
						klt_preprocess_image(frames[i]->buffer, klt_images[i]);
					}
				}
			}
		}
		
		float frame_dt = (frames[0]->timestamp - frames[1]->timestamp) * 0.000001f;

		/* compute gyro rate in pixels and change to image coordinates */
		float x_rate_px = - y_rate * (focal_length_px * frame_dt);
		float y_rate_px =   x_rate * (focal_length_px * frame_dt);
		float z_rate_fr = - z_rate * frame_dt;
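		/* frame_dt is in seconds (timestamps are in microseconds); rate (rad/s) * focal length (px) * dt (s)
		 * gives the expected image shift in pixels over the frame interval, presumably used by the flow
		 * algorithms below to compensate for rotation */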

		/* compute optical flow in pixels */
		flow_raw_result flow_rslt[32];
		uint16_t flow_rslt_count = 0;
		if (!use_klt) {
			flow_rslt_count = compute_flow(frames[1]->buffer, frames[0]->buffer, x_rate_px, y_rate_px, z_rate_fr, flow_rslt, 32);
		} else {
			flow_rslt_count = compute_klt(klt_images[1], klt_images[0], x_rate_px, y_rate_px, z_rate_fr, flow_rslt, 32);
		}

		/* calculate flow value from the raw results */
		float pixel_flow_x;
		float pixel_flow_y;
		float outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_RATIO];
		float min_outlier_threshold = 0;
		if(global_data.param[PARAM_ALGORITHM_CHOICE] == 0)
		{
			min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_BLOCK];
		}else
		{
			min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_KLT];
		}
		uint8_t qual = flow_extract_result(flow_rslt, flow_rslt_count, &pixel_flow_x, &pixel_flow_y, 
							outlier_threshold,  min_outlier_threshold);

		/* create the flow debug image if needed (previous_image is not needed anymore)
		 * -> can be used for debugging purposes
		 */
		previous_image = frames[1];
		if (global_data.param[PARAM_USB_SEND_VIDEO])
		{
			uint16_t frame_size = global_data.param[PARAM_IMAGE_WIDTH];
			uint8_t *prev_img = previous_image->buffer;
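			/* debug overlay: for each usable result, mark the feature position with 255 and the
			 * (2x scaled) flow endpoint with 200 in the previous image */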
			for (int i = 0; i < flow_rslt_count; i++) {
				if (flow_rslt[i].quality > 0) {
					prev_img[flow_rslt[i].at_y * frame_size + flow_rslt[i].at_x] = 255;
					int ofs = (int)floor(flow_rslt[i].at_y + flow_rslt[i].y * 2 + 0.5f) * frame_size + (int)floor(flow_rslt[i].at_x + flow_rslt[i].x * 2 + 0.5f);
					if (ofs >= 0 && ofs < frame_size * frame_size) {
						prev_img[ofs] = 200;
					}
				}
			}
		}

		/* return the image buffers */
		camera_img_stream_return_buffers(&cam_ctx, frames, 2);
		
		/* decide which distance to use */
		float ground_distance = 0.0f;

		if(global_data.param[PARAM_SONAR_FILTERED])
		{
			ground_distance = sonar_distance_filtered;
		}
		else
		{
			ground_distance = sonar_distance_raw;
		}

		/* update I2C transmit buffer */
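		/* note: the same measurement set is also fed to the MAVLink accumulator below;
		 * 1.0f / focal_length_px is presumably the pixel-to-radian scale of the optics */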
		update_TX_buffer(frame_dt, 
						 x_rate, y_rate, z_rate, gyro_temp, 
						 qual, pixel_flow_x, pixel_flow_y, 1.0f / focal_length_px, 
						 distance_valid, ground_distance, get_time_delta_us(get_sonar_measure_time()));

		/* accumulate the results */
		result_accumulator_feed(&mavlink_accumulator, frame_dt, 
								x_rate, y_rate, z_rate, gyro_temp, 
								qual, pixel_flow_x, pixel_flow_y, 1.0f / focal_length_px, 
								distance_valid, ground_distance, get_time_delta_us(get_sonar_measure_time()));

		uint32_t computation_time_us = get_time_delta_us(start_computations);

		counter++;
		fps_counter++;

		/* serial mavlink + usb mavlink output, throttled */
		if (counter % (uint32_t)global_data.param[PARAM_FLOW_SERIAL_THROTTLE_FACTOR] == 0) // throttling factor
		{
			float fps = 0;
			float fps_skip = 0;
			if (fps_counter + fps_skipped_counter > 100) {
				uint32_t dt = get_time_delta_us(fps_timing_start);
				fps_timing_start += dt;
				fps = (float)fps_counter / ((float)dt * 1e-6f);
				fps_skip = (float)fps_skipped_counter / ((float)dt * 1e-6f);
				fps_counter = 0;
				fps_skipped_counter = 0;

				mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "TIMING", get_boot_time_us(), computation_time_us, fps, fps_skip);
			}
			mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "EXPOSURE", get_boot_time_us(), 
					frames[0]->param.exposure, frames[0]->param.analog_gain, cam_ctx.last_brightness);
			
			/* calculate the output values */
			result_accumulator_output_flow output_flow;
			result_accumulator_output_flow_rad output_flow_rad;
			int min_valid_ratio = global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO];
			result_accumulator_calculate_output_flow(&mavlink_accumulator, min_valid_ratio, &output_flow);
			result_accumulator_calculate_output_flow_rad(&mavlink_accumulator, min_valid_ratio, &output_flow_rad);

			// send flow
			mavlink_msg_optical_flow_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
					output_flow.flow_x, output_flow.flow_y,
					output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, 
					output_flow.quality, output_flow.ground_distance);

			mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
					output_flow_rad.integration_time, 
					output_flow_rad.integrated_x, output_flow_rad.integrated_y,
					output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro,
					output_flow_rad.temperature, output_flow_rad.quality,
					output_flow_rad.time_delta_distance_us, output_flow_rad.ground_distance);

			if (global_data.param[PARAM_USB_SEND_FLOW] && (output_flow.quality > 0 || global_data.param[PARAM_USB_SEND_QUAL_0]))
			{
				mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
						output_flow.flow_x, output_flow.flow_y,
						output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, 
						output_flow.quality, output_flow.ground_distance);

				mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID],
						output_flow_rad.integration_time, 
						output_flow_rad.integrated_x, output_flow_rad.integrated_y,
						output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro,
						output_flow_rad.temperature, output_flow_rad.quality,
						output_flow_rad.time_delta_distance_us, output_flow_rad.ground_distance);
			}

			if(global_data.param[PARAM_USB_SEND_GYRO])
			{
				mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "GYRO", get_boot_time_us(), x_rate, y_rate, z_rate);
			}

			result_accumulator_reset(&mavlink_accumulator);
		}

		/* forward flow from other sensors */
		if (counter % 2)
		{
			communication_receive_forward();
		}
	}
}