/*----------------------------------------------------------------------------*/
/* Apply a vertical-gradient (y-direction) Sobel filter to src, writing the
 * result into dst. Returns 0 on success, -1 if the 3x3 mask cannot be
 * allocated. */
int sobel_y_image(my1Image *src, my1Image *dst)
{
	/* Sobel-Y kernel: responds to horizontal edges */
	int kernel[] = { -1,-2,-1, 0,0,0, 1,2,1 };
	my1Mask mask;
	if (!createmask(&mask, 3))
		return -1;
	setmask(&mask, kernel);
	/* anchor the mask at its centre element */
	mask.orig_x = 1;
	mask.orig_y = 1;
	filter_image(src, dst, &mask);
	freemask(&mask);
	return 0;
}
/*----------------------------------------------------------------------------*/
/* Apply a 4-neighbour Laplacian filter to src, writing the result into dst.
 * Returns 0 on success, -1 if the 3x3 mask cannot be allocated. */
int laplace_image(my1Image *src, my1Image *dst)
{
	/* standard 4-connected Laplacian kernel */
	int kernel[] = { 0,-1,0, -1,4,-1, 0,-1,0 };
	my1Mask mask;
	if (!createmask(&mask, 3))
		return -1;
	setmask(&mask, kernel);
	/* anchor the mask at its centre element */
	mask.orig_x = 1;
	mask.orig_y = 1;
	filter_image(src, dst, &mask);
	freemask(&mask);
	return 0;
}
void FilterBrush::BrushMove( const Point source, const Point target , int bsize) { ImpressionistDoc* pDoc = GetDocument(); ImpressionistUI* dlg=pDoc->m_pUI; if ( pDoc == NULL ) { printf( "FilterBrush::BrushMove document is NULL\n" ); return; } dlg->get_filter_parameters(m_matrix, m_divisor, m_offset); int size = pDoc->getSize(); // maybe useful if (bsize > 0) { size = bsize; } filter_image(pDoc->m_ucPainting, pDoc->m_nWidth, pDoc->m_nHeight, target.x - size / 2, target.y - size / 2, size, size, m_matrix, m_divisor, m_offset, pDoc); dlg->m_paintView->RestoreContent(); }
/** * @brief Main function. */ int main(void) { __enable_irq(); snapshot_buffer = BuildCameraImageBuffer(snapshot_buffer_mem); /* load settings and parameters */ global_data_reset_param_defaults(); global_data_reset(); /* init led */ LEDInit(LED_ACT); LEDInit(LED_COM); LEDInit(LED_ERR); LEDOff(LED_ACT); LEDOff(LED_COM); LEDOff(LED_ERR); /* enable FPU on Cortex-M4F core */ SCB_CPACR |= ((3UL << 10 * 2) | (3UL << 11 * 2)); /* set CP10 Full Access and set CP11 Full Access */ /* init timers */ timer_init(); /* init usb */ USBD_Init( &USB_OTG_dev, USB_OTG_FS_CORE_ID, &USR_desc, &USBD_CDC_cb, &USR_cb); /* init mavlink */ communication_init(); /* initialize camera: */ img_stream_param.size.x = FLOW_IMAGE_SIZE; img_stream_param.size.y = FLOW_IMAGE_SIZE; img_stream_param.binning = 4; { camera_image_buffer buffers[5] = { BuildCameraImageBuffer(image_buffer_8bit_1), BuildCameraImageBuffer(image_buffer_8bit_2), BuildCameraImageBuffer(image_buffer_8bit_3), BuildCameraImageBuffer(image_buffer_8bit_4), BuildCameraImageBuffer(image_buffer_8bit_5) }; camera_init(&cam_ctx, mt9v034_get_sensor_interface(), dcmi_get_transport_interface(), mt9v034_get_clks_per_row(64, 4) * 1, mt9v034_get_clks_per_row(64, 4) * 64, 2.0, &img_stream_param, buffers, 5); } /* gyro config */ gyro_config(); /* usart config*/ usart_init(); /* i2c config*/ i2c_init(); /* sonar config*/ float sonar_distance_filtered = 0.0f; // distance in meter float sonar_distance_raw = 0.0f; // distance in meter bool distance_valid = false; sonar_config(); /* reset/start timers */ timer_register(sonar_update_fn, SONAR_POLL_MS); timer_register(system_state_send_fn, SYSTEM_STATE_MS); timer_register(system_receive_fn, SYSTEM_STATE_MS / 2); timer_register(send_params_fn, PARAMS_MS); timer_register(send_video_fn, global_data.param[PARAM_VIDEO_RATE]); timer_register(take_snapshot_fn, 500); //timer_register(switch_params_fn, 2000); /* variables */ uint32_t counter = 0; result_accumulator_ctx mavlink_accumulator; 
result_accumulator_init(&mavlink_accumulator); uint32_t fps_timing_start = get_boot_time_us(); uint16_t fps_counter = 0; uint16_t fps_skipped_counter = 0; uint32_t last_frame_index = 0; /* main loop */ while (1) { /* check timers */ timer_check(); if (snap_capture_done) { snap_capture_done = false; camera_snapshot_acknowledge(&cam_ctx); snap_ready = true; if (snap_capture_success) { /* send the snapshot! */ LEDToggle(LED_COM); mavlink_send_image(&snapshot_buffer); } } /* calculate focal_length in pixel */ const float focal_length_px = (global_data.param[PARAM_FOCAL_LENGTH_MM]) / (4.0f * 0.006f); //original focal lenght: 12mm pixelsize: 6um, binning 4 enabled /* new gyroscope data */ float x_rate_sensor, y_rate_sensor, z_rate_sensor; int16_t gyro_temp; gyro_read(&x_rate_sensor, &y_rate_sensor, &z_rate_sensor,&gyro_temp); /* gyroscope coordinate transformation to flow sensor coordinates */ float x_rate = y_rate_sensor; // change x and y rates float y_rate = - x_rate_sensor; float z_rate = z_rate_sensor; // z is correct /* get sonar data */ distance_valid = sonar_read(&sonar_distance_filtered, &sonar_distance_raw); /* reset to zero for invalid distances */ if (!distance_valid) { sonar_distance_filtered = 0.0f; sonar_distance_raw = 0.0f; } bool use_klt = global_data.param[PARAM_ALGORITHM_CHOICE] != 0; uint32_t start_computations = 0; /* get recent images */ camera_image_buffer *frames[2]; camera_img_stream_get_buffers(&cam_ctx, frames, 2, true); start_computations = get_boot_time_us(); int frame_delta = ((int32_t)frames[0]->frame_number - (int32_t)last_frame_index); last_frame_index = frames[0]->frame_number; fps_skipped_counter += frame_delta - 1; flow_klt_image *klt_images[2] = {NULL, NULL}; { /* make sure that the new images get the correct treatment */ /* this algorithm will still work if both images are new */ int i; bool used_klt_image[2] = {false, false}; for (i = 0; i < 2; ++i) { if (frames[i]->frame_number != frames[i]->meta) { // the image is new. 
apply pre-processing: /* filter the new image */ if (global_data.param[PARAM_ALGORITHM_IMAGE_FILTER]) { filter_image(frames[i]->buffer, frames[i]->param.p.size.x); } /* update meta data to mark it as an up-to date image: */ frames[i]->meta = frames[i]->frame_number; } else { // the image has the preprocessing already applied. if (use_klt) { int j; /* find the klt image that matches: */ for (j = 0; j < 2; ++j) { if (flow_klt_images[j].meta == frames[i]->frame_number) { used_klt_image[j] = true; klt_images[i] = &flow_klt_images[j]; } } } } } if (use_klt) { /* only for KLT: */ /* preprocess the images if they are not yet preprocessed */ for (i = 0; i < 2; ++i) { if (klt_images[i] == NULL) { // need processing. find unused KLT image: int j; for (j = 0; j < 2; ++j) { if (!used_klt_image[j]) { used_klt_image[j] = true; klt_images[i] = &flow_klt_images[j]; break; } } klt_preprocess_image(frames[i]->buffer, klt_images[i]); } } } } float frame_dt = (frames[0]->timestamp - frames[1]->timestamp) * 0.000001f; /* compute gyro rate in pixels and change to image coordinates */ float x_rate_px = - y_rate * (focal_length_px * frame_dt); float y_rate_px = x_rate * (focal_length_px * frame_dt); float z_rate_fr = - z_rate * frame_dt; /* compute optical flow in pixels */ flow_raw_result flow_rslt[32]; uint16_t flow_rslt_count = 0; if (!use_klt) { flow_rslt_count = compute_flow(frames[1]->buffer, frames[0]->buffer, x_rate_px, y_rate_px, z_rate_fr, flow_rslt, 32); } else { flow_rslt_count = compute_klt(klt_images[1], klt_images[0], x_rate_px, y_rate_px, z_rate_fr, flow_rslt, 32); } /* calculate flow value from the raw results */ float pixel_flow_x; float pixel_flow_y; float outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_RATIO]; float min_outlier_threshold = 0; if(global_data.param[PARAM_ALGORITHM_CHOICE] == 0) { min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_BLOCK]; }else { min_outlier_threshold = 
global_data.param[PARAM_ALGORITHM_OUTLIER_THR_KLT]; } uint8_t qual = flow_extract_result(flow_rslt, flow_rslt_count, &pixel_flow_x, &pixel_flow_y, outlier_threshold, min_outlier_threshold); /* create flow image if needed (previous_image is not needed anymore) * -> can be used for debugging purpose */ previous_image = frames[1]; if (global_data.param[PARAM_USB_SEND_VIDEO]) { uint16_t frame_size = global_data.param[PARAM_IMAGE_WIDTH]; uint8_t *prev_img = previous_image->buffer; for (int i = 0; i < flow_rslt_count; i++) { if (flow_rslt[i].quality > 0) { prev_img[flow_rslt[i].at_y * frame_size + flow_rslt[i].at_x] = 255; int ofs = (int)floor(flow_rslt[i].at_y + flow_rslt[i].y * 2 + 0.5f) * frame_size + (int)floor(flow_rslt[i].at_x + flow_rslt[i].x * 2 + 0.5f); if (ofs >= 0 && ofs < frame_size * frame_size) { prev_img[ofs] = 200; } } } } /* return the image buffers */ camera_img_stream_return_buffers(&cam_ctx, frames, 2); /* decide which distance to use */ float ground_distance = 0.0f; if(global_data.param[PARAM_SONAR_FILTERED]) { ground_distance = sonar_distance_filtered; } else { ground_distance = sonar_distance_raw; } /* update I2C transmit buffer */ update_TX_buffer(frame_dt, x_rate, y_rate, z_rate, gyro_temp, qual, pixel_flow_x, pixel_flow_y, 1.0f / focal_length_px, distance_valid, ground_distance, get_time_delta_us(get_sonar_measure_time())); /* accumulate the results */ result_accumulator_feed(&mavlink_accumulator, frame_dt, x_rate, y_rate, z_rate, gyro_temp, qual, pixel_flow_x, pixel_flow_y, 1.0f / focal_length_px, distance_valid, ground_distance, get_time_delta_us(get_sonar_measure_time())); uint32_t computaiton_time_us = get_time_delta_us(start_computations); counter++; fps_counter++; /* serial mavlink + usb mavlink output throttled */ if (counter % (uint32_t)global_data.param[PARAM_FLOW_SERIAL_THROTTLE_FACTOR] == 0)//throttling factor { float fps = 0; float fps_skip = 0; if (fps_counter + fps_skipped_counter > 100) { uint32_t dt = 
get_time_delta_us(fps_timing_start); fps_timing_start += dt; fps = (float)fps_counter / ((float)dt * 1e-6f); fps_skip = (float)fps_skipped_counter / ((float)dt * 1e-6f); fps_counter = 0; fps_skipped_counter = 0; mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "TIMING", get_boot_time_us(), computaiton_time_us, fps, fps_skip); } mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "EXPOSURE", get_boot_time_us(), frames[0]->param.exposure, frames[0]->param.analog_gain, cam_ctx.last_brightness); /* calculate the output values */ result_accumulator_output_flow output_flow; result_accumulator_output_flow_rad output_flow_rad; int min_valid_ratio = global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO]; result_accumulator_calculate_output_flow(&mavlink_accumulator, min_valid_ratio, &output_flow); result_accumulator_calculate_output_flow_rad(&mavlink_accumulator, min_valid_ratio, &output_flow_rad); // send flow mavlink_msg_optical_flow_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], output_flow.flow_x, output_flow.flow_y, output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, output_flow.quality, output_flow.ground_distance); mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], output_flow_rad.integration_time, output_flow_rad.integrated_x, output_flow_rad.integrated_y, output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro, output_flow_rad.temperature, output_flow_rad.quality, output_flow_rad.time_delta_distance_us,output_flow_rad.ground_distance); if (global_data.param[PARAM_USB_SEND_FLOW] && (output_flow.quality > 0 || global_data.param[PARAM_USB_SEND_QUAL_0])) { mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], output_flow.flow_x, output_flow.flow_y, output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, output_flow.quality, output_flow.ground_distance); mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_2, 
get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], output_flow_rad.integration_time, output_flow_rad.integrated_x, output_flow_rad.integrated_y, output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro, output_flow_rad.temperature, output_flow_rad.quality, output_flow_rad.time_delta_distance_us,output_flow_rad.ground_distance); } if(global_data.param[PARAM_USB_SEND_GYRO]) { mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "GYRO", get_boot_time_us(), x_rate, y_rate, z_rate); } result_accumulator_reset(&mavlink_accumulator); } /* forward flow from other sensors */ if (counter % 2) { communication_receive_forward(); } } }
int main(int argc, char **argv) { FILE *fp; meta_parameters *metaSrc, *metaTrg; envi_header *envi; extern int currArg; // Pre-initialized to 1 radiometry_t radiometry=r_AMP; filter_type_t filter_type; char srcImage[255], trgImage[255], *inFile, outFile[255], filter_str[25]; int startX_src, startY_src, startX_trg, startY_trg, lines, samples, size; int subset=FALSE, filter=FALSE, geotiff=FALSE, line_count, sample_count; double lat_UL, lon_UL, lat_LR, lon_LR, yLine, xSample; float mean, scale; register float *img, *filtered_img=NULL; register int x, y, l; /* parse command line */ logflag=quietflag=FALSE; while (currArg < (argc-2)) { char *key = argv[currArg++]; if (strmatch(key,"-startX")) { CHECK_ARG(1); startX_src = atoi(GET_ARG(1)); subset = TRUE; } else if (strmatch(key,"-startY")) { CHECK_ARG(1); startY_src = atoi(GET_ARG(1)); subset = TRUE; } else if (strmatch(key,"-lines")) { CHECK_ARG(1); lines = atoi(GET_ARG(1)); subset = TRUE; } else if (strmatch(key,"-samples")) { CHECK_ARG(1); samples = atoi(GET_ARG(1)); subset = TRUE; } else if (strmatch(key,"-filter")) { CHECK_ARG(2); strcpy(filter_str, GET_ARG(2)); size = atoi(GET_ARG(1)); filter = TRUE; } else if (strmatch(key,"-geotiff")) { geotiff = TRUE; } else { printf( "\n**Invalid option: %s\n", argv[currArg-1]); usage(argv[0]); } } if ((argc-currArg)<2) {printf("Insufficient arguments.\n"); usage(argv[0]);} strcpy (srcImage, argv[currArg]); strcpy (trgImage, argv[currArg+1]); asfSplashScreen(argc, argv); // Ingesting CEOS files into ASF internal format asfPrintStatus("Ingesting source image: %s ...\n", srcImage); asf_import(radiometry, FALSE, FALSE, FALSE, "CEOS", "", "geocoded_image", NULL, NULL, -99.0, -99.0, NULL, NULL, NULL, TRUE, NULL, srcImage, "", srcImage); metaSrc = meta_read(srcImage); asfPrintStatus("Ingesting target image: %s ...\n", trgImage); asf_import(radiometry, FALSE, FALSE, FALSE, "CEOS", "", "geocoded_image", NULL, NULL, -99.0, -99.0, NULL, NULL, NULL, TRUE, NULL, trgImage, "", trgImage); 
metaTrg = meta_read(trgImage); // Check subset values for source image line_count = metaSrc->general->line_count; sample_count = metaSrc->general->sample_count; if (subset) { if (startX_src < 0 || startX_src > sample_count) startX_src = 0; if (startY_src < 0 || startY_src > line_count) startY_src = 0; if (lines < 0 || (lines-startY_src) > line_count) lines = line_count - startY_src; if (samples < 0 || (samples-startX_src) > sample_count) samples = sample_count - startX_src; } else { startX_src = startY_src = 0; lines = line_count; samples = sample_count; } // Assign filter if (filter) { if (strcmp(uc(filter_str), "AVERAGE") == 0) filter_type = AVERAGE; else if (strcmp(uc(filter_str), "SOBEL") == 0) filter_type = SOBEL; else if (strcmp(uc(filter_str), "FROST") == 0) filter_type = FROST; else if (strcmp(uc(filter_str), "LEE") == 0) filter_type = LEE; else if (strcmp(uc(filter_str), "GAMMA_MAP") == 0) filter_type = GAMMA_MAP; else { asfPrintWarning("Unsupported filter type '%s'- ignoring the filter" " settings\n", filter_str); filter = FALSE; } if (size%2 == 0 && filter_type != AVERAGE) { size--; asfPrintWarning("Filter kernel must have an odd number of lines and" "samples!\n"); } } // Allocate some memory for the subsets line_count = metaTrg->general->line_count; sample_count = metaTrg->general->sample_count; img = (float *) MALLOC(sizeof(float)*lines*samples); if (filter) filtered_img = (float *) MALLOC(sizeof(float)*lines*samples); // Determine geographic location of subset meta_get_latLon(metaSrc, startY_src, startX_src, 0.0, &lat_UL, &lon_UL); meta_get_latLon(metaSrc, startY_src+lines, startX_src+samples, 0.0, &lat_LR, &lon_LR); meta_get_lineSamp(metaTrg, lat_UL, lon_UL, 0.0, &yLine, &xSample); startX_trg = (int) (xSample + 0.5); startY_trg = (int) (yLine + 0.5); // READ IN SUBSETS // Read target image subset first to determine average brightness asfPrintStatus("\nGenerating subset for target image ...\n"); asfPrintStatus("start line: %d, start sample: %d, lines: 
%d, samples: %d\n", startY_trg, startX_trg, lines, samples); inFile = appendExt(trgImage, ".img"); sprintf(outFile, "%s_sub.img", trgImage); fp = FOPEN(inFile, "rb"); read_subset(fp, metaTrg, startX_trg, startY_trg, samples, lines, 0.0, &mean, img); FCLOSE(fp); // Compute scale factor mean *= -1; scale = 1.0 / (lines * samples); // Subtract this average off of target image for (y=0; y<lines; y++) { l = samples * y; for (x=0; x<samples; x++) img[l+x] = (img[l+x] + mean) * scale; } if (filter) { asfPrintStatus("\nFiltering target image subset with %s (%dx%d) ...\n", uc(filter_str), size, size); filter_image(img, filtered_img, filter_type, size, lines, samples); } // Update metadata and write target subset to file metaTrg->general->line_count = lines; metaTrg->general->sample_count = samples; metaTrg->general->start_line = startY_trg; metaTrg->general->start_sample = startX_trg; meta_write(metaTrg, outFile); envi = meta2envi(metaTrg); write_envi_header(outFile, outFile, metaTrg, envi); fp = FOPEN(outFile, "wb"); if (filter) put_float_lines(fp, metaTrg, 0, lines, filtered_img); else put_float_lines(fp, metaTrg, 0, lines, img); FCLOSE(fp); // Read source image subset applying for brightness asfPrintStatus("\nGenerating subset for source image ...\n"); asfPrintStatus("start line: %d, start sample: %d, lines: %d, samples: %d\n", startY_src, startX_src, lines, samples); inFile = appendExt(srcImage, ".img"); sprintf(outFile, "%s_sub.img", srcImage); fp = FOPEN(inFile, "rb"); read_subset(fp, metaSrc, startX_src, startY_src, samples, lines, mean, NULL, img); FCLOSE(fp); if (filter) { asfPrintStatus("\nFiltering source image subset with %s (%dx%d) ...\n", uc(filter_str), size, size); filter_image(img, filtered_img, filter_type, size, lines, samples); } // Update metadata and write source subset to file metaSrc->general->line_count = lines; metaSrc->general->sample_count = samples; metaSrc->general->start_line = startY_src; metaSrc->general->start_sample = startX_src; 
meta_write(metaSrc, outFile); envi = meta2envi(metaSrc); write_envi_header(outFile, outFile, metaSrc, envi); fp = FOPEN(outFile, "wb"); if (filter) put_float_lines(fp, metaSrc, 0, lines, filtered_img); else put_float_lines(fp, metaSrc, 0, lines, img); FCLOSE(fp); // Clean up FREE(img); meta_free(metaSrc); meta_free(metaTrg); // Exporting subsets to GeoTIFF if (geotiff) { output_format_t output_format=GEOTIFF; scale_t sample_mapping=SIGMA; asfPrintStatus("\nExporting source image subset to GeoTIFF ...\n"); sprintf(outFile, "%s_sub", srcImage); asf_export(output_format, sample_mapping, outFile, outFile); asfPrintStatus("\nExporting target image subset to GeoTIFF ...\n"); sprintf(outFile, "%s_sub", trgImage); asf_export(output_format, sample_mapping, outFile, outFile); } return (0); }