int main(int argc, char ** argv) { // Keep track of the start time of the program long long program_start_time = get_time(); // Let the user specify the number of frames to process int num_frames = 1; if (argc !=5){ fprintf(stderr, "usage: %s <num of frames> <num of threads><input file>", argv[0]); exit(1); } if (argc > 1){ num_frames = atoi(argv[1]); omp_num_threads = atoi(argv[2]); omp_num_threads2 = atoi(argv[3]); } printf("Num of threads: %d\n", omp_num_threads); printf("Num of threads: %d\n", omp_num_threads2); omp_set_nested(1); // Open video file char *video_file_name; video_file_name = argv[4]; avi_t *cell_file = AVI_open_input_file(video_file_name, 1); if (cell_file == NULL) { AVI_print_error("Error with AVI_open_input_file"); return -1; } int i, j, *crow, *ccol, pair_counter = 0, x_result_len = 0, Iter = 20, ns = 4, k_count = 0, n; MAT *cellx, *celly, *A; double *GICOV_spots, *t, *G, *x_result, *y_result, *V, *QAX_CENTERS, *QAY_CENTERS; double threshold = 1.8, radius = 10.0, delta = 3.0, dt = 0.01, b = 5.0; // Extract a cropped version of the first frame from the video file MAT *image_chopped = get_frame(cell_file, 0, 1, 0); printf("Detecting cells in frame 0\n"); // Get gradient matrices in x and y directions MAT *grad_x = gradient_x(image_chopped); MAT *grad_y = gradient_y(image_chopped); m_free(image_chopped); // Get GICOV matrix corresponding to image gradients long long GICOV_start_time = get_time(); MAT *gicov = ellipsematching(grad_x, grad_y); // Square GICOV values MAT *max_gicov = m_get(gicov->m, gicov->n); for (i = 0; i < gicov->m; i++) { for (j = 0; j < gicov->n; j++) { double val = m_get_val(gicov, i, j); m_set_val(max_gicov, i, j, val * val); } } long long GICOV_end_time = get_time(); // Dilate the GICOV matrix long long dilate_start_time = get_time(); MAT *strel = structuring_element(12); MAT *img_dilated = dilate_f(max_gicov, strel); long long dilate_end_time = get_time(); // Find possible matches for cell centers based on GICOV and record 
the rows/columns in which they are found pair_counter = 0; crow = (int *) malloc(max_gicov->m * max_gicov->n * sizeof(int)); ccol = (int *) malloc(max_gicov->m * max_gicov->n * sizeof(int)); for (i = 0; i < max_gicov->m; i++) { for (j = 0; j < max_gicov->n; j++) { if (!(m_get_val(max_gicov,i,j) == 0.0) && (m_get_val(img_dilated,i,j) == m_get_val(max_gicov,i,j))) { crow[pair_counter] = i; ccol[pair_counter] = j; pair_counter++; } } } GICOV_spots = (double *) malloc(sizeof(double)*pair_counter); for (i = 0; i < pair_counter; i++) GICOV_spots[i] = m_get_val(gicov, crow[i], ccol[i]); G = (double *) calloc(pair_counter, sizeof(double)); x_result = (double *) calloc(pair_counter, sizeof(double)); y_result = (double *) calloc(pair_counter, sizeof(double)); x_result_len = 0; for (i = 0; i < pair_counter; i++) { if ((crow[i] > 29) && (crow[i] < BOTTOM - TOP + 39)) { x_result[x_result_len] = ccol[i]; y_result[x_result_len] = crow[i] - 40; G[x_result_len] = GICOV_spots[i]; x_result_len++; } } // Make an array t which holds each "time step" for the possible cells t = (double *) malloc(sizeof(double) * 36); for (i = 0; i < 36; i++) { t[i] = (double)i * 2.0 * PI / 36.0; } // Store cell boundaries (as simple circles) for all cells cellx = m_get(x_result_len, 36); celly = m_get(x_result_len, 36); for(i = 0; i < x_result_len; i++) { for(j = 0; j < 36; j++) { m_set_val(cellx, i, j, x_result[i] + radius * cos(t[j])); m_set_val(celly, i, j, y_result[i] + radius * sin(t[j])); } } A = TMatrix(9,4); V = (double *) malloc(sizeof(double) * pair_counter); QAX_CENTERS = (double * )malloc(sizeof(double) * pair_counter); QAY_CENTERS = (double *) malloc(sizeof(double) * pair_counter); memset(V, 0, sizeof(double) * pair_counter); memset(QAX_CENTERS, 0, sizeof(double) * pair_counter); memset(QAY_CENTERS, 0, sizeof(double) * pair_counter); // For all possible results, find the ones that are feasibly leukocytes and store their centers k_count = 0; for (n = 0; n < x_result_len; n++) { if ((G[n] < -1 
* threshold) || G[n] > threshold) { MAT * x, *y; VEC * x_row, * y_row; x = m_get(1, 36); y = m_get(1, 36); x_row = v_get(36); y_row = v_get(36); // Get current values of possible cells from cellx/celly matrices x_row = get_row(cellx, n, x_row); y_row = get_row(celly, n, y_row); uniformseg(x_row, y_row, x, y); // Make sure that the possible leukocytes are not too close to the edge of the frame if ((m_min(x) > b) && (m_min(y) > b) && (m_max(x) < cell_file->width - b) && (m_max(y) < cell_file->height - b)) { MAT * Cx, * Cy, *Cy_temp, * Ix1, * Iy1; VEC *Xs, *Ys, *W, *Nx, *Ny, *X, *Y; Cx = m_get(1, 36); Cy = m_get(1, 36); Cx = mmtr_mlt(A, x, Cx); Cy = mmtr_mlt(A, y, Cy); Cy_temp = m_get(Cy->m, Cy->n); for (i = 0; i < 9; i++) m_set_val(Cy, i, 0, m_get_val(Cy, i, 0) + 40.0); // Iteratively refine the snake/spline for (i = 0; i < Iter; i++) { int typeofcell; if(G[n] > 0.0) typeofcell = 0; else typeofcell = 1; splineenergyform01(Cx, Cy, grad_x, grad_y, ns, delta, 2.0 * dt, typeofcell); } X = getsampling(Cx, ns); for (i = 0; i < Cy->m; i++) m_set_val(Cy_temp, i, 0, m_get_val(Cy, i, 0) - 40.0); Y = getsampling(Cy_temp, ns); Ix1 = linear_interp2(grad_x, X, Y); Iy1 = linear_interp2(grad_x, X, Y); Xs = getfdriv(Cx, ns); Ys = getfdriv(Cy, ns); Nx = v_get(Ys->dim); for (i = 0; i < Ys->dim; i++) v_set_val(Nx, i, v_get_val(Ys, i) / sqrt(v_get_val(Xs, i)*v_get_val(Xs, i) + v_get_val(Ys, i)*v_get_val(Ys, i))); Ny = v_get(Xs->dim); for (i = 0; i < Xs->dim; i++) v_set_val(Ny, i, -1.0 * v_get_val(Xs, i) / sqrt(v_get_val(Xs, i)*v_get_val(Xs, i) + v_get_val(Ys, i)*v_get_val(Ys, i))); W = v_get(Nx->dim); for (i = 0; i < Nx->dim; i++) v_set_val(W, i, m_get_val(Ix1, 0, i) * v_get_val(Nx, i) + m_get_val(Iy1, 0, i) * v_get_val(Ny, i)); V[n] = mean(W) / std_dev(W); //get means of X and Y values for all "snaxels" of the spline contour, thus finding the cell centers QAX_CENTERS[k_count] = mean(X); QAY_CENTERS[k_count] = mean(Y) + TOP; k_count++; // Free memory v_free(W); v_free(Ny); v_free(Nx); 
v_free(Ys); v_free(Xs); m_free(Iy1); m_free(Ix1); v_free(Y); v_free(X); m_free(Cy_temp); m_free(Cy); m_free(Cx); } // Free memory v_free(y_row); v_free(x_row); m_free(y); m_free(x); } } // Free memory free(V); free(ccol); free(crow); free(GICOV_spots); free(t); free(G); free(x_result); free(y_result); m_free(A); m_free(celly); m_free(cellx); m_free(img_dilated); m_free(max_gicov); m_free(gicov); m_free(grad_y); m_free(grad_x); // Report the total number of cells detected printf("Cells detected: %d\n\n", k_count); // Report the breakdown of the detection runtime printf("Detection runtime\n"); printf("-----------------\n"); printf("GICOV computation: %.5f seconds\n", ((float) (GICOV_end_time - GICOV_start_time)) / (1000*1000)); printf(" GICOV dilation: %.5f seconds\n", ((float) (dilate_end_time - dilate_start_time)) / (1000*1000)); printf(" Total: %.5f seconds\n", ((float) (get_time() - program_start_time)) / (1000*1000)); // Now that the cells have been detected in the first frame, // track the ellipses through subsequent frames if (num_frames > 1) printf("\nTracking cells across %d frames\n", num_frames); else printf("\nTracking cells across 1 frame\n"); long long tracking_start_time = get_time(); int num_snaxels = 20; ellipsetrack(cell_file, QAX_CENTERS, QAY_CENTERS, k_count, radius, num_snaxels, num_frames); printf(" Total cambine: %.5f seconds\n", ((float) (get_time() - tracking_start_time)) / (float) (1000*1000*num_frames)); // Report total program execution time printf("\nTotal application run time: %.5f seconds\n", ((float) (get_time() - program_start_time)) / (1000*1000)); return 0; }
/*
 * Demonstrate several background-subtraction / motion-analysis techniques on
 * a surveillance video, showing each model's background and foreground side
 * by side: static first-frame difference, running average, selective running
 * average, median background, and a Gaussian Mixture Model (MOG2).  Optical
 * flow (Lucas-Kanade and Farneback) is shown on every second frame.
 *
 * surveillance_video   - open capture to read frames from
 * starting_frame       - frame index at which processing begins
 * clean_binary_images  - if true, clean each binary mask with a
 *                        morphological closing followed by an opening
 */
void VideoDemos( VideoCapture& surveillance_video, int starting_frame, bool clean_binary_images )
{
	Mat previous_gray_frame, optical_flow, optical_flow_display;
	Mat current_frame, thresholded_image, closed_image, first_frame;
	Mat current_frame_gray, running_average_background;
	Mat temp_running_average_background, running_average_difference;
	Mat running_average_foreground_mask, running_average_foreground_image;
	Mat selective_running_average_background;
	Mat temp_selective_running_average_background, selective_running_average_difference;
	Mat selective_running_average_foreground_mask, selective_running_average_background_mask, selective_running_average_foreground_image;
	double running_average_learning_rate = 0.01;

	surveillance_video.set(CV_CAP_PROP_POS_FRAMES, starting_frame);
	surveillance_video >> current_frame;
	first_frame = current_frame.clone();
	cvtColor(current_frame, current_frame_gray, CV_BGR2GRAY);
	current_frame.convertTo(running_average_background, CV_32F);
	selective_running_average_background = running_average_background.clone();
	// NOTE: unused locals removed — `rad` (background depth) and `codec`
	// (FOURCC) were computed but never read.

	MedianBackground median_background( current_frame, (float) 1.005, 1 );
	Mat median_background_image, median_foreground_image;

	// V3.0.0 update on next line.  OLD CODE was BackgroundSubtractorMOG2 gmm; //(50,16,true);
	Ptr<BackgroundSubtractorMOG2> gmm = createBackgroundSubtractorMOG2();
	Mat foreground_mask, foreground_image = Mat::zeros(current_frame.size(), CV_8UC3);

	double frame_rate = surveillance_video.get(CV_CAP_PROP_FPS);
	double time_between_frames = 1000.0/frame_rate;
	Timestamper* timer = new Timestamper();
	int frame_count = 0;
	while ((!current_frame.empty()) && (frame_count++ < 1000))//1800))
	{
		double duration = static_cast<double>(getTickCount());
		vector<Mat> input_planes(3);
		split(current_frame, input_planes);
		cvtColor(current_frame, current_frame_gray, CV_BGR2GRAY);

		if (frame_count%2 == 0)  // Skip every second frame so the flow is greater.
		{
			if ( previous_gray_frame.data )
			{
				Mat lucas_kanade_flow;
				timer->ignoreTimeSinceLastRecorded();
				LucasKanadeOpticalFlow(previous_gray_frame, current_frame_gray, lucas_kanade_flow);
				timer->recordTime("Lucas Kanade Optical Flow");
				calcOpticalFlowFarneback(previous_gray_frame, current_frame_gray, optical_flow, 0.5, 3, 15, 3, 5, 1.2, 0);
				cvtColor(previous_gray_frame, optical_flow_display, CV_GRAY2BGR);
				drawOpticalFlow(optical_flow, optical_flow_display, 8, Scalar(0, 255, 0), Scalar(0, 0, 255));
				timer->recordTime("Farneback Optical Flow");
				char frame_str[100];
				sprintf( frame_str, "Frame = %d", frame_count);
				Mat temp_output = JoinImagesHorizontally( current_frame, frame_str, optical_flow_display, "Farneback Optical Flow", 4 );
				Mat optical_flow_output = JoinImagesHorizontally( temp_output, "", lucas_kanade_flow, "Lucas Kanade Optical Flow", 4 );
				imshow("Optical Flow", optical_flow_output );
			}
			std::swap(previous_gray_frame, current_frame_gray);
		}

		// Static background image: difference against the very first frame
		Mat difference_frame, binary_difference;
		Mat structuring_element(3, 3, CV_8U, Scalar(1));
		timer->ignoreTimeSinceLastRecorded();
		absdiff(current_frame, first_frame, difference_frame);
		cvtColor(difference_frame, thresholded_image, CV_BGR2GRAY);
		threshold(thresholded_image, thresholded_image, 30, 255, THRESH_BINARY);
		if (clean_binary_images)
		{
			morphologyEx(thresholded_image, closed_image, MORPH_CLOSE, structuring_element);
			morphologyEx(closed_image, binary_difference, MORPH_OPEN, structuring_element);
			current_frame.copyTo(binary_difference, thresholded_image);
		}
		else
		{
			binary_difference.setTo(Scalar(0,0,0));
			current_frame.copyTo(binary_difference, thresholded_image);
		}
		timer->recordTime("Static difference");

		// Running Average (three channel version)
		vector<Mat> running_average_planes(3);
		split(running_average_background, running_average_planes);
		accumulateWeighted(input_planes[0], running_average_planes[0], running_average_learning_rate);
		accumulateWeighted(input_planes[1], running_average_planes[1], running_average_learning_rate);
		accumulateWeighted(input_planes[2], running_average_planes[2], running_average_learning_rate);
		merge(running_average_planes, running_average_background);
		running_average_background.convertTo(temp_running_average_background, CV_8U);
		absdiff(temp_running_average_background, current_frame, running_average_difference);
		split(running_average_difference, running_average_planes);
		// Determine foreground points as any point with a difference of more
		// than 30 on any one channel:
		threshold(running_average_difference, running_average_foreground_mask, 30, 255, THRESH_BINARY);
		split(running_average_foreground_mask, running_average_planes);
		bitwise_or( running_average_planes[0], running_average_planes[1], running_average_foreground_mask );
		bitwise_or( running_average_planes[2], running_average_foreground_mask, running_average_foreground_mask );
		if (clean_binary_images)
		{
			morphologyEx(running_average_foreground_mask, closed_image, MORPH_CLOSE, structuring_element);
			morphologyEx(closed_image, running_average_foreground_mask, MORPH_OPEN, structuring_element);
		}
		running_average_foreground_image.setTo(Scalar(0,0,0));
		current_frame.copyTo(running_average_foreground_image, running_average_foreground_mask);
		timer->recordTime("Running Average");

		// Running Average with selective update
		vector<Mat> selective_running_average_planes(3);
		// Find Foreground mask
		selective_running_average_background.convertTo(temp_selective_running_average_background, CV_8U);
		absdiff(temp_selective_running_average_background, current_frame, selective_running_average_difference);
		split(selective_running_average_difference, selective_running_average_planes);
		// Determine foreground points as any point with an average difference
		// of more than 30 over all channels:
		Mat temp_sum = (selective_running_average_planes[0]/3 + selective_running_average_planes[1]/3 + selective_running_average_planes[2]/3);
		threshold(temp_sum, selective_running_average_foreground_mask, 30, 255, THRESH_BINARY_INV);
		// Update background: full learning rate on background points,
		// reduced rate on foreground points (after inverting the mask)
		split(selective_running_average_background, selective_running_average_planes);
		accumulateWeighted(input_planes[0], selective_running_average_planes[0], running_average_learning_rate, selective_running_average_foreground_mask);
		accumulateWeighted(input_planes[1], selective_running_average_planes[1], running_average_learning_rate, selective_running_average_foreground_mask);
		accumulateWeighted(input_planes[2], selective_running_average_planes[2], running_average_learning_rate, selective_running_average_foreground_mask);
		invertImage(selective_running_average_foreground_mask, selective_running_average_foreground_mask);
		accumulateWeighted(input_planes[0], selective_running_average_planes[0], running_average_learning_rate/3.0, selective_running_average_foreground_mask);
		accumulateWeighted(input_planes[1], selective_running_average_planes[1], running_average_learning_rate/3.0, selective_running_average_foreground_mask);
		accumulateWeighted(input_planes[2], selective_running_average_planes[2], running_average_learning_rate/3.0, selective_running_average_foreground_mask);
		merge(selective_running_average_planes, selective_running_average_background);
		if (clean_binary_images)
		{
			morphologyEx(selective_running_average_foreground_mask, closed_image, MORPH_CLOSE, structuring_element);
			morphologyEx(closed_image, selective_running_average_foreground_mask, MORPH_OPEN, structuring_element);
		}
		selective_running_average_foreground_image.setTo(Scalar(0,0,0));
		current_frame.copyTo(selective_running_average_foreground_image, selective_running_average_foreground_mask);
		timer->recordTime("Selective Running Average");

		// Median background
		timer->ignoreTimeSinceLastRecorded();
		median_background.UpdateBackground( current_frame );
		timer->recordTime("Median");
		median_background_image = median_background.GetBackgroundImage();
		Mat median_difference;
		absdiff(median_background_image, current_frame, median_difference);
		cvtColor(median_difference, median_difference, CV_BGR2GRAY);
		threshold(median_difference, median_difference, 30, 255, THRESH_BINARY);
		median_foreground_image.setTo(Scalar(0,0,0));
		current_frame.copyTo(median_foreground_image, median_difference);

		// Update the Gaussian Mixture Model
		// V3.0.0 update on next line.  OLD CODE was gmm(current_frame, foreground_mask);
		gmm->apply(current_frame, foreground_mask);
		// Clean the resultant binary (moving pixel) mask using an opening.
		threshold(foreground_mask, thresholded_image, 150, 255, THRESH_BINARY);
		Mat moving_incl_shadows, shadow_points;
		threshold(foreground_mask, moving_incl_shadows, 50, 255, THRESH_BINARY);
		absdiff( thresholded_image, moving_incl_shadows, shadow_points );
		Mat cleaned_foreground_mask;
		if (clean_binary_images)
		{
			morphologyEx(thresholded_image, closed_image, MORPH_CLOSE, structuring_element);
			morphologyEx(closed_image, cleaned_foreground_mask, MORPH_OPEN, structuring_element);
		}
		else
			cleaned_foreground_mask = thresholded_image.clone();
		foreground_image.setTo(Scalar(0,0,0));
		current_frame.copyTo(foreground_image, cleaned_foreground_mask);
		timer->recordTime("Gaussian Mixture Model");

		// Create an average background image (just for information)
		Mat mean_background_image;
		timer->ignoreTimeSinceLastRecorded();
		// V3.0.0 update on next line.  OLD CODE was gmm.getBackgroundImage(mean_background_image);
		gmm->getBackgroundImage(mean_background_image);

		// Pace the display to the source frame rate
		duration = static_cast<double>(getTickCount()) - duration;
		duration /= getTickFrequency()/1000.0;
		int delay = (time_between_frames > duration) ? ((int) (time_between_frames - duration)) : 1;
		cvWaitKey(delay);  // key value was previously stored but never used

		char frame_str[100];
		sprintf( frame_str, "Frame = %d", frame_count);
		Mat temp_static_output = JoinImagesHorizontally( current_frame, frame_str, first_frame, "Static Background", 4 );
		Mat static_output = JoinImagesHorizontally( temp_static_output, "", binary_difference, "Foreground", 4 );
		imshow("Static Background Model", static_output );
		Mat temp_running_output = JoinImagesHorizontally( current_frame, frame_str, temp_running_average_background, "Running Average Background", 4 );
		Mat running_output = JoinImagesHorizontally( temp_running_output, "", running_average_foreground_image, "Foreground", 4 );
		imshow("Running Average Background Model", running_output );
		Mat temp_selective_output = JoinImagesHorizontally( current_frame, frame_str, temp_selective_running_average_background, "Selective Running Average Background", 4 );
		Mat selective_output = JoinImagesHorizontally( temp_selective_output, "", selective_running_average_foreground_image, "Foreground", 4 );
		imshow("Selective Running Average Background Model", selective_output );
		Mat temp_median_output = JoinImagesHorizontally( current_frame, frame_str, median_background_image, "Median Background", 4 );
		Mat median_output = JoinImagesHorizontally( temp_median_output, "", median_foreground_image, "Foreground", 4 );
		imshow("Median Background Model", median_output );
		Mat temp_gaussian_output = JoinImagesHorizontally( current_frame, frame_str, mean_background_image, "GMM Background", 4 );
		Mat gaussian_output = JoinImagesHorizontally( temp_gaussian_output, "", foreground_image, "Foreground", 4 );
		imshow("Gaussian Mixture Model", gaussian_output );
		timer->putTimes( current_frame );
		imshow( "Computation Times", current_frame );
		surveillance_video >> current_frame;
	}
	delete timer;  // BUG FIX: Timestamper was allocated with new but never freed
	cvDestroyAllWindows();
}