// Construct the filter. The physical-sensor setup is currently disabled
// (left as reference below); only the Kalman state is initialised from
// the sampling frequency `fe`. `sensitivity` is kept for the disabled
// accelerometer path.
KalmanFilter::KalmanFilter(int sensitivity, double fe) {
    // gyroscope
    // gyro = new L3G4200D(2000,200);
    // accelero
    // accelero = new MMA_7455(sensitivity);

    // init KalmanFilter
    initKalman(fe);
}
//========================================= void initCamKalTracker(IplImage* frame, camshift_kalman_tracker& camKalTrk) { //========================================= camKalTrk.kalman = initKalman(camKalTrk.kalman); camKalTrk.measurement = cvCreateMat(2, 1, CV_32FC1 );//Z(k) camKalTrk.realposition = cvCreateMat(4, 1, CV_32FC1 );//real X(k) /* allocate all the buffers */ camKalTrk.image = cvCreateImage(cvGetSize(frame), 8, 3); camKalTrk.image->origin = frame->origin; camKalTrk.hsv = cvCreateImage(cvGetSize(frame), 8, 3); camKalTrk.hue = cvCreateImage(cvGetSize(frame), 8, 1); camKalTrk.mask = cvCreateImage(cvGetSize(frame), 8, 1); camKalTrk.backproject = cvCreateImage(cvGetSize(frame), 8, 1); camKalTrk.backproject->origin = frame->origin; int hdims = 256; float hranges_arr[] = {0, 180}; float* hranges = hranges_arr; camKalTrk.hist = cvCreateHist(1, &hdims, CV_HIST_ARRAY, &hranges, 1); // camKalTrk.selectObject = 0; }
// Associate detected target rectangles with the existing Kalman tracks,
// spawn a new track for every unmatched detection, and drop tracks that
// have gone unmatched for too long.
//
// image   : current frame; used for the template-correlation gate and to
//           refresh each matched track's appearance model
// targets : candidate bounding boxes detected in `image`
void targetTrackingFilter::applyFilter(cv::Mat &image, std::vector<cv::Rect> targets) {
    // Predict every track one step ahead; each track accrues one frame of
    // "missing data" until a matching observation corrects it below.
    predictions.clear();
    for (int i = 0; i < (int)KFs.size(); ++i) {
        predictions.push_back(KFs.at(i).predict());
        missingData.at(i)++;
    }

    for (int i = 0; i < (int)targets.size(); ++i) {
        bool found = false;
        bool close = false;
        for (int j = 0; j < (int)predictions.size(); ++j) {
            // Spatial gate: the observation centre must fall inside the square
            // THRESHOLD window around the track's prediction. Both signs are
            // compared explicitly — the original abs() picked the integer
            // overload and truncated the float deltas.
            float deltaX = center(targets.at(i)).x - predictions.at(j).at<float>(0);
            float deltaY = center(targets.at(i)).y - predictions.at(j).at<float>(1);
            if (deltaX < THRESHOLD && deltaX > -THRESHOLD
                && deltaY < THRESHOLD && deltaY > -THRESHOLD) {
                // Appearance gate: normalized cross-correlation between the
                // candidate patch and the track's stored template.
                close = true;
                cv::Mat correlation;
                double minVal, maxVal;
                cv::Point minLoc, maxLoc;
                cv::Mat toCompare = cv::Mat(image, targets.at(i));
                cv::resize(toCompare, toCompare, targetsModel.at(j).size());
                cv::matchTemplate(toCompare, targetsModel.at(j), correlation, CV_TM_CCORR_NORMED);
                cv::minMaxLoc(correlation, &minVal, &maxVal, &minLoc, &maxLoc);
                std::cout << maxVal << std::endl;
                if (maxVal > CORR_FACTOR) {
                    // Accept: correct the filter with the observed centre,
                    // reset the miss counter and refresh the template.
                    predictions.at(j) = KFs.at(j).correct((cv::Mat_<float>(2, 1)
                        << center(targets.at(i)).x, center(targets.at(i)).y));
                    missingData.at(j) = 0;
                    targetsModel.at(j) = cv::Mat(image, targets.at(i));
                    found = true;
                    break;
                }
            }
        }

        // If no track matched (and none was even spatially close),
        // a new tracking filter is created for this target.
        if (!found && !close) {
            cv::KalmanFilter newKalman;
            initKalman(&newKalman, center(targets.at(i)).x, center(targets.at(i)).y, 1.5);
            KFs.push_back(newKalman);
            missingData.push_back(0);
            targetsModel.push_back(cv::Mat(image, targets.at(i)));
            noOfTarget.push_back(++nbOfTargets);
        }

        // If the track of a target is lost for more than MAX_MISSING_DATA
        // frames its tracking filter is deleted. Iterate backwards so an
        // erase never skips the following element (the original forward
        // loop incremented past the element after each erase).
        for (int k = (int)missingData.size() - 1; k >= 0; --k) {
            if (missingData.at(k) > MAX_MISSING_DATA) {
                KFs.erase(KFs.begin() + k);
                missingData.erase(missingData.begin() + k);
                predictions.erase(predictions.begin() + k);
                targetsModel.erase(targetsModel.begin() + k);
                noOfTarget.erase(noOfTarget.begin() + k);
            }
        }
    }
}
// FreeRTOS task: fuses MPU6050 accelerometer/gyro readings through two
// 1-D Kalman filters (roll on X, pitch on Y) and — in non-DEBUG builds —
// maps the fused angles to one of six gesture commands, printing the
// command over USART once the same gesture has been seen for 5
// consecutive sample periods.
// NOTE(review): relies on globals defined elsewhere — xLastWakeTime,
// xFrequency, dt, kalmanX/kalmanY, kalAngleX/kalAngleY and the
// accX..gyroZ sample variables; their initialisation is not visible here.
void MPU6050Task(void) {
    char uart_out[32];               // text buffer for debug float printing
    uint8_t controller_command = 0;  // gesture decoded this iteration
    uint8_t pre_command = 0;         // gesture decoded last iteration
    uint8_t count = 0;               // consecutive frames pre_command repeated
    MPU6050_Task_Suspend();
    vTaskDelayUntil(&xLastWakeTime, xFrequency); // wait while sensor is ready
    initKalman(&kalmanX);
    initKalman(&kalmanY);
    // One-shot accelerometer read to seed the filters with the current
    // orientation, so they don't have to converge from zero.
    MPU6050_ReadAccelerometer();
    accX = MPU6050_Data.Accelerometer_X;
    accY = MPU6050_Data.Accelerometer_Y;
    accZ = MPU6050_Data.Accelerometer_Z;
    // Roll/pitch from gravity only; sqrt1/Square are presumably local math
    // helpers — TODO confirm they match sqrtf/x*x semantics.
    float roll = atan2(-accY, -accZ) * RAD_TO_DEG;
    float pitch = atan(-accX / sqrt1(Square(accY) + Square(accZ))) * RAD_TO_DEG;
    setAngle(&kalmanX, roll); // Set starting angle
    setAngle(&kalmanY, pitch);
    while (1) {
        /* Read all data from sensor */
        MPU6050_ReadAccGyo();
        accX = MPU6050_Data.Accelerometer_X;
        accY = MPU6050_Data.Accelerometer_Y;
        accZ = MPU6050_Data.Accelerometer_Z;
        gyroX = MPU6050_Data.Gyroscope_X;
        gyroY = MPU6050_Data.Gyroscope_Y;
        gyroZ = MPU6050_Data.Gyroscope_Z;
        // Accelerometer-only angle estimates for this frame.
        float roll = atan2(-accY, -accZ) * RAD_TO_DEG;
        float pitch = atan(-accX / sqrt1(Square(accY) + Square(accZ))) * RAD_TO_DEG;
        float gyroXrate = gyroX * MPU6050_Data.Gyro_Mult; // Convert to deg/s
        float gyroYrate = gyroY * MPU6050_Data.Gyro_Mult; // Convert to deg/s
        // This fixes the transition problem when the accelerometer angle
        // jumps between -180 and 180 degrees: snap the filter instead of
        // letting it chase a wrapped value.
        if ((roll < -90 && kalAngleX > 90) || (roll > 90 && kalAngleX < -90)) {
            setAngle(&kalmanX, roll);
        } else {
            kalAngleX = getAngle(&kalmanX, roll, gyroXrate, dt); // Calculate the angle using a Kalman filter
        }
        if (Abs(kalAngleX) > 90)
            gyroYrate = -gyroYrate; // Invert rate, so it fits the restriced accelerometer reading
        kalAngleY = getAngle(&kalmanY, pitch, gyroYrate, dt);
#ifdef DEBUG
        // Debug build: just stream the raw Z acceleration over USART.
        USART1_puts("\r\n");
        shell_float2str(accZ, uart_out);
        USART1_puts(uart_out);
#else
        // Map raw acceleration / fused pitch to a gesture command.
        // Thresholds (±6300 counts, pitch bands) are empirically chosen —
        // TODO confirm against the controller spec.
        if (accY < -6300) {
            controller_command = 1;
        } else if (accY > 6300) {
            controller_command = 2;
        } else if (accZ < 0 && kalAngleY > 47) {
            controller_command = 3;
        } else if (accZ < 0 && kalAngleY < -30) {
            controller_command = 4;
        } else if (accZ < 0 && kalAngleY > 25 && kalAngleY < 47) {
            controller_command = 5;
        } else {
            controller_command = 6;
        }
        // check hand gesture for a while: only act once the same command
        // has been stable for 5 consecutive periods (simple debounce).
        if (count == 5 && pre_command == controller_command) {
            switch (controller_command) {
            case 1:
                USART1_puts("\r\nmove right");
                break;
            case 2:
                USART1_puts("\r\nmove left");
                break;
            case 3:
                USART1_puts("\r\nforward");
                break;
            case 4:
                USART1_puts("\r\nDOWN");
                break;
            case 5:
                USART1_puts("\r\nUP");
                break;
            case 6:
                USART1_puts("\r\nsuspend");
                break;
            }
        } else if (pre_command == controller_command) {
            count++;
        } else {
            // Gesture changed: restart the debounce window.
            pre_command = controller_command;
            count = 0;
        }
#endif
        vTaskDelayUntil(&xLastWakeTime, xFrequency);
    }
}