/**
 * Create a new orientation tracking state for the given controller.
 *
 * Requires the controller to have calibration data loaded (the sensor
 * fusion needs calibrated readings). Returns NULL if calibration is
 * missing or on allocation failure. The caller owns the returned object.
 **/
PSMoveOrientation *
psmove_orientation_new(PSMove *move)
{
    psmove_return_val_if_fail(move != NULL, NULL);

    if (!psmove_has_calibration(move)) {
        psmove_DEBUG("Can't create orientation - no calibration!\n");
        return NULL;
    }

    PSMoveOrientation *orientation = calloc(1, sizeof(PSMoveOrientation));
    if (orientation == NULL) {
        /* FIX: result of calloc() was previously dereferenced unchecked */
        return NULL;
    }

    orientation->move = move;

    /* Initial sampling frequency */
    orientation->sample_freq = 120.0f;

    /* Measurement */
    orientation->sample_freq_measure_start = psmove_util_get_ticks();
    orientation->sample_freq_measure_count = 0;

    /* Initial quaternion: identity rotation (w=1, x=y=z=0) */
    orientation->quaternion[0] = 1.f;
    orientation->quaternion[1] = 0.f;
    orientation->quaternion[2] = 0.f;
    orientation->quaternion[3] = 0.f;

    return orientation;
}
CameraControl * camera_control_new(int cameraID) { CameraControl* cc = (CameraControl*) calloc(1, sizeof(CameraControl)); cc->cameraID = cameraID; #if defined(CAMERA_CONTROL_USE_CL_DRIVER) int w, h; int cams = CLEyeGetCameraCount(); if (cams <= cameraID) { free(cc); return NULL; } GUID cguid = CLEyeGetCameraUUID(cameraID); cc->camera = CLEyeCreateCamera(cguid, CLEYE_COLOR_PROCESSED, CLEYE_VGA, 60); CLEyeCameraGetFrameDimensions(cc->camera, &w, &h); // Depending on color mode chosen, create the appropriate OpenCV image cc->frame4ch = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 4); cc->frame3ch = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 3); CLEyeCameraStart(cc->camera); #else char *video = psmove_util_get_env_string(PSMOVE_TRACKER_FILENAME_ENV); if (video) { psmove_DEBUG("Using '%s' as video input.\n", video); cc->capture = cvCaptureFromFile(video); free(video); } else { cc->capture = cvCaptureFromCAM(cc->cameraID); int width, height; get_metrics(&width, &height); cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_WIDTH, width); cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_HEIGHT, height); } #endif return cc; }
//-- public methods ----- PSMoveOrientation * psmove_orientation_new(PSMove *move) { psmove_return_val_if_fail(move != NULL, NULL); if (!psmove_has_calibration(move)) { psmove_DEBUG("Can't create orientation - no calibration!\n"); return NULL; } PSMoveOrientation *orientation_state = (PSMoveOrientation *)calloc(1, sizeof(PSMoveOrientation)); orientation_state->move = move; /* Initial sampling frequency */ orientation_state->sample_freq = SAMPLE_FREQUENCY; /* Measurement */ orientation_state->sample_freq_measure_start = psmove_util_get_ticks(); orientation_state->sample_freq_measure_count = 0; /* Initial quaternion */ orientation_state->quaternion.setIdentity(); orientation_state->reset_quaternion.setIdentity(); /* Initialize data specific to the selected filter */ psmove_orientation_set_fusion_type(orientation_state, OrientationFusion_ComplementaryMARG); /* Set the transform used re-orient the calibration data used by the orientation fusion algorithm */ psmove_orientation_set_calibration_transform(orientation_state, k_psmove_identity_pose_laying_flat); /* Set the transform used re-orient the sensor data used by the orientation fusion algorithm */ psmove_orientation_set_sensor_data_transform(orientation_state, k_psmove_sensor_transform_opengl); return orientation_state; }
/**
 * Discover moved daemons on the local network via UDP broadcast.
 *
 * Sends a handful of discovery requests to the broadcast address, then
 * collects replies until 10 consecutive receive timeouts occur. For every
 * unique responder, a moved_client is created and prepended to `result`.
 * Returns the updated list head, or nullptr if the socket could not be
 * created (the original `result` is NOT returned in that case — callers
 * relying on the existing behavior should pass an empty list).
 **/
static moved_client_list *
moved_client_list_discover(moved_client_list *result)
{
    int fd = (int)socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);

    if (fd == -1) {
        return nullptr;
    }

    psmove_port_set_socket_timeout_ms(fd, 10);

    int enabled = 1;
    setsockopt(fd, SOL_SOCKET, SO_BROADCAST, (const char *)&enabled, sizeof(enabled));

    struct sockaddr_in addr;
    /* FIX: zero the whole struct first — sin_zero (and any platform
     * padding) was previously left uninitialized before sendto() */
    memset(&addr, 0, sizeof(addr));
    addr.sin_family = AF_INET;
    addr.sin_port = htons(MOVED_UDP_PORT);
    addr.sin_addr.s_addr = INADDR_BROADCAST;

    union PSMoveMovedRequest request;
    union PSMoveMovedRequest response;
    memset(&request, 0, sizeof(request));
    request.header.request_sequence = 0x47110000;
    request.header.command_id = MOVED_REQ_DISCOVER;
    request.header.controller_id = 0;

    /* Broadcast the discovery request a few times to reduce the chance
     * of a lost datagram hiding a daemon */
    for (int i=0; i<5; i++) {
        request.header.request_sequence++;
        int res = sendto(fd, (const char *)&request, sizeof(request), /*flags=*/0,
                (struct sockaddr *)&addr, sizeof(addr));
        if (res == -1) {
            psmove_WARNING("Could not send discovery request");
        }
        psmove_port_sleep_ms(20);
    }

    psmove_port_sleep_ms(50);

    std::map<std::string, bool> discovered;

    /* Collect replies until we hit 10 consecutive timeouts/short reads
     * (each recvfrom() waits at most the 10 ms socket timeout) */
    int failures = 0;
    while (failures < 10) {
        struct sockaddr_in addr_server;
        socklen_t addr_server_len = sizeof(addr_server);
        memset(&addr_server, 0, sizeof(addr_server));
        addr_server.sin_family = AF_INET;

        int res = recvfrom(fd, (char *)&response, sizeof(response), /*flags=*/0,
                (struct sockaddr *)&addr_server, &addr_server_len);

        if (res == sizeof(response)) {
            char temp[1024];
            /* FIX: use the sender's address family here — the code
             * previously passed addr.sin_family (the broadcast target);
             * both are AF_INET, but the mismatch was an accident waiting
             * to happen */
            inet_ntop(addr_server.sin_family, &addr_server.sin_addr, temp, sizeof(temp));
            std::string hostname(temp);
            discovered[hostname] = true;
            psmove_DEBUG("Discovered daemon: '%s' (seq=0x%08x)\n", temp,
                    response.header.request_sequence);
            failures = 0;
        } else {
            failures++;
        }
    }

    psmove_port_close_socket(fd);

    for (auto it: discovered) {
        const char *hostname = it.first.c_str();
        printf("Using remote host '%s' (from auto-discovery)\n", hostname);
        moved_client *client = moved_client_create(hostname);
        result = moved_client_list_insert(result, client);

        if (moved_client_send(client, MOVED_REQ_GET_HOST_BTADDR, 0, nullptr, 0)) {
            char *serial = _psmove_btaddr_to_string(*((PSMove_Data_BTAddr *)
                        client->response_buf.get_host_btaddr.btaddr));
            printf("Bluetooth host address of '%s' is '%s'\n", hostname, serial);
            free(serial);
        }
    }

    return result;
}
/**
 * Grab the next frame from the camera and return it as a 3-channel image.
 *
 * The returned IplImage is owned by the CameraControl (or by OpenCV's
 * capture internals) — callers must NOT release it.
 *
 * NOTE(review): in the OpenCV path, cvQueryFrame() can return NULL (e.g.
 * end of a video file); the deinterlace/undistort code below does not
 * guard against that — confirm callers never hit it.
 **/
IplImage *
camera_control_query_frame(CameraControl* cc)
{
    IplImage* result;

#if defined(CAMERA_CONTROL_USE_CL_DRIVER)
    // assign buffer-pointer to address of buffer
    cvGetRawData(cc->frame4ch, &cc->pCapBuffer, 0, 0);

    /* Blocking read with a 2000 ms timeout into the raw 4-channel buffer */
    CLEyeCameraGetFrame(cc->camera, cc->pCapBuffer, 2000);

    // convert 4ch image to 3ch image
    const int from_to[] = { 0, 0, 1, 1, 2, 2 };
    const CvArr** src = (const CvArr**) &cc->frame4ch;
    CvArr** dst = (CvArr**) &cc->frame3ch;
    cvMixChannels(src, 1, dst, 1, from_to, 3);

    result = cc->frame3ch;
#else
    /* Time the grab for debugging — slow cvQueryFrame calls show up here */
    long start = psmove_util_get_ticks();
    result = cvQueryFrame(cc->capture);
    psmove_DEBUG("cvQueryFrame: %ld ms\n", psmove_util_get_ticks() - start);
#endif

#if defined(PSMOVE_USE_DEINTERLACE)
    /**
     * Dirty hack follows:
     *  - Clone image
     *  - Hack internal variables to make an image of all odd lines
     **/
    IplImage *tmp = cvCloneImage(result);
    tmp->imageData += tmp->widthStep; // odd lines
    tmp->widthStep *= 2;
    tmp->height /= 2;

    /**
     * Use nearest-neighbor to be faster. In my tests, this does not
     * cause a speed disadvantage, and tracking quality is still good.
     *
     * This will scale the half-height image "tmp" to the original frame
     * size by doubling lines (so we can still do normal circle tracking).
     **/
    cvResize(tmp, result, CV_INTER_NN);

    /**
     * Need to revert changes in tmp from above, otherwise the call
     * to cvReleaseImage would cause a crash.
     **/
    tmp->height = result->height;
    tmp->widthStep = result->widthStep;
    tmp->imageData -= tmp->widthStep; // odd lines
    cvReleaseImage(&tmp);
#endif

    // undistort image (only when a camera calibration was loaded, which
    // populates the mapx/mapy remap tables)
    if (cc->mapx && cc->mapy) {
        cvRemap(result, cc->frame3chUndistort,
                cc->mapx, cc->mapy,
                CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS,
                cvScalarAll(0));
        result = cc->frame3chUndistort;
    }

    return result;
}
/**
 * Create a new calibration object for the given controller.
 *
 * Determines the controller's serial (via USB BT-address readout or the
 * Bluetooth serial), derives per-controller calibration file paths from
 * it, loads calibration data from disk (or reads and stores it from USB
 * if connected that way), and pre-computes the linear mapping
 * coefficients for accelerometer (ax..az, bx..bz) and gyroscope
 * (gx..gz). Without calibration data, identity/pass-through mappings are
 * installed. Returns NULL on allocation failure or if no serial could be
 * determined; the caller owns the returned object.
 **/
PSMoveCalibration *
psmove_calibration_new(PSMove *move)
{
    PSMove_Data_BTAddr addr;
    char *serial;
    int i;

    PSMoveCalibration *calibration =
        (PSMoveCalibration*)calloc(1, sizeof(PSMoveCalibration));
    if (calibration == NULL) {
        /* FIX: result of calloc() was previously dereferenced unchecked */
        return NULL;
    }

    calibration->move = move;

    if (psmove_connection_type(move) == Conn_USB) {
        _psmove_read_btaddrs(move, NULL, &addr);
        serial = _psmove_btaddr_to_string(addr);
    } else {
        serial = psmove_get_serial(move);
    }

    if (!serial) {
        psmove_CRITICAL("Could not determine serial from controller");
        free(calibration);
        return NULL;
    }

    /* Make the serial usable as a filename component: ':' -> '_'
     * (FIX: strlen() was re-evaluated on every loop iteration) */
    size_t serial_len = strlen(serial);
    for (i = 0; i < (int)serial_len; i++) {
        if (serial[i] == ':') {
            serial[i] = '_';
        }
    }

    size_t calibration_filename_length =
        serial_len + strlen(PSMOVE_CALIBRATION_EXTENSION) + 1;
    char *calibration_filename = (char *)malloc(calibration_filename_length);
    if (calibration_filename == NULL) {
        /* FIX: malloc() result was previously used unchecked */
        free(serial);
        free(calibration);
        return NULL;
    }
    /* Bounded, always NUL-terminated (replaces strcpy+strcat) */
    snprintf(calibration_filename, calibration_filename_length, "%s%s",
            serial, PSMOVE_CALIBRATION_EXTENSION);

    calibration->filename = psmove_util_get_file_path(calibration_filename);
    calibration->system_filename =
        psmove_util_get_system_file_path(calibration_filename);
    free(calibration_filename);
    free(serial);

    /* Try to load the calibration data from disk, or from USB */
    psmove_calibration_load(calibration);
    if (!psmove_calibration_supported(calibration)) {
        if (psmove_connection_type(move) == Conn_USB) {
            psmove_DEBUG("Storing calibration from USB\n");
            psmove_calibration_read_from_usb(calibration);
            psmove_calibration_save(calibration);
        }
    }

    /* Pre-calculate values used for mapping input */
    if (psmove_calibration_supported(calibration)) {
        /* Accelerometer reading (high/low) for each axis */
        int axlow, axhigh, aylow, ayhigh, azlow, azhigh;
        psmove_calibration_get_usb_accel_values(calibration,
                &axlow, &axhigh, &aylow, &ayhigh, &azlow, &azhigh);

        /**
         * Calculation of accelerometer mapping (as factor of gravity, 1g):
         *
         *                 2 * (raw - low)
         *  calibrated = ------------------  - 1
         *                   (high - low)
         *
         * with:
         *
         *  raw .... Raw sensor reading
         *  low .... Raw reading at -1g
         *  high ... Raw reading at +1g
         *
         * Now define:
         *
         *            2
         *  f = --------------
         *       (high - low)
         *
         * And combine constants:
         *
         *  c = - (f * low) - 1
         *
         * Then we get:
         *
         *  calibrated = f * raw + c
         **/

        /* Accelerometer factors "f" */
        calibration->ax = 2.f / (float)(axhigh - axlow);
        calibration->ay = 2.f / (float)(ayhigh - aylow);
        calibration->az = 2.f / (float)(azhigh - azlow);

        /* Accelerometer constants "c" */
        calibration->bx = - (calibration->ax * (float)axlow) - 1.f;
        calibration->by = - (calibration->ay * (float)aylow) - 1.f;
        calibration->bz = - (calibration->az * (float)azlow) - 1.f;

        /**
         * Calculation of gyroscope mapping (in radiant per second):
         *
         *                   raw
         *  calibrated = ---------- * 2 PI
         *                 rpm60
         *
         *            60 * rpm80
         *  rpm60 = --------------
         *                80
         *
         * with:
         *
         *  raw ..... Raw sensor reading
         *  rpm80 ... Sensor reading at 80 RPM (from calibration blob)
         *  rpm60 ... Sensor reading at 60 RPM (1 rotation per second)
         *
         * Or combined:
         *
         *                 80 * raw * 2 PI
         *  calibrated = -------------------
         *                   60 * rpm80
         *
         * Now define:
         *
         *        2 * PI * 80
         *  f = ---------------
         *         60 * rpm80
         *
         * Then we get:
         *
         *  calibrated = f * raw
         **/
        int gx80, gy80, gz80;
        psmove_calibration_get_usb_gyro_values(calibration, &gx80, &gy80, &gz80);

        float factor = (float)(2.0 * M_PI * 80.0) / 60.0;
        calibration->gx = factor / (float)gx80;
        calibration->gy = factor / (float)gy80;
        calibration->gz = factor / (float)gz80;
    } else {
        /* No calibration data - pass-through input data */
        calibration->ax = 1.f;
        calibration->ay = 1.f;
        calibration->az = 1.f;

        calibration->bx = 0.f;
        calibration->by = 0.f;
        calibration->bz = 0.f;

        calibration->gx = 1.f;
        calibration->gy = 1.f;
        calibration->gz = 1.f;
    }

    return calibration;
}
/**
 * Feed the latest sensor readings into the selected orientation filter.
 *
 * Processes both sensor frames delivered by one psmove_poll() call,
 * dispatching to the fusion algorithm selected in fusion_type. Also
 * re-estimates the effective sampling frequency once per second. If a
 * filter step produces a NaN quaternion, the previous orientation is
 * restored.
 **/
void
psmove_orientation_update(PSMoveOrientation *orientation_state)
{
    psmove_return_if_fail(orientation_state != NULL);

    int frame_half;
    long now = psmove_util_get_ticks();

    /* Re-measure the actual sampling frequency about once per second */
    if (now - orientation_state->sample_freq_measure_start >= 1000) {
        float measured = ((float)orientation_state->sample_freq_measure_count) /
            ((float)(now-orientation_state->sample_freq_measure_start))*1000.f;
        psmove_DEBUG("Measured sample_freq: %f\n", measured);
        orientation_state->sample_freq = measured;
        orientation_state->sample_freq_measure_start = now;
        orientation_state->sample_freq_measure_count = 0;
    }

    /* We get 2 measurements per call to psmove_poll() */
    orientation_state->sample_freq_measure_count += 2;

    /* Saved so a NaN result from the filter can be rolled back */
    Eigen::Quaternionf quaternion_backup = orientation_state->quaternion;
    /* Clamp to at least SAMPLE_FREQUENCY so deltaT stays bounded */
    float deltaT = 1.f / fmax(orientation_state->sample_freq, SAMPLE_FREQUENCY); // time delta = 1/frequency

    /* Each poll delivers two half-frames of accel/gyro data */
    for (frame_half=0; frame_half<2; frame_half++) {
        switch (orientation_state->fusion_type) {
        case OrientationFusion_None:
            break;
        case OrientationFusion_MadgwickIMU:
            {
                // Get the sensor data transformed by the sensor_transform
                PSMove_3AxisVector a= psmove_orientation_get_accelerometer_normalized_vector(orientation_state, (enum PSMove_Frame)(frame_half));
                PSMove_3AxisVector omega= psmove_orientation_get_gyroscope_vector(orientation_state, (enum PSMove_Frame)(frame_half));

                // Apply the filter
                _psmove_orientation_fusion_imu_update(
                    orientation_state,
                    deltaT,
                    /* Gyroscope */
                    Eigen::Vector3f(omega.x, omega.y, omega.z),
                    /* Accelerometer */
                    Eigen::Vector3f(a.x, a.y, a.z));
            }
            break;
        case OrientationFusion_MadgwickMARG:
            {
                /* MARG variants additionally use the magnetometer */
                PSMove_3AxisVector m = psmove_orientation_get_magnetometer_normalized_vector(orientation_state);
                PSMove_3AxisVector a = psmove_orientation_get_accelerometer_normalized_vector(orientation_state, (enum PSMove_Frame)(frame_half));
                PSMove_3AxisVector omega = psmove_orientation_get_gyroscope_vector(orientation_state, (enum PSMove_Frame)(frame_half));

                _psmove_orientation_fusion_madgwick_marg_update(orientation_state,
                    deltaT,
                    /* Gyroscope */
                    Eigen::Vector3f(omega.x, omega.y, omega.z),
                    /* Accelerometer */
                    Eigen::Vector3f(a.x, a.y, a.z),
                    /* Magnetometer */
                    Eigen::Vector3f(m.x, m.y, m.z));
            }
            break;
        case OrientationFusion_ComplementaryMARG:
            {
                PSMove_3AxisVector m= psmove_orientation_get_magnetometer_normalized_vector(orientation_state);
                PSMove_3AxisVector a= psmove_orientation_get_accelerometer_normalized_vector(orientation_state, (enum PSMove_Frame)(frame_half));
                PSMove_3AxisVector omega= psmove_orientation_get_gyroscope_vector(orientation_state, (enum PSMove_Frame)(frame_half));

                // Apply the filter
                _psmove_orientation_fusion_complementary_marg_update(
                    orientation_state,
                    deltaT,
                    /* Gyroscope */
                    Eigen::Vector3f(omega.x, omega.y, omega.z),
                    /* Accelerometer */
                    Eigen::Vector3f(a.x, a.y, a.z),
                    /* Magnetometer */
                    Eigen::Vector3f(m.x, m.y, m.z));
            }
            break;
        }

        /* Roll back if the filter step produced NaN */
        if (!psmove_quaternion_is_valid(orientation_state->quaternion)) {
            psmove_DEBUG("Orientation is NaN!");
            orientation_state->quaternion = quaternion_backup;
        }
    }
}
CameraControl * camera_control_new_with_settings(int cameraID, int width, int height, int framerate, int cam_type) { CameraControl* cc = (CameraControl*) calloc(1, sizeof(CameraControl)); cc->cameraID = cameraID; if (framerate <= 0) { framerate = PSMOVE_TRACKER_DEFAULT_FPS; } if (cam_type == PSMove_Camera_PS3EYE_BLUEDOT) { cc->focl_x = (float)PS3EYE_FOCAL_LENGTH_BLUE; cc->focl_y = (float)PS3EYE_FOCAL_LENGTH_BLUE; } else if (cam_type == PSMove_Camera_PS3EYE_REDDOT) { cc->focl_x = (float)PS3EYE_FOCAL_LENGTH_RED; cc->focl_y = (float)PS3EYE_FOCAL_LENGTH_RED; } else if (cam_type == PSMove_Camera_Unknown) { cc->focl_x = (float)PS3EYE_FOCAL_LENGTH_BLUE; cc->focl_y = (float)PS3EYE_FOCAL_LENGTH_BLUE; } // Needed for cbb tracker. Will be overwritten by camera calibration files if they exist. #if defined(CAMERA_CONTROL_USE_CL_DRIVER) // Windows 32-bit. Either CL_SDK or Registry_requiring int cams = CLEyeGetCameraCount(); if (cams <= cameraID) { free(cc); return NULL; } GUID cguid = CLEyeGetCameraUUID(cameraID); cc->camera = CLEyeCreateCamera(cguid, CLEYE_COLOR_PROCESSED, CLEYE_VGA, framerate); CLEyeCameraGetFrameDimensions(cc->camera, &width, &height); // Depending on color mode chosen, create the appropriate OpenCV image cc->frame4ch = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 4); cc->frame3ch = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); CLEyeCameraStart(cc->camera); #elif defined(CAMERA_CONTROL_USE_PS3EYE_DRIVER) // Mac or Windows // Initialize PS3EYEDriver ps3eye_init(); int cams = ps3eye_count_connected(); psmove_DEBUG("Found %i ps3eye(s) with CAMERA_CONTROL_USE_PS3EYE_DRIVER.\n", cams); if (cams <= cameraID) { free(cc); return NULL; } if (width <= 0 || height <= 0) { get_metrics(&width, &height); } psmove_DEBUG("Attempting to open ps3eye with cameraId, width, height, framerate: %d, %d, %d, %d.\n", cameraID, width, height, framerate); cc->eye = ps3eye_open(cameraID, width, height, framerate); if (cc->eye == NULL) { psmove_WARNING("Failed to open 
camera ID %d", cameraID); free(cc); return NULL; } cc->framebgr = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); #else // Assume webcam accessible from OpenCV. char *video = psmove_util_get_env_string(PSMOVE_TRACKER_FILENAME_ENV); if (video) { psmove_DEBUG("Using '%s' as video input.\n", video); cc->capture = cvCaptureFromFile(video); free(video); } else { cc->capture = cvCaptureFromCAM(cc->cameraID); if (width <= 0 || height <= 0) { get_metrics(&width, &height); } cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_WIDTH, width); cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_HEIGHT, height); } #endif cc->width = width; cc->height = height; cc->deinterlace = PSMove_False; return cc; }
/**
 * Run one orientation filter update from the latest controller readings.
 *
 * Reads the magnetometer once and then both accelerometer/gyroscope
 * half-frames delivered by a single psmove_poll() call, feeding each into
 * the Madgwick AHRS filter. Also re-estimates the effective sampling
 * frequency about once per second. If a filter step yields NaN, the
 * previous quaternion is restored.
 **/
void
psmove_orientation_update(PSMoveOrientation *orientation)
{
    psmove_return_if_fail(orientation != NULL);

    long now = psmove_util_get_ticks();

    /* Once per second, derive the measured sample frequency from the
     * number of samples seen since the last measurement window started */
    if (now - orientation->sample_freq_measure_start >= 1000) {
        float measured = ((float)orientation->sample_freq_measure_count) /
            ((float)(now - orientation->sample_freq_measure_start)) * 1000.;
        psmove_DEBUG("Measured sample_freq: %f\n", measured);

        orientation->sample_freq = measured;
        orientation->sample_freq_measure_start = now;
        orientation->sample_freq_measure_count = 0;
    }

    /* We get 2 measurements per call to psmove_poll() */
    orientation->sample_freq_measure_count += 2;

    psmove_get_magnetometer_vector(orientation->move,
            &orientation->output[6],
            &orientation->output[7],
            &orientation->output[8]);

    /* Keep a copy of the current quaternion for NaN rollback */
    float saved[4];
    int k;
    for (k = 0; k < 4; k++) {
        saved[k] = orientation->quaternion[k];
    }

    int half;
    for (half = 0; half < 2; half++) {
        psmove_get_accelerometer_frame(orientation->move, half,
                &orientation->output[0],
                &orientation->output[1],
                &orientation->output[2]);
        psmove_get_gyroscope_frame(orientation->move, half,
                &orientation->output[3],
                &orientation->output[4],
                &orientation->output[5]);

#if defined(PSMOVE_WITH_MADGWICK_AHRS)
        /* Axes are remapped (x, z, -y) for gyro/accel and (x, z, y) for
         * the magnetometer before entering the filter — presumably to
         * match the AHRS coordinate convention (NOTE: verify against the
         * filter's documentation) */
        MadgwickAHRSupdate(orientation->quaternion,
                orientation->sample_freq, 0.5f,
                /* Gyroscope */
                orientation->output[3],
                orientation->output[5],
                -orientation->output[4],
                /* Accelerometer */
                orientation->output[0],
                orientation->output[2],
                -orientation->output[1],
                /* Magnetometer */
                orientation->output[6],
                orientation->output[8],
                orientation->output[7]);
#else
        psmove_DEBUG("Built without Madgwick AHRS - no orientation tracking");
        return;
#endif

        /* Reject a NaN result and restore the previous orientation */
        int has_nan = 0;
        for (k = 0; k < 4; k++) {
            has_nan |= isnan(orientation->quaternion[k]);
        }
        if (has_nan) {
            psmove_DEBUG("Orientation is NaN!");
            for (k = 0; k < 4; k++) {
                orientation->quaternion[k] = saved[k];
            }
        }
    }
}