
Soft time synchronization for multi-sensor fusion

When the sensors are not hardware-synchronized, the IMU reading at the target timestamp sync_time is obtained by linearly interpolating between the IMU sample just before it (front_data) and the one just after it (back_data):

// Interpolation weights computed from the timestamps; the two weights sum to 1.
double front_scale = (back_data.time - sync_time) / (back_data.time - front_data.time);
double back_scale = (sync_time - front_data.time) / (back_data.time - front_data.time);

synced_data.time = sync_time;

// Linearly interpolate the linear acceleration.
synced_data.linear_acceleration.x = front_data.linear_acceleration.x * front_scale + back_data.linear_acceleration.x * back_scale;
synced_data.linear_acceleration.y = front_data.linear_acceleration.y * front_scale + back_data.linear_acceleration.y * back_scale;
synced_data.linear_acceleration.z = front_data.linear_acceleration.z * front_scale + back_data.linear_acceleration.z * back_scale;

// Linearly interpolate the angular velocity.
synced_data.angular_velocity.x = front_data.angular_velocity.x * front_scale + back_data.angular_velocity.x * back_scale;
synced_data.angular_velocity.y = front_data.angular_velocity.y * front_scale + back_data.angular_velocity.y * back_scale;
synced_data.angular_velocity.z = front_data.angular_velocity.z * front_scale + back_data.angular_velocity.z * back_scale;

// Quaternion interpolation can be linear (LERP) or spherical (SLERP). SLERP is more accurate in
// general, but because front_data and back_data are neighboring measurements the difference
// between the two quaternions is small, so linear interpolation gives comparable accuracy here.
synced_data.orientation.x = front_data.orientation.x * front_scale + back_data.orientation.x * back_scale;
synced_data.orientation.y = front_data.orientation.y * front_scale + back_data.orientation.y * back_scale;
synced_data.orientation.z = front_data.orientation.z * front_scale + back_data.orientation.z * back_scale;
synced_data.orientation.w = front_data.orientation.w * front_scale + back_data.orientation.w * back_scale;

// Renormalize after linear interpolation so the result is a unit quaternion.
synced_data.orientation.Normlize();
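The snippet above assumes front_data and back_data have already been chosen so that they bracket sync_time. Below is a minimal, self-contained sketch of that selection step plus the same interpolation; the ImuSample struct, the SyncImuData function, and the use of Eigen types are illustrative assumptions for this sketch, not part of the original code.

#include <cstddef>
#include <deque>
#include <Eigen/Dense>

// Hypothetical IMU sample type used only for this sketch.
struct ImuSample {
    double time = 0.0;
    Eigen::Vector3d linear_acceleration = Eigen::Vector3d::Zero();
    Eigen::Vector3d angular_velocity = Eigen::Vector3d::Zero();
    Eigen::Quaterniond orientation = Eigen::Quaterniond::Identity();
};

// Interpolate the buffered IMU stream at sync_time.
// Returns false if sync_time is not bracketed by two buffered samples.
bool SyncImuData(const std::deque<ImuSample>& buffer, double sync_time, ImuSample& synced) {
    for (std::size_t i = 1; i < buffer.size(); ++i) {
        const ImuSample& front = buffer[i - 1];   // sample just before sync_time
        const ImuSample& back  = buffer[i];       // sample just after sync_time
        if (front.time <= sync_time && sync_time <= back.time) {
            const double dt = back.time - front.time;
            if (dt <= 0.0) return false;          // degenerate or out-of-order interval

            const double back_scale  = (sync_time - front.time) / dt;
            const double front_scale = 1.0 - back_scale;

            synced.time = sync_time;
            synced.linear_acceleration =
                front_scale * front.linear_acceleration + back_scale * back.linear_acceleration;
            synced.angular_velocity =
                front_scale * front.angular_velocity + back_scale * back.angular_velocity;

            // Linear interpolation of the quaternion coefficients, then renormalization.
            synced.orientation.coeffs() =
                front_scale * front.orientation.coeffs() + back_scale * back.orientation.coeffs();
            synced.orientation.normalize();
            return true;
        }
    }
    return false;   // sync_time lies outside the buffered time span
}

If spherical interpolation is preferred, Eigen's front.orientation.slerp(back_scale, back.orientation) can replace the coefficient-wise blend; for neighboring IMU samples the two results are nearly identical, as noted in the comments above.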