Robust IMU-based Attitude Neural Network (RIANN) for orientation estimation using IMU data.

This class implements efficient quaternion-based attitude estimation from accelerometer and gyroscope data using a neural network approach. The implementation is optimized for step-by-step processing in real-time applications.
| Parameter | Type | Default | Details |
|-----------|------|---------|---------|
| onnx_path | NoneType | None | Path to the RIANN onnx file. Defaults to the file provided with the package. |
# Example 1: Basic usage with batch processing

# Prepare sample IMU data
sequence_length = 100
acc = np.ones((sequence_length, 3))   # Accelerometer data [m/s²]
gyr = np.zeros((sequence_length, 3))  # Gyroscope data [rad/s]
fs = 200  # Sampling rate [Hz]

# Initialize RIANN
riann = RIANN()

# Process all data at once
quaternions = riann.predict(acc, gyr, fs)
print(f"Quaternions shape: {quaternions.shape}")  # (100, 4)
Quaternions shape: (100, 4)
# Example 2: Real-time processing

# Initialize RIANN for real-time processing
riann = RIANN()
riann.set_sampling_rate(100)  # 100 Hz
riann.reset_state()

# Simulated IMU data stream
def simulate_imu_data():
    """Simulate a single IMU reading.

    Returns a (acc, gyr) tuple of length-3 arrays: gravity-aligned
    accelerometer reading [m/s²] and zero angular rate [rad/s], each
    with Gaussian noise added.
    """
    # In a real application, this would read from actual sensors
    acc = np.array([0.0, 0.0, 9.81]) + np.random.normal(0, 0.1, 3)  # Add noise
    gyr = np.array([0.0, 0.0, 0.0]) + np.random.normal(0, 0.01, 3)  # Add noise
    return acc, gyr

# Simulated real-time loop
orientations = []
for _ in range(100):  # Process 100 readings
    # Read IMU data
    acc, gyr = simulate_imu_data()

    # Process data to get orientation
    quaternion = riann.predict_step(acc, gyr)
    orientations.append(quaternion)

    # In a real application, you might use the orientation here
    # for control, visualization, etc.

    # Simulate sensor sampling rate
    time.sleep(0.01)  # 100 Hz

orientations = np.array(orientations)
print(f"Collected {len(orientations)} orientation estimates")
Collected 100 orientation estimates
# Example 3: Processing existing datasets
import h5py

# Load data from file (example using the BROAD dataset as shown in RIANN_Example.ipynb)
# In your application, adapt this to your data source
try:
    with h5py.File('data_hdf5/01_undisturbed_slow_rotation_A.hdf5', 'r') as f:
        acc = f['imu_acc'][:]        # Accelerometer data
        gyr = f['imu_gyr'][:]        # Gyroscope data
        ref_quat = f['opt_quat'][:]  # Reference quaternions (if available)
        fs = f.attrs['sampling_rate']  # Sampling rate
except FileNotFoundError:
    # Create dummy data if file not found
    sequence_length = 1000
    acc = np.ones((sequence_length, 3))
    gyr = np.zeros((sequence_length, 3))
    ref_quat = None
    fs = 200

# Initialize RIANN
riann = RIANN()

# Process data
est_quat = riann.predict(acc, gyr, fs)

# If reference data is available, calculate error
if ref_quat is not None:
    # Helper function for attitude error calculation
    def calculate_attitude_error(q1, q2):
        """Calculate attitude error in degrees between two quaternion arrays.

        q1, q2: (N, 4) arrays of unit quaternions. Returns an (N,) array of
        angular differences in degrees.
        """
        # Simple dot product method (accurate for small angles)
        dot_products = np.sum(q1 * q2, axis=1)
        dot_products = np.clip(dot_products, -1.0, 1.0)  # Ensure valid acos input
        angles_rad = 2 * np.arccos(np.abs(dot_products))
        return angles_rad * 180 / np.pi  # Convert to degrees

    # Calculate errors
    errors = calculate_attitude_error(est_quat, ref_quat)

    # Plot error
    plt.figure(figsize=(10, 6))
    plt.plot(errors)
    plt.xlabel('Sample')
    plt.ylabel('Attitude Error (degrees)')
    plt.title('Orientation Estimation Error')
    plt.grid(True)
    plt.show()

    print(f"Mean error: {np.mean(errors):.2f} degrees")
    print(f"Max error: {np.max(errors):.2f} degrees")
# Example 4: Advanced usage with step-by-step processing and state tracking

# Create synthetic IMU data with known motion pattern
def generate_synthetic_data(fs=100, duration=5):
    """Generate synthetic IMU data for a specific motion pattern.

    fs: sampling rate [Hz]; duration: length of the sequence [s].
    Returns (t, acc, gyr): timestamps, (N, 3) accelerometer data [m/s²],
    and (N, 3) gyroscope data [rad/s], both with Gaussian noise added.
    """
    n_samples = int(fs * duration)
    t = np.linspace(0, duration, n_samples)

    # Initialize arrays
    acc = np.zeros((n_samples, 3))
    gyr = np.zeros((n_samples, 3))

    # Create a simple rotation pattern (rotation around y-axis)
    # NOTE: loop variable named t_i (not `time`) so it cannot shadow the
    # `time` module used elsewhere in these examples.
    for i, t_i in enumerate(t):
        # Gyroscope data (constant rotation around y-axis)
        if 1.0 <= t_i <= 3.0:
            gyr[i, 1] = 0.5  # 0.5 rad/s around y-axis

        # Accelerometer data (gravity vector rotated accordingly)
        if t_i < 1.0:
            # Initial position - gravity along z
            acc[i] = [0, 0, 9.81]
        elif t_i < 3.0:
            # During rotation - rotate gravity vector
            angle = 0.5 * (t_i - 1.0)  # 0.5 rad/s * elapsed rotation time
            acc[i] = [9.81 * np.sin(angle), 0, 9.81 * np.cos(angle)]
        else:
            # Final position - gravity rotated
            acc[i] = [9.81 * np.sin(1.0), 0, 9.81 * np.cos(1.0)]

    # Add noise to make it realistic
    acc += np.random.normal(0, 0.1, acc.shape)
    gyr += np.random.normal(0, 0.01, gyr.shape)

    return t, acc, gyr

# Generate data
fs = 100
t, acc, gyr = generate_synthetic_data(fs=fs)

# Initialize RIANN
riann = RIANN()

# Process data in chunks to simulate real-time processing with state tracking
chunk_size = 50  # Process 50 samples at a time
n_chunks = len(acc) // chunk_size

# Store all quaternions
all_quaternions = []

for chunk in range(n_chunks):
    start_idx = chunk * chunk_size
    end_idx = start_idx + chunk_size

    # Process this chunk
    if chunk == 0:
        # First chunk - initialize state
        riann.reset_state()

    # Process chunk step by step
    for i in range(start_idx, end_idx):
        quat = riann.predict_step(acc[i], gyr[i], fs)
        all_quaternions.append(quat)

    # At this point in a real application, you could:
    # 1. Use the current orientation for feedback
    # 2. Save the current state for later resume
    # 3. Transmit the orientation to another system
    print(f"Processed chunk {chunk+1}/{n_chunks}, "
          + f"current orientation: w={quat[0]:.2f}, x={quat[1]:.2f}, "
          + f"y={quat[2]:.2f}, z={quat[3]:.2f}")

all_quaternions = np.array(all_quaternions)
print(f"Generated {len(all_quaternions)} orientation estimates")
Processed chunk 1/10, current orientation: w=-0.79, x=0.00, y=-0.00, z=0.61
Processed chunk 2/10, current orientation: w=-0.81, x=-0.00, y=-0.00, z=0.59
Processed chunk 3/10, current orientation: w=-0.90, x=-0.04, y=-0.09, z=0.42
Processed chunk 4/10, current orientation: w=-0.98, x=-0.00, y=-0.16, z=0.09
Processed chunk 5/10, current orientation: w=-0.94, x=0.08, y=-0.18, z=-0.28
Processed chunk 6/10, current orientation: w=-0.87, x=0.15, y=-0.17, z=-0.44
Processed chunk 7/10, current orientation: w=-0.75, x=0.04, y=0.02, z=0.66
Processed chunk 8/10, current orientation: w=-0.56, x=0.08, y=0.10, z=0.82
Processed chunk 9/10, current orientation: w=-0.53, x=0.11, y=0.13, z=0.83
Processed chunk 10/10, current orientation: w=-0.55, x=0.15, y=0.15, z=0.81
Generated 500 orientation estimates