-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathRobotEKF.m
More file actions
117 lines (90 loc) · 3.63 KB
/
RobotEKF.m
File metadata and controls
117 lines (90 loc) · 3.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
classdef RobotEKF < handle
    % RobotEKF  Extended Kalman Filter for a planar robot.
    %
    % State vector: [x; y; theta] (3x1 column).
    % predict()           fuses encoder distance + IMU heading (odometry).
    % updateWithCamera()  fuses an absolute camera pose measurement.
    properties
        state       % 3x1 state vector [x; y; theta]
        P           % 3x3 state covariance matrix
        R_camera    % 3x3 camera measurement noise (more precise sensor)
        R_encoder   % 3x3 encoder measurement noise (less precise sensor)
        Q           % 3x3 process noise covariance (model uncertainty)
        dt          % sampling time in seconds
    end
    methods
        function obj = RobotEKF(initial_state, dt)
            % Constructor.
            %   initial_state : initial [x, y, theta] (row or column)
            %   dt            : sampling time
            %
            % Force the state to a 3x1 column. A row-vector state would
            % make `obj.state + K*z_innov` in updateWithCamera implicitly
            % expand into a 3x3 matrix (MATLAB R2016b+ broadcasting)
            % instead of erroring, silently corrupting the filter.
            obj.state = initial_state(:);
            obj.dt = dt;
            % High initial uncertainty in position, tighter on heading.
            obj.P = diag([10, 10, 0.1]);
            % Sensor noise covariances: lower value = higher trust.
            obj.R_camera = diag([0.5, 0.5, 0.01]);  % camera: more precise
            obj.R_encoder = diag([2, 2, 0.1]);      % encoder: less precise
            % Process noise (model uncertainty).
            obj.Q = diag([0.1, 0.1, 0.01]);
        end
        function [state, covariance] = predict(obj, encoder_distance, imu_angle)
            % Prediction step using the non-linear unicycle motion model:
            %   x' = x + d*cos(theta),  y' = y + d*sin(theta),  theta' = imu_angle
            %   encoder_distance : distance traveled since last step
            %   imu_angle        : absolute heading reported by the IMU
            theta = obj.state(3);
            dx = encoder_distance * cos(theta);
            dy = encoder_distance * sin(theta);
            obj.state(1) = obj.state(1) + dx;
            obj.state(2) = obj.state(2) + dy;
            obj.state(3) = imu_angle;
            % Jacobian of the motion model w.r.t. [x, y, theta]:
            %   d(x + d*cos th)/d th = -d*sin th = -dy
            %   d(y + d*sin th)/d th =  d*cos th =  dx
            % (The original used -dx*sin(theta) and dy*cos(theta), which
            % double-applies the trig factor and zeroes the theta coupling
            % at theta = 0.)
            % NOTE(review): F(3,3)=1 models the IMU heading as theta plus
            % noise even though the state is overwritten — confirm intent.
            F = [1, 0, -dy;
                 0, 1,  dx;
                 0, 0,  1];
            % Propagate covariance through the linearized model.
            obj.P = F * obj.P * F' + obj.Q;
            state = obj.state;
            covariance = obj.P;
        end
        function [state, covariance] = updateWithCamera(obj, camera_coords)
            % Camera measurement update step.
            %   camera_coords : measured [x, y, theta] pose (any orientation)
            % Innovation: measurement minus predicted measurement.
            z_innov = [camera_coords(1) - obj.state(1);
                       camera_coords(2) - obj.state(2);
                       camera_coords(3) - obj.state(3)];
            % Wrap the heading innovation to [-pi, pi] so a measurement on
            % the other side of the +/-pi seam does not produce a huge
            % spurious correction.
            z_innov(3) = atan2(sin(z_innov(3)), cos(z_innov(3)));
            % The camera observes the full state directly.
            H = eye(3);
            % Innovation covariance and Kalman gain (X/S solves X*inv(S)).
            S = H * obj.P * H' + obj.R_camera;
            K = obj.P * H' / S;
            % State and covariance update.
            obj.state = obj.state + K * z_innov;
            obj.P = (eye(3) - K*H) * obj.P;
            state = obj.state;
            covariance = obj.P;
        end
    end
end
% Demonstration driver for RobotEKF.
% NOTE(review): a classdef .m file may legally contain only the class
% definition — this function presumably belongs in its own file; confirm
% the intended project layout.
function main()
    % Filter starting at the origin with a 50 ms sample time.
    sample_time = 0.05;
    ekf = RobotEKF([0, 0, 0], sample_time);
    step_length = 0.1;  % simulated 10 cm of travel per iteration
    for k = 1:1000
        % Simulated odometry: fixed step plus a small random heading.
        heading = rand() * pi/18;
        [predicted_state, predicted_cov] = ekf.predict(step_length, heading);
        % Fuse a noisy absolute camera fix every 20th iteration.
        if mod(k, 20) == 0
            noisy_fix = [predicted_state(1) + randn()*0.5, ...
                         predicted_state(2) + randn()*0.5, ...
                         predicted_state(3) + randn()*0.1];
            [fused_state, fused_cov] = ekf.updateWithCamera(noisy_fix);
            % Visualization or logging could be added here.
        end
    end
end