-
Notifications
You must be signed in to change notification settings - Fork 1
/
testLevenbergMarquardt.m
157 lines (131 loc) · 4.71 KB
/
testLevenbergMarquardt.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
%% Script to test the Levenberg-Marquardt optimizer
% We consider a Gaussian function as our test case. The function is of the
% form f(x; a, m, s) = a * exp(-0.5*((x - m)/s)^2), where 'a' is the
% amplitude, 'm' is the mean, and 's' is the standard deviation of the
% Gaussian curve.
% Number of observations
numObs = 50;
% True parameters of the Gaussian (amplitude, mean, std dev)
a = 10;
m = 0;
s = 20;
% Draw observations from the Gaussian with the given parameters
x = linspace(-25,25,numObs);
y = generateGaussian(x, a, m, s);
% Initialization (deliberately far from the true parameters)
a0 = 15;
m0 = 45;
s0 = 15;
% Initial parameter vector
p0 = [a0; m0; s0];
% Other constants for LM
% Max number of iters
maxIters = 50;
% Tolerance on gradient magnitude, cost, and successive step size (i.e.,
% when to stop)
tolerance = 1e-15;
% Setup the coefficient matrices, Jacobian, gradient, etc.
% Parameter vector
p = p0;
% Jacobian
J = getJacobianOfGaussian(x, p);
% Compute the error in the Gaussian, given an estimate of the parameter
% vector, and the observations.
residual = getResidualGaussian(x, p, y);
% Compute error (L2 norm of the residual vector)
err = norm(residual,2);
% Store initial error (for analysis)
initErr = err;
% Gradient of the 0.5*||residual||^2 cost
g = J'*residual;
% Initialize the damping parameter as a small multiple of the largest
% diagonal entry of J'*J (Marquardt's heuristic).
% NOTE(review): 10e-3 evaluates to 0.01; the customary scale factor in
% the literature is 1e-3 -- confirm which was intended.
dampingCoeff = 10e-3 * max(diag(J'*J));
% Coefficient matrix of the normal equations
A = J'*J;
% Store errors over time (for the convergence plot)
errValues = initErr;
% Number of successful and unsuccessful steps
numSuccess = 0;
numFail = 0;
% Stopping criterion (if the gradient is already too small, stop)
stop = (norm(g,'inf') < tolerance);
% LM iterations
for k = 1:maxIters
    % Exit as soon as a stopping criterion has been met. (The original
    % only guarded the body with "if ~stop", so the loop kept spinning
    % through empty iterations until maxIters and the final value of k
    % did not reflect the actual number of iterations performed.)
    if stop
        break;
    end
    % Solve the damped normal equations for the LM parameter update
    deltap = (A + dampingCoeff*eye(size(A,1))) \ g;
    % If the magnitude (L2 norm) of the update is below tolerance, stop
    if norm(deltap) < tolerance
        stop = 1;
    else
        % Tentative update: do not overwrite p yet -- first check
        % whether the step actually decreases the cost.
        pnew = p - deltap;
        % Error resulting from this tentative update
        newErr = norm(getResidualGaussian(x, pnew, y),2);
        % Accept the step only if it reduces the error (trust-region
        % behavior); otherwise increase damping and retry.
        if newErr < err
            errValues = [errValues, newErr]; %#ok<AGROW>
            if abs(newErr - err) < tolerance
                % Cost improvement below tolerance: converged
                stop = true;
                break;
            else
                numSuccess = numSuccess + 1;
                % Accept the step: refresh p, J, residual, err, A, g
                p = pnew;
                J = getJacobianOfGaussian(x, p);
                residual = getResidualGaussian(x, p, y);
                err = norm(residual,2);
                A = J'*J;
                g = J'*residual;
                % Determine if a stopping criterion is attained
                stop = (norm(g,'inf') <= tolerance) || (err <= tolerance);
                % Successful step: shrink damping so the next iteration
                % behaves more like Gauss-Newton.
                dampingCoeff = dampingCoeff / 2;
            end
        else
            % Failed step (error increased): grow damping so the next
            % iteration behaves more like gradient descent.
            numFail = numFail + 1;
            dampingCoeff = dampingCoeff * 2;
        end
    end
end
% Predicted Gaussian using the OPTIMIZED parameters.
% BUG FIX: the original evaluated generateGaussian(x, a, m, s) with the
% TRUE parameters, so the "Predicted" curve coincided with ground truth
% no matter what the optimizer did.
y_hat = generateGaussian(x, p(1), p(2), p(3));
% Analysis of optimization
plot(errValues, 'LineWidth', 2, 'Color', 'g');
xlabel('Iterations');
ylabel('Loss');
title('Error plot - LM');
figure;
scatter(x, y, 'filled', 'g');
hold on;
scatter(x, y_hat, 'filled', 'r');
xlabel('X');
ylabel('Y');
title('Estimated parameters - LM');
legend('Ground-Truth', 'Predicted');
% Trailing newlines added so the summary lines do not run together
fprintf('Number of total iterations: %d\n', k);
fprintf('Number of successful steps: %d\n', numSuccess);
fprintf('Number of unsuccessful steps: %d\n', numFail);
fprintf('Estimated parameters of the Gaussian: ');
p'
fprintf('True parameters of the Gaussian: ');
[a, m, s]
fprintf('Difference in estimated and true parameters: ');
norm(p-[a;m;s],1)