Commit 96b50a1

keep coding
1 parent 0cedcc0 commit 96b50a1

1 file changed: +22 -4 lines changed

SLAM/FastSLAM/fast_slam.py

Lines changed: 22 additions & 4 deletions
@@ -14,6 +14,7 @@
 # EKF state covariance
 Cx = np.diag([0.5, 0.5, math.radians(30.0)])**2

+
 # Simulation parameter
 Qsim = np.diag([0.2, math.radians(1.0)])**2
 Rsim = np.diag([1.0, math.radians(10.0)])**2
@@ -40,12 +41,18 @@ def __init__(self, N_LM):
         self.y = 0.0
         self.yaw = 0.0
         self.lm = np.zeros((N_LM, 2))
+        self.lmP = [np.zeros((2, 2))] * N_LM


 def normalize_weight(particles):

     sumw = sum([particles[ip].w for ip in range(N_PARTICLE)])

+    # if sumw <= 0.0000001:
+    #     for i in range(N_PARTICLE):
+    #         particles[i].w = 1.0 / N_PARTICLE
+    #     return particles
+
     for i in range(N_PARTICLE):
         particles[i].w = particles[i].w / sumw

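Note on the new lmP field: a quick standalone sketch (not part of this commit) of why the list-multiplication initialization is safe here. [np.zeros((2, 2))] * N_LM creates N_LM references to one shared zeros array, but add_new_lm below reassigns each slot rather than mutating it in place, so the aliasing never leaks.

    import numpy as np

    N_LM = 3
    lmP = [np.zeros((2, 2))] * N_LM     # all entries alias one zeros array
    print(lmP[0] is lmP[1])             # True

    lmP[0] = np.eye(2) * 0.5            # reassignment (as in add_new_lm) breaks the alias
    print(lmP[1])                       # still all zeros

    # an alias-free alternative would be:
    # lmP = [np.zeros((2, 2)) for _ in range(N_LM)]
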
@@ -94,6 +101,12 @@ def add_new_lm(particle, z):
     particle.lm[lm_id, 0] = particle.x + r * c
     particle.lm[lm_id, 1] = particle.y + r * s

+    # covariance
+    Gz = np.matrix([[c, -r * s],
+                    [s, r * c]])
+
+    particle.lmP[lm_id] = Gz * Cx[0:2, 0:2] * Gz.T
+
     return particle


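For context, the added covariance initialization propagates 2x2 measurement noise through the Jacobian of the (range, bearing) -> (x, y) landmark mapping, i.e. P = Gz * Q * Gz^T, with Q taken here from Cx[0:2, 0:2]. Below is a rough ndarray-based sketch of that same step; the function name and sample inputs are illustrative only, not taken from the file.

    import math
    import numpy as np

    def init_landmark_cov(r, b, yaw, Q):
        # Jacobian of (range, bearing) -> (x, y) at the measured landmark
        s = math.sin(yaw + b)
        c = math.cos(yaw + b)
        Gz = np.array([[c, -r * s],
                       [s,  r * c]])
        return Gz @ Q @ Gz.T            # propagate measurement noise

    Cx = np.diag([0.5, 0.5, math.radians(30.0)]) ** 2
    print(init_landmark_cov(5.0, 0.1, 0.0, Cx[0:2, 0:2]))
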
@@ -117,7 +130,7 @@ def compute_weight(particle, z):
     zxy[0, 1] = particle.y + r * s

     dx = (lmxy - zxy).T
-    S = np.eye(2)
+    S = particle.lmP[lm_id]

     num = math.exp(-0.5 * dx.T * np.linalg.inv(S) * dx)
     den = 2.0 * math.pi * math.sqrt(np.linalg.det(S))
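With S switched from np.eye(2) to the per-landmark covariance, the weight is the 2-D Gaussian likelihood of the innovation dx: w = exp(-0.5 * dx^T S^-1 dx) / (2 * pi * sqrt(det S)). A minimal ndarray version of that formula, with made-up numbers, just to show the shapes involved:

    import math
    import numpy as np

    def gauss_likelihood(dx, S):
        # dx: 2x1 innovation, S: 2x2 innovation covariance
        num = math.exp(-0.5 * float(dx.T @ np.linalg.inv(S) @ dx))
        den = 2.0 * math.pi * math.sqrt(np.linalg.det(S))
        return num / den

    dx = np.array([[0.1], [0.05]])
    S = np.diag([0.25, 0.25])
    print(gauss_likelihood(dx, S))      # ~0.62 for these inputs
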
@@ -140,7 +153,7 @@ def update_with_observation(particles, z):
             # known landmark
             else:
                 w = compute_weight(particles[ip], z[iz, :])  # w = p(z_k | x_k)
-                particles[ip].w = particles[ip].w * w
+                particles[ip].w = particles[ip].w + w
                 # particles(i)= feature_update(particles(i), zf, idf, R)

     return particles

@@ -151,25 +164,30 @@ def resampling(particles):
     low variance re-sampling
     """

+    particles = normalize_weight(particles)
+
     pw = []
     for i in range(N_PARTICLE):
         pw.append(particles[i].w)

     pw = np.matrix(pw)
+    # print(pw)

     Neff = 1.0 / (pw * pw.T)[0, 0]  # Effective particle number
+    # print(Neff)

     if Neff < NTH:  # resampling
+        print("resampling")
         wcum = np.cumsum(pw)
         base = np.cumsum(pw * 0.0 + 1 / N_PARTICLE) - 1 / N_PARTICLE
         resampleid = base + np.random.rand(base.shape[1]) / N_PARTICLE

         inds = []
         ind = 0
         for ip in range(N_PARTICLE):
-            while resampleid[0, ip] > wcum[0, ind]:
+            while ((ind < wcum.shape[1] - 1) and (resampleid[0, ip] > wcum[0, ind])):
                 ind += 1
-            inds.append(ind)
+            inds.append(ind)

         tparticles = particles[:]
         for i in range(len(inds)):

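The resampling changes read as: weights are normalized first, Neff = 1 / sum(w_i^2) measures the effective number of particles, and when it drops below NTH the cumulative-weight search picks the new particle indices; the added bound on ind keeps the search from running past the last entry of wcum when round-off leaves the final cumulative weight slightly below a sample point. A compact standalone sketch of that index selection over plain ndarrays (the weights and the NTH-style threshold below are made up for illustration):

    import numpy as np

    def resample_indices(w, nth_ratio=1.5, rng=np.random):
        w = np.asarray(w, dtype=float)
        w = w / w.sum()                               # normalize weights
        n = len(w)
        neff = 1.0 / np.sum(w ** 2)                   # effective particle number
        if neff >= n / nth_ratio:                     # illustrative NTH = n / 1.5
            return np.arange(n)                       # spread still healthy, keep all
        wcum = np.cumsum(w)
        sample = np.arange(n) / n + rng.rand(n) / n   # one random offset per slot, as in the diff
        inds, ind = [], 0
        for s in sample:
            while ind < n - 1 and s > wcum[ind]:      # bounds check mirrors the new while condition
                ind += 1
            inds.append(ind)
        return np.array(inds)

    print(resample_indices([0.7, 0.1, 0.1, 0.1]))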