-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy pathp5_finalhunt.py
365 lines (308 loc) · 12.8 KB
/
p5_finalhunt.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
# ----------
# Part Five
#
# This time, the sensor measurements from the runaway Traxbot will be VERY
# noisy (about twice the target's stepsize). You will use this noisy stream
# of measurements to localize and catch the target.
#
# ----------
# YOUR JOB
#
# Complete the next_move function, similar to how you did last time.
#
# ----------
# GRADING
#
# Same as part 3 and 4. Again, try to catch the target in as few steps as possible.
import random
from collections import deque
from math import *

from matrix import *
from robot import *
def memo(f):
    """Decorator that caches the return value for each call to f(*args).

    Results are stored in a per-function dict keyed by the positional
    argument tuple, so repeated calls with the same hashable arguments
    return the cached value.  Calls with unhashable arguments bypass the
    cache and invoke f directly.
    """
    cache = {}
    def _f(*args):
        try:
            return cache[args]
        except KeyError:
            # First time we see these args: compute, store, return.
            cache[args] = result = f(*args)
            return result
        except TypeError:
            # Some element of args can't be a dict key (unhashable).
            # BUG FIX: the original called f(args), passing the whole
            # tuple as a single argument instead of unpacking it.
            return f(*args)
    return _f
@memo
def identity_matrix(n):
    """Return an n-by-n identity matrix, memoized per dimension.

    NOTE(review): the cached matrix object is shared between callers;
    this assumes callers treat it as read-only -- confirm before mutating.
    """
    ident = matrix([[]])
    ident.identity(n)
    return ident
def extended_kalman_filter(z, x, u, P, F_fn, x_fn, H, R):
    """Run one predict/update cycle of an extended Kalman filter.

    z    -- measurement (sequence convertible to a row matrix)
    x    -- prior state estimate (column matrix)
    u    -- control vector, forwarded to x_fn
    P    -- prior state covariance
    F_fn -- callable returning the dynamics Jacobian linearized at x
    x_fn -- callable applying the non-linear dynamics to (x, u)
    H    -- measurement matrix
    R    -- measurement covariance

    Returns the posterior (x, P) pair.
    """
    eye = identity_matrix(x.dimx)

    # Predict: push the state through the non-linear dynamics and the
    # covariance through the linearized dynamics.
    jacobian = F_fn(x)
    x = x_fn(x, u)
    P = jacobian * P * jacobian.transpose()

    # Update: fold the measurement in via the Kalman gain.
    residual = matrix([z]).transpose() - (H * x)
    innovation_cov = H * P * H.transpose() + R
    gain = P * H.transpose() * innovation_cov.inverse()
    x = x + (gain * residual)
    P = (eye - (gain * H)) * P
    return x, P
def robot_F_fn(state, dt = 1.0):
    """
    Transition matrix for robot dynamics.
    Linearize dynamics about 'state' for EKF.
    xdot = v*cos(theta)
    ydot = v*sin(theta)
    thetadot = w
    vdot = 0 -> step size
    wdot = 0 -> turn angle per step
    (Matches robot_x_fn, which advances x/y with cos(theta)/sin(theta)
    before adding the turn w to theta.)
    """
    # Unpack the 5x1 state column vector [x, y, theta, v, w].
    x = state.value[0][0]
    y = state.value[1][0]
    theta = state.value[2][0]
    v = state.value[3][0]
    w = state.value[4][0]
    # Jacobian of the dynamics w.r.t. the state, evaluated at 'state'.
    # Rows: xdot, ydot, thetadot, vdot, wdot;
    # columns: d/dx, d/dy, d/dtheta, d/dv, d/dw.
    J = matrix([[0., 0., -v*sin(theta), cos(theta), 0.],
                [0., 0., v*cos(theta), sin(theta), 0.],
                [0., 0., 0., 0., 1.],
                [0., 0., 0., 0., 0.],
                [0., 0., 0., 0., 0.],
                ])
    # First-order (Euler) discretization: F = I + J*dt.
    I = matrix([[]])
    I.identity(5)
    return I + J*dt
def robot_x_fn(state, u = None, dt=1.0):
    """Advance the non-linear robot dynamics by one time step.

    state -- 5x1 column matrix [x, y, theta, v, w]
    u     -- unused; kept so the signature matches the EKF interface
    dt    -- step duration

    Returns the successor state as a new 5x1 matrix: step forward along
    the current heading, then turn by w.
    """
    px, py, heading, speed, turn = (state.value[i][0] for i in range(5))
    px += speed * cos(heading) * dt
    py += speed * sin(heading) * dt
    heading += turn * dt
    return matrix([[px],
                   [py],
                   [heading],
                   [speed],
                   [turn]])
def state_from_measurements(three_measurements):
    """
    Estimates the state of the robot from the last three measurements.
    Assumes each movement of the robot is a "step" and a "turn"; three
    measurements constitute two moves, from which turn angle, heading
    and step size can be inferred.

    Returns a 5x1 state matrix [x, y, theta, v, w] anchored at the most
    recent measurement.

    NOTE(review): raises ZeroDivisionError if two consecutive
    measurements coincide exactly -- effectively impossible with
    continuous sensor noise.
    """
    x1, y1 = three_measurements[-3]
    x2, y2 = three_measurements[-2]
    x3, y3 = three_measurements[-1]
    # Last two displacement vectors between consecutive measurements.
    vec_1 = [x2 - x1, y2 - y1]
    vec_2 = [x3 - x2, y3 - y2]
    # Find the last turning angle using the dot product.
    dot = sum(v1*v2 for v1, v2 in zip(vec_1, vec_2))
    mag_v1 = sqrt(sum(v**2 for v in vec_1))
    mag_v2 = sqrt(sum(v**2 for v in vec_2))
    v0 = mag_v2
    # BUG FIX: clamp the cosine to [-1, 1].  Measurement noise can push
    # the ratio fractionally outside acos's domain, which would raise a
    # ValueError in the original code.
    cos_w = max(-1.0, min(1.0, dot / (mag_v1 * mag_v2)))
    w0 = acos(cos_w)
    if dot < 0:
        w0 = pi - w0
    # Heading after the last move plus one more turn.
    # (Dead locals x0/y0 from the original -- computed but never used --
    # have been removed.)
    theta0 = atan2(vec_2[1], vec_2[0]) + w0
    return matrix([[x3], [y3], [theta0], [v0], [w0]])
def next_move(hunter_position, hunter_heading, target_measurement, max_distance, OTHER = None):
    """Plan one hunter move toward the (noisy) target.

    Keeps the last three target measurements in OTHER (a bounded deque),
    infers the target's motion from them, and steers toward the target's
    predicted position num_steps moves ahead so the hunter can intercept.

    Returns (turning, distance, OTHER).

    NOTE: requires `from collections import deque` at module level; the
    original file used deque without importing it (NameError at runtime).
    """
    # This function is called once after each target move.
    if OTHER is None:
        OTHER = deque(maxlen=3)
    OTHER.append(target_measurement)
    if len(OTHER) < 3:
        # Not enough history to infer motion yet: chase the raw
        # measurement directly at full speed.
        heading_to_target = get_heading(hunter_position, target_measurement)
        turning = heading_to_target - hunter_heading  # turn towards the target
        distance = max_distance  # full speed ahead!
    else:
        # Estimate the target's current state from the last three readings.
        state = state_from_measurements(OTHER)
        # Look ahead until the hunter can cover the separation within the
        # same number of steps the target needs to reach the predicted
        # point.  BUG FIX: the original `while True` could spin forever
        # when the noisy speed estimate exceeds max_distance, so the
        # search is capped; after the cap we simply chase the furthest
        # prediction.
        max_lookahead = 1000
        num_steps = 1
        while num_steps <= max_lookahead:
            state = robot_x_fn(state)
            est_target_pos = [state.value[0][0], state.value[1][0]]
            separation = distance_between(est_target_pos, hunter_position)
            if separation < num_steps * max_distance:
                break
            num_steps += 1
        heading_to_target = get_heading(hunter_position, est_target_pos)
        turning = heading_to_target - hunter_heading  # turn towards the target
        distance = min(distance_between(est_target_pos, hunter_position), max_distance)
    return turning, distance, OTHER
def distance_between(point1, point2):
    """Euclidean distance between two (x, y) points."""
    dx = point2[0] - point1[0]
    dy = point2[1] - point1[1]
    return sqrt(dx ** 2 + dy ** 2)
def demo_grading(hunter_bot, target_bot, next_move_fcn, OTHER = None):
    """Returns True if your next_move_fcn successfully guides the hunter_bot
    to the target_bot. This function is here to help you understand how we
    will grade your submission."""
    max_distance = 0.97 * target_bot.distance  # 0.97 is an example. It will change.
    # The hunter must close to within 2% of the target's step size.
    separation_tolerance = 0.02 * target_bot.distance
    caught = False
    ctr = 0
    # Keep calling next_move_fcn until the target is caught or time expires.
    while not caught and ctr < 1000:
        hunter_position = (hunter_bot.x, hunter_bot.y)
        target_position = (target_bot.x, target_bot.y)
        # Has the hunter caught the target?
        if distance_between(hunter_position, target_position) < separation_tolerance:
            print("You got it right! It took you ", ctr, " steps to catch the target.")
            caught = True
        # The target broadcasts its noisy measurement.
        target_measurement = target_bot.sense()
        # Ask the strategy under test for its next move.
        turning, distance, OTHER = next_move_fcn(hunter_position, hunter_bot.heading,
                                                 target_measurement, max_distance, OTHER)
        # Never let the hunter move faster than allowed.
        distance = min(distance, max_distance)
        hunter_bot.move(turning, distance)
        # The target continues its (nearly) circular motion.
        target_bot.move_in_circle()
        ctr += 1
    if ctr >= 1000:
        print("It took too many steps to catch the target.")
    return caught
def angle_trunc(a):
    """Map the angle a (radians) into the half-open interval [-pi, pi).

    Python's % operator returns a non-negative result for a negative
    dividend, so the original pre-loop that repeatedly added 2*pi until
    a became non-negative was redundant -- and O(|a|) slow for large
    negative inputs.  A single modulo gives the same mapping.
    """
    return ((a + pi) % (pi * 2)) - pi
def get_heading(hunter_position, target_position):
    """Bearing, in radians, from the hunter's position to the target's."""
    dx = target_position[0] - hunter_position[0]
    dy = target_position[1] - hunter_position[1]
    # Normalize into [-pi, pi) for consistency with the rest of the file.
    return angle_trunc(atan2(dy, dx))
def naive_next_move(hunter_position, hunter_heading, target_measurement, max_distance, OTHER):
    """This strategy always steers the hunter straight at the target's last
    reported position and moves forward at full speed.  It also accumulates
    the full history of target measurements, hunter positions and hunter
    headings in OTHER, though nothing is done with that history."""
    if not OTHER:
        # First call: start the three history lists.
        OTHER = ([target_measurement], [hunter_position], [hunter_heading])
    else:
        # Subsequent calls: extend the existing history.
        OTHER[0].append(target_measurement)
        OTHER[1].append(hunter_position)
        OTHER[2].append(hunter_heading)
    measurements, hunter_positions, hunter_headings = OTHER
    # Turn towards the target and go full speed ahead.
    turning = get_heading(hunter_position, target_measurement) - hunter_heading
    return turning, max_distance, OTHER
def demo_grading2(hunter_bot, target_bot, next_move_fcn, OTHER = None):
    """Returns True if your next_move_fcn successfully guides the hunter_bot
    to the target_bot. This function is here to help you understand how we
    will grade your submission.

    Same grading loop as demo_grading, but also animates the chase with
    the turtle module: blue arrow = hunter, green turtle = true target,
    red dots = noisy measurements.
    """
    max_distance = 0.97 * target_bot.distance # 0.97 is an example. It will change.
    separation_tolerance = 0.02 * target_bot.distance # hunter must be within 0.02 step size to catch target
    caught = False
    ctr = 0
    # For Visualization
    import turtle
    window = turtle.Screen()
    window.bgcolor('white')
    # Blue arrow: the hunter.
    chaser_robot = turtle.Turtle()
    chaser_robot.shape('arrow')
    chaser_robot.color('blue')
    chaser_robot.resizemode('user')
    chaser_robot.shapesize(0.3, 0.3, 0.3)
    # Green turtle: the target's true position.
    broken_robot = turtle.Turtle()
    broken_robot.shape('turtle')
    broken_robot.color('green')
    broken_robot.resizemode('user')
    broken_robot.shapesize(0.3, 0.3, 0.3)
    size_multiplier = 15.0 # change size of animation (world units -> pixels)
    # Jump both turtles to their start positions without drawing.
    chaser_robot.hideturtle()
    chaser_robot.penup()
    chaser_robot.goto(hunter_bot.x*size_multiplier, hunter_bot.y*size_multiplier-100)
    chaser_robot.showturtle()
    broken_robot.hideturtle()
    broken_robot.penup()
    broken_robot.goto(target_bot.x*size_multiplier, target_bot.y*size_multiplier-100)
    broken_robot.showturtle()
    # Red circle: stamps each noisy measurement.
    measuredbroken_robot = turtle.Turtle()
    measuredbroken_robot.shape('circle')
    measuredbroken_robot.color('red')
    measuredbroken_robot.penup()
    measuredbroken_robot.resizemode('user')
    measuredbroken_robot.shapesize(0.1, 0.1, 0.1)
    broken_robot.pendown()
    chaser_robot.pendown()
    # End of Visualization
    # We will use your next_move_fcn until we catch the target or time expires.
    while not caught and ctr < 1000:
        # Check to see if the hunter has caught the target.
        hunter_position = (hunter_bot.x, hunter_bot.y)
        target_position = (target_bot.x, target_bot.y)
        separation = distance_between(hunter_position, target_position)
        if separation < separation_tolerance:
            print("You got it right! It took you ", ctr, " steps to catch the target.")
            caught = True
        # The target broadcasts its noisy measurement.
        target_measurement = target_bot.sense()
        # This is where YOUR function will be called.
        turning, distance, OTHER = next_move_fcn(hunter_position, hunter_bot.heading, target_measurement, max_distance, OTHER)
        # Don't try to move faster than allowed!
        if distance > max_distance:
            distance = max_distance
        # We move the hunter according to your instructions.
        hunter_bot.move(turning, distance)
        # The target continues its (nearly) circular motion.
        target_bot.move_in_circle()
        # Visualize it (headings converted from radians to degrees).
        measuredbroken_robot.setheading(target_bot.heading*180/pi)
        measuredbroken_robot.goto(target_measurement[0]*size_multiplier, target_measurement[1]*size_multiplier-100)
        measuredbroken_robot.stamp()
        broken_robot.setheading(target_bot.heading*180/pi)
        broken_robot.goto(target_bot.x*size_multiplier, target_bot.y*size_multiplier-100)
        chaser_robot.setheading(hunter_bot.heading*180/pi)
        chaser_robot.goto(hunter_bot.x*size_multiplier, hunter_bot.y*size_multiplier-100)
        # End of visualization
        ctr += 1
    if ctr >= 1000:
        print("It took too many steps to catch the target.")
    return caught
# target = robot(0.0, 10.0, 0.0, 2*pi / 30, 1.5)
# measurement_noise = 2.0*target.distance # VERY NOISY!!
# target.set_noise(0.0, 0.0, measurement_noise)
# hunter = robot(-10.0, -10.0, 0.0)
# print(demo_grading(hunter, target, naive_next_move))