Merge pull request #129 from agittins/agittins-dev
feat: Ignore long distance readings that would require high velocity
agittins authored Mar 17, 2024
2 parents 11864b3 + ea90537 commit f319afe
Showing 1 changed file with 89 additions and 26 deletions.
115 changes: 89 additions & 26 deletions custom_components/bermuda/__init__.py
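
The change is easiest to see with a worked example (hypothetical numbers, not from the source): if the previous reading put a device at 2 m and one second later a single advert claims 40 m, the device would have had to retreat at 38 m/s, far above the 3 m/s MAX_VELOCITY this diff introduces, so the reading is ignored and the previous sample is repeated instead.

# Worked example (hypothetical numbers) of the check this commit adds:
prev_distance, new_distance = 2.0, 40.0  # metres
delta_t = 1.0                            # seconds between the two readings
velocity = (new_distance - prev_distance) / delta_t  # 38 m/s apparent retreat
MAX_VELOCITY = 3                         # m/s, as set in the diff below
assert velocity > MAX_VELOCITY           # -> the 40 m reading is discarded
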
@@ -158,10 +158,8 @@ def __init__(
self.hist_rssi = []
self.hist_distance = []
self.hist_distance_by_interval = [] # updated per-interval
self.smoothing_sample_count: int = options.get(
CONF_SMOOTHING_SAMPLES, DEFAULT_SMOOTHING_SAMPLES
)
self.hist_interval = [] # WARNING: This is actually "age of ad when we polled"
self.hist_velocity = [] # Effective velocity versus previous stamped reading
self.stale_update_count = (
0 # How many times we did an update but no new stamps were found.
)
@@ -190,11 +188,12 @@ def update(
# In case the scanner has changed its details since startup though:
self.name: str = scandata.scanner.name
self.area_id: str = area_id
have_new_stamp = False

# Only remote scanners log timestamps here (local usb adaptors do not),
if hasattr(scandata.scanner, "_discovered_device_timestamps"):
# Found a remote scanner which has timestamp history...
scanner_sends_stamps = True
self.scanner_sends_stamps = True
# FIXME: Doesn't appear to be any API to get this otherwise...
# pylint: disable-next=protected-access
stamps = scandata.scanner._discovered_device_timestamps
@@ -208,6 +207,7 @@
else:
# We have no updated advert in this run.
have_new_stamp = False
new_stamp = None
self.stale_update_count += 1
else:
# This shouldn't happen, as we shouldn't have got a record
@@ -218,24 +218,26 @@
device_address,
)
have_new_stamp = False
new_stamp = None
else:
# Not a bluetooth_proxy device / remote scanner, but probably a USB Bluetooth adaptor.
# We don't get advertisement timestamps from bluez, so the stamps in our history
# won't be terribly accurate, and the advert might actually be rather old.
# All we can do is check if it has changed and assume it's fresh from that.

scanner_sends_stamps = False
self.scanner_sends_stamps = False
# If the rssi has changed from last time, consider it "new"
if self.rssi != scandata.advertisement.rssi:
have_new_stamp = True
# 2024-03-16: We're going to treat it as fresh for now and see how that goes.
# We can do that because we smooth distances now every update_interval, regardless
# of when the last advertisement was received, so we shouldn't see bluez trumping
# proxies with stale adverts. Hopefully.
# new_stamp = MONOTONIC_TIME() - (ADVERT_FRESHTIME * 4)
new_stamp = MONOTONIC_TIME()
have_new_stamp = True
else:
have_new_stamp = False
new_stamp = None

if len(self.hist_stamp) == 0 or have_new_stamp:
# this is the first entry or a new one...
@@ -249,16 +251,19 @@
)
self.hist_distance.insert(0, self.rssi_distance_raw)

# Stamp will be faked from above if required.
if have_new_stamp:
# Note: this is not actually the interval between adverts,
# but rather a function of our UPDATE_INTERVAL plus the packet
# interval. The bluetooth integration does not currently store
# interval data, only stamps of the most recent packet.
self.hist_interval.insert(0, new_stamp - self.stamp)
# Note: this is not actually the interval between adverts,
# but rather a function of our UPDATE_INTERVAL plus the packet
# interval. The bluetooth integration does not currently store
# interval data, only stamps of the most recent packet.
# So it more accurately reflects "How much time passed between
# the two last packets we observed" - which should be a multiple
# of the true inter-packet interval. For stamps from local bluetooth
# adaptors (usb dongles) it reflects "Which update cycle last saw a
# different rssi", which will be a multiple of our update interval.
self.hist_interval.insert(0, new_stamp - self.stamp)

self.stamp = new_stamp
self.hist_stamp.insert(0, self.stamp)
self.stamp = new_stamp
self.hist_stamp.insert(0, self.stamp)

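To make the comment above concrete (hypothetical numbers): if a device truly advertises every 2.5 s and the backend only keeps the newest stamp, the recorded gaps are the spacing between the packets we happened to observe, so they come out as multiples of 2.5 s rather than of the poll interval.

# Illustration of the hist_interval semantics described above (made-up stamps).
observed_stamps = [0.0, 2.5, 5.0, 10.0]  # the advert at 7.5s was never surfaced to us
hist_interval = [b - a for a, b in zip(observed_stamps, observed_stamps[1:])]
print(hist_interval)  # [2.5, 2.5, 5.0] - multiples of the true inter-packet interval
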
# Safe to update these values regardless of stamps...

@@ -279,11 +284,7 @@
)
self.tx_power: float = scandata.advertisement.tx_power
self.adverts: dict[str, bytes] = scandata.advertisement.service_data.items()
self.scanner_sends_stamps = scanner_sends_stamps
self.options = options
self.smoothing_sample_count = options.get(
CONF_SMOOTHING_SAMPLES, DEFAULT_SMOOTHING_SAMPLES
)

# ###### Filter and update distance estimates.
#
@@ -313,7 +314,7 @@ def update(
self.hist_distance_by_interval.insert(0, self.rssi_distance_raw)
del self.hist_distance_by_interval[1:]

elif not have_new_stamp and self.stamp < MONOTONIC_TIME() - DISTANCE_TIMEOUT:
elif (not have_new_stamp) and self.stamp < MONOTONIC_TIME() - DISTANCE_TIMEOUT:
# DEVICE IS AWAY!
# Last distance reading is stale, mark device distance as unknown.
self.rssi_distance = None
@@ -323,13 +324,73 @@

else:
# Add the current reading (whether new or old) to
# a historical log that is evenly spaced by update_interval,
# and calculate our new smoothed/estimated distance
self.hist_distance_by_interval.insert(0, self.rssi_distance_raw)
del self.hist_distance_by_interval[self.smoothing_sample_count :]
# a historical log that is evenly spaced by update_interval.

# Verify the new reading is vaguely sensible. If it isn't, we
# ignore it by duplicating the last cycle's reading.
MAX_VELOCITY = 3 # m/s for how fast a device can retreat.
if len(self.hist_stamp) > 1:
# How far (away) did it travel in how long?
# we check this reading against the recent readings to find
# the peak average velocity we are alleged to have reached.
velo_newdistance = self.hist_distance[0]
velo_newstamp = self.hist_stamp[0]
peak_velocity = 0
# walk through the history of distances/stamps, and find
# the peak
for i, old_distance in enumerate(self.hist_distance):
if i == 0:
# (skip the first entry since it's what we're comparing with)
continue

delta_t = velo_newstamp - self.hist_stamp[i]
delta_d = velo_newdistance - self.hist_distance[i]
velocity = delta_d / delta_t

# Approach velocities are only interesting vs the previous
# reading, while retreats need to be sensible over time
if i == 1:
# on first round we want approach or retreat velocity
peak_velocity = velocity
if velocity < 0:
# if our new reading is an approach, we are done here
# (not so for == 0 since it might still be an invalid retreat)
break

if velocity > peak_velocity:
# but on subsequent comparisons we only care if they're faster retreats
peak_velocity = velocity
# we've been through the history and have peak velo retreat, or the most recent
# approach velo.
velocity = peak_velocity
else:
# There's no history, so no velocity
velocity = 0

self.hist_velocity.insert(0, velocity)

if velocity > MAX_VELOCITY:
if device_address.upper() in self.options[CONF_DEVICES]:
_LOGGER.debug(
"This sparrow %s flies too fast (%dm/s), ignoring",
device_address,
velocity,
)
# Discard the bogus reading by duplicating the last.
self.hist_distance_by_interval.insert(
0, self.hist_distance_by_interval[0]
)
else:
# Looks valid enough, add the current reading to the interval log
self.hist_distance_by_interval.insert(0, self.rssi_distance_raw)

# trim the log to length
del self.hist_distance_by_interval[
self.options.get(CONF_SMOOTHING_SAMPLES) :
]
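
Pulled out of the class for readability, the gate above amounts to the following standalone sketch (the function name and flat argument lists are illustrative, not part of the change): walk back through the distance/stamp history, take the fastest apparent retreat (or the most recent approach) as the peak velocity, and reject the new reading if that peak exceeds MAX_VELOCITY.

# Standalone sketch of the peak-velocity gate above (illustrative only).
MAX_VELOCITY = 3  # m/s, maximum plausible retreat speed

def reading_is_plausible(distances: list[float], stamps: list[float]) -> bool:
    """distances[0]/stamps[0] hold the newest reading; older entries follow."""
    if len(stamps) < 2:
        return True  # no history, nothing to compare against
    peak_velocity = 0.0
    for i in range(1, min(len(distances), len(stamps))):
        delta_t = stamps[0] - stamps[i]
        if delta_t <= 0:
            continue  # guard (not in the original) against identical stamps
        velocity = (distances[0] - distances[i]) / delta_t
        if i == 1:
            peak_velocity = velocity
            if velocity < 0:
                break  # a fresh approach is accepted without further checks
        elif velocity > peak_velocity:
            peak_velocity = velocity  # only faster retreats matter after that
    return peak_velocity <= MAX_VELOCITY

In the diff itself an implausible reading is not dropped outright; the previous per-interval sample is duplicated instead, so hist_distance_by_interval stays evenly spaced in time.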

# A moving-window average, that only includes historical values
# if they're "closer" (ie more reliable).
# Calculate a moving-window average, that only includes
# historical values if they're "closer" (ie more reliable).
#
# This might be improved by weighting the values by age, but
# already does a fairly reasonable job of hugging the bottom
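
The averaging code itself sits outside this hunk, so the following is only a sketch of one way such a "prefer closer readings" window could look; the function name and the exact inclusion rule are assumptions, not the repository's implementation.

# Sketch (assumed behaviour, see note above) of a closer-biased moving window:
# older samples contribute only if they are at least as close as the newest one,
# which keeps the estimate hugging the bottom of the noise envelope.
def smoothed_distance(window: list[float]) -> float | None:
    """window[0] is the newest per-interval distance estimate."""
    if not window:
        return None
    kept = [d for d in window if d <= window[0]]
    return sum(kept) / len(kept)
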
@@ -365,6 +426,7 @@ def update(
self.hist_interval,
self.hist_rssi,
self.hist_stamp,
self.hist_velocity,
):
del histlist[HIST_KEEP_COUNT:]

@@ -537,6 +599,7 @@ def __init__(
# TODO: This is only here because we haven't set up migration of config
# entries yet, so some users might not have this defined after an update.
self.options[CONF_MAX_RADIUS] = DEFAULT_MAX_RADIUS
self.options[CONF_SMOOTHING_SAMPLES] = DEFAULT_SMOOTHING_SAMPLES

if hasattr(entry, "options"):
# Firstly, on some calls (specifically during reload after settings changes)
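
For context on the TODO above, here is a minimal sketch (placeholder key names and values, not the integration's exact code) of the usual "seed defaults, then overlay whatever the entry stored" pattern that proper config-entry migration would eventually replace:

# Sketch only: hard-coded defaults first, stored options overlaid afterwards,
# so installs created before these keys existed still get sensible values.
CONF_MAX_RADIUS = "max_radius"            # placeholder key names/values
CONF_SMOOTHING_SAMPLES = "smoothing_samples"
DEFAULT_MAX_RADIUS = 3.0
DEFAULT_SMOOTHING_SAMPLES = 20

def merged_options(saved: dict | None) -> dict:
    options = {
        CONF_MAX_RADIUS: DEFAULT_MAX_RADIUS,
        CONF_SMOOTHING_SAMPLES: DEFAULT_SMOOTHING_SAMPLES,
    }
    options.update(saved or {})  # stored settings win over the defaults
    return options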
