Skip to content

Commit

Permalink
Merge pull request #127 from agittins/agittins-dev
Browse files Browse the repository at this point in the history
Filtering of Distance sensors, startup and reload fixes, performance and db improvements

- breaking: Smoothing of Distance measurements and changes to sensors and attributes
    - Moved oft-changing states out of sensor attributes and into their own sensors.
    - Set all but Area, Distance and device_tracker sensors to be disabled by default, so they don't chew up resources. They can be manually enabled on a by-device basis for troubleshooting or specific use-cases.
      - Nearest Scanner sensor
      - Sensor RSSI
      - Raw (unfiltered) per-scanner distance sensors
    - Implemented initial filtering of distance measurements, using a per-interval log of samples (regardless of last advert age) and a fairly dodgy moving-window average with local-minima overrides. The number of samples used is configurable.
    - Now accept Bluetooth USB dongle / BlueZ adverts as being fresh. Might need revisiting if it breaks.
- fix: Set default update_interval to 1.1 seconds.
    - This seems to give slightly better results if proxies are configured with intervals of 1 second, presumably because we are more likely to receive an update in that time.

- feat: Link entities with existing devices like bthome
    - Should match on Bluetooth entities.
- fix: Only list latest beacon_source in current_mac for ibeacons
- fix: Resolve startup entity issues
    - fix device_tracker entities not working after restart and requiring a reload
    - fix warnings about unique ids and config entries during reload/configure
- feat: allow config of update_interval
- feat: add current_mac to iBeacon area sensor

- chore: remove TODO.md
- chore: Readme updates
  • Loading branch information
agittins authored Mar 16, 2024
2 parents c8d9dd1 + eff1496 commit 15b4b7d
Show file tree
Hide file tree
Showing 9 changed files with 411 additions and 144 deletions.
133 changes: 84 additions & 49 deletions README.md

Large diffs are not rendered by default.

10 changes: 0 additions & 10 deletions TODO.md

This file was deleted.

140 changes: 96 additions & 44 deletions custom_components/bermuda/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
from homeassistant.util.dt import get_age
from homeassistant.util.dt import now

from .const import ADVERT_FRESHTIME
from .const import BEACON_IBEACON_DEVICE
from .const import BEACON_IBEACON_SOURCE
from .const import BEACON_NOT_A_BEACON
Expand All @@ -43,11 +42,15 @@
from .const import CONF_DEVTRACK_TIMEOUT
from .const import CONF_MAX_RADIUS
from .const import CONF_REF_POWER
from .const import CONF_SMOOTHING_SAMPLES
from .const import CONF_UPDATE_INTERVAL
from .const import CONFDATA_SCANNERS
from .const import DEFAULT_ATTENUATION
from .const import DEFAULT_DEVTRACK_TIMEOUT
from .const import DEFAULT_MAX_RADIUS
from .const import DEFAULT_REF_POWER
from .const import DEFAULT_SMOOTHING_SAMPLES
from .const import DEFAULT_UPDATE_INTERVAL
from .const import DOMAIN
from .const import HIST_KEEP_COUNT
from .const import PLATFORMS
Expand All @@ -61,17 +64,6 @@
# if TYPE_CHECKING:
# from bleak.backends.device import BLEDevice

# Our update takes around 0.002 seconds per loop (with a single proxy), because
# we are only examining local data from the bluetooth integration.
# We could instead act on received packets, but that would result
# in us running an update at least every 250ms, probably less,
# so probably better to poll which will keep load more consistent
# and not be a function of how many adverts we get.
# It does mean we need to be careful of how long our update loop
# takes, and we should consider only doing costly things periodically,
# or perhaps getting hass to schedule a job for them when required.
SCAN_INTERVAL = timedelta(seconds=0.9)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

_LOGGER: logging.Logger = logging.getLogger(__package__)
Expand Down Expand Up @@ -163,11 +155,17 @@ def __init__(
self.rssi: float = None
self.hist_rssi = []
self.hist_distance = []
self.hist_distance_by_interval = [] # updated per-interval
self.smoothing_sample_count: int = options.get(
CONF_SMOOTHING_SAMPLES, DEFAULT_SMOOTHING_SAMPLES
)
self.hist_interval = [] # WARNING: This is actually "age of ad when we polled"
self.stale_update_count = (
0 # How many times we did an update but no new stamps were found.
)
self.tx_power: float = None
self.rssi_distance: float = None
self.rssi_distance_raw: float = None

# Just pass the rest on to update...
self.update(device_address, scandata, area_id, options)
Expand Down Expand Up @@ -220,18 +218,20 @@ def update(
have_new_stamp = False
else:
# Not a bluetooth_proxy device / remote scanner, but probably a USB Bluetooth adaptor.
# We don't get advertisement timestamps from bluez, so currently there's no way to
# reliably include it in our calculations.
# We don't get advertisement timestamps from bluez, so the stamps in our history
# won't be terribly accurate, and the advert might actually be rather old.
# All we can do is check if it has changed and assume it's fresh from that.

scanner_sends_stamps = False
# But if the rssi has changed from last time, consider it "new"
# If the rssi has changed from last time, consider it "new"
if self.rssi != scandata.advertisement.rssi:
# Since rssi has changed, we'll consider this "new", but
# since it could be pretty much any age, make it a multiple
# of freshtime. This means it can still be useful for home/away
# detection in device_tracker, but won't factor in to area localisation.
have_new_stamp = True
new_stamp = MONOTONIC_TIME() - (ADVERT_FRESHTIME * 4)
# 2024-03-16: We're going to treat it as fresh for now and see how that goes.
# We can do that because we smooth distances now every update_interval, regardless
# of when the last advertisement was received, so we shouldn't see bluez trumping
# proxies with stale adverts. Hopefully.
# new_stamp = MONOTONIC_TIME() - (ADVERT_FRESHTIME * 4)
new_stamp = MONOTONIC_TIME()
else:
have_new_stamp = False

Expand All @@ -240,12 +240,12 @@ def update(

self.rssi: float = scandata.advertisement.rssi
self.hist_rssi.insert(0, self.rssi)
self.rssi_distance: float = rssi_to_metres(
self.rssi_distance_raw: float = rssi_to_metres(
self.rssi,
options.get(CONF_REF_POWER, DEFAULT_REF_POWER),
options.get(CONF_ATTENUATION, DEFAULT_ATTENUATION),
)
self.hist_distance.insert(0, self.rssi_distance)
self.hist_distance.insert(0, self.rssi_distance_raw)

# Stamp will be faked from above if required.
if have_new_stamp:
Expand Down Expand Up @@ -279,6 +279,54 @@ def update(
self.adverts: dict[str, bytes] = scandata.advertisement.service_data.items()
self.scanner_sends_stamps = scanner_sends_stamps
self.options = options
self.smoothing_sample_count = options.get(
CONF_SMOOTHING_SAMPLES, DEFAULT_SMOOTHING_SAMPLES
)

# ###### Filter and update distance estimates.
#
# Note: Noise in RSSI readings is VERY asymmetric. Ultimately,
# a closer distance is *always* more accurate than a previous
# more distant measurement. Any measurement might be true,
# or it is likely longer than the truth - and (almost) never
# shorter.
#
# For a new, long measurement to be true, we'd want to see some
# indication of rising measurements preceding it, or at least a
# long time since our last measurement.
#
# Also, a lack of recent measurements should be considered a likely
# increase in distance, but could also simply be total signal loss
# for non-distance related reasons (occlusion).
#
if self.rssi_distance is None:
self.rssi_distance = self.rssi_distance_raw
else:
# Add the current reading (whether new or old) to
# a historical log that is evenly spaced by update_interval
self.hist_distance_by_interval.insert(0, self.rssi_distance_raw)
del self.hist_distance_by_interval[self.smoothing_sample_count :]
dist_total = 0
dist_count = 0
local_min = self.rssi_distance_raw
for i, distance in enumerate(self.hist_distance_by_interval):
if distance <= local_min:
dist_total += distance
local_min = distance
else:
dist_total += local_min
dist_count += 1

if dist_count > 0:
movavg = dist_total / dist_count
else:
movavg = local_min

# The average is only helpful if it's lower than the actual reading.
if movavg < self.rssi_distance_raw:
self.rssi_distance = movavg
else:
self.rssi_distance = self.rssi_distance_raw

# Trim our history lists
for histlist in (
Expand Down Expand Up @@ -344,9 +392,8 @@ def __init__(self, address, options):

self.entry_id: str = None # used for scanner devices
self.create_sensor: bool = False # Create/update a sensor for this device
self.create_sensor_done: bool = (
False # If we have requested the sensor be created
)
self.create_sensor_done: bool = False # Sensor should now exist
self.create_tracker_done: bool = False # device_tracker should now exist
self.last_seen: float = (
0 # stamp from most recent scanner spotting. MONOTONIC_TIME
)
Expand Down Expand Up @@ -438,7 +485,12 @@ def __init__(
self.platforms = []

self.config_entry = entry
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)

interval = entry.options.get(CONF_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL)

super().__init__(
hass, _LOGGER, name=DOMAIN, update_interval=timedelta(seconds=interval)
)

# First time around we freshen the restored scanner info by
# forcing a scan of the captured info.
Expand All @@ -457,6 +509,7 @@ def __init__(
CONF_DEVTRACK_TIMEOUT,
CONF_MAX_RADIUS,
CONF_REF_POWER,
CONF_SMOOTHING_SAMPLES,
):
self.options[key] = val

Expand Down Expand Up @@ -490,6 +543,16 @@ def sensor_created(self, address):
dev = self._get_device(address)
if dev is not None:
dev.create_sensor_done = True
_LOGGER.debug("Sensor confirmed created for %s", address)
else:
_LOGGER.warning("Very odd, we got sensor_created for non-tracked device")

def device_tracker_created(self, address):
"""Allows device_tracker platform to report back that sensors have been set up"""
dev = self._get_device(address)
if dev is not None:
dev.create_tracker_done = True
_LOGGER.debug("Device_tracker confirmed created for %s", address)
else:
_LOGGER.warning("Very odd, we got sensor_created for non-tracked device")

Expand Down Expand Up @@ -595,7 +658,6 @@ async def _async_update_data(self):
# locally, so we need to make one :-)

device.beacon_unique_id = f"{device.beacon_uuid}_{device.beacon_major}_{device.beacon_minor}"

# Note: it's possible that a device sends multiple
# beacons. We are only going to process the latest
# one in any single update cycle, so we ignore that
Expand Down Expand Up @@ -708,9 +770,10 @@ async def _async_update_data(self):
for address in self.options.get(CONF_DEVICES, []):
device = self._get_device(format_mac(address.lower()))
if device is not None:
if not device.create_sensor_done:
_LOGGER.debug("Firing device_new for %s", device.name)
# self.hass.async_run_job(
if not device.create_sensor_done or not device.create_tracker_done:
_LOGGER.debug(
"Firing device_new for %s (%s)", device.name, device.address
)
async_dispatcher_send(
self.hass, SIGNAL_DEVICE_NEW, device.address, self.scanner_list
)
Expand Down Expand Up @@ -846,20 +909,9 @@ def _refresh_area_by_min_distance(self, device: BermudaDevice):
if scanner.stamp > 0:
closest_scanner = scanner
else:
# is it fresh enough to win on proximity alone?
is_fresh_enough = (
scanner.stamp > closest_scanner.stamp - ADVERT_FRESHTIME
)
# is it so much fresher that it wins outright?
is_fresher = (
scanner.stamp > closest_scanner.stamp + ADVERT_FRESHTIME
)
# is it closer?
is_closer = scanner.rssi_distance < closest_scanner.rssi_distance

if is_fresher or (
is_closer and is_fresh_enough
): # This scanner is closer, and the advert is still fresh in comparison..
# Now that we are filtering the distance sensor, just rely on it
# regardless of "freshness"
if scanner.rssi_distance < closest_scanner.rssi_distance:
closest_scanner = scanner

if closest_scanner is not None:
Expand Down
14 changes: 14 additions & 0 deletions custom_components/bermuda/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,14 @@
from .const import CONF_DEVTRACK_TIMEOUT
from .const import CONF_MAX_RADIUS
from .const import CONF_REF_POWER
from .const import CONF_SMOOTHING_SAMPLES
from .const import CONF_UPDATE_INTERVAL
from .const import DEFAULT_ATTENUATION
from .const import DEFAULT_DEVTRACK_TIMEOUT
from .const import DEFAULT_MAX_RADIUS
from .const import DEFAULT_REF_POWER
from .const import DEFAULT_SMOOTHING_SAMPLES
from .const import DEFAULT_UPDATE_INTERVAL
from .const import DOMAIN
from .const import NAME

Expand Down Expand Up @@ -164,6 +168,16 @@ async def async_step_globalopts(self, user_input=None):
CONF_DEVTRACK_TIMEOUT, DEFAULT_DEVTRACK_TIMEOUT
),
): vol.Coerce(int),
vol.Required(
CONF_UPDATE_INTERVAL,
default=self.options.get(CONF_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL),
): vol.Coerce(float),
vol.Required(
CONF_SMOOTHING_SAMPLES,
default=self.options.get(
CONF_SMOOTHING_SAMPLES, DEFAULT_SMOOTHING_SAMPLES
),
): vol.Coerce(int),
vol.Required(
CONF_ATTENUATION,
default=self.options.get(CONF_ATTENUATION, DEFAULT_ATTENUATION),
Expand Down
16 changes: 11 additions & 5 deletions custom_components/bermuda/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,11 +49,6 @@

DOCS = {}

ADVERT_FRESHTIME = 2.5
# If two scanners are battling to "win" a device, the winner can not be more than
# this many seconds older than its opponent. Prevents a stale but very close
# advert from overriding a newer advertisement from a less-close scanner.


HIST_KEEP_COUNT = (
10 # How many old timestamps, rssi, etc to keep for each device/scanner pairing.
Expand Down Expand Up @@ -82,6 +77,17 @@
CONF_REF_POWER, DEFAULT_REF_POWER = "ref_power", -55.0
DOCS[CONF_REF_POWER] = "Default RSSI for signal at 1 metre."

CONF_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL = "update_interval", 1.1
DOCS[CONF_UPDATE_INTERVAL] = (
"How often to update bluetooth stats, in seconds. 1.1 is pretty good, I reckon." # fmt: skip
)

CONF_SMOOTHING_SAMPLES, DEFAULT_SMOOTHING_SAMPLES = "smoothing_samples", 20
DOCS[CONF_SMOOTHING_SAMPLES] = (
"How many samples to average distance smoothing. Bigger numbers"
" make for slower distance increases. 10 or 20 seems good."
)

# Defaults
DEFAULT_NAME = DOMAIN

Expand Down
39 changes: 33 additions & 6 deletions custom_components/bermuda/device_tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,13 @@
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_HOME
from homeassistant.core import HomeAssistant
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BermudaDataUpdateCoordinator
from .const import DOMAIN
from .const import SIGNAL_DEVICE_NEW
from .entity import BermudaEntity

_LOGGER = logging.getLogger(__name__)
Expand All @@ -27,12 +30,36 @@ async def async_setup_entry(
"""Load Device Tracker entities for a config entry."""
coordinator: BermudaDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

# We go through each "device" in the co-ordinator, and create the entities
entities = []
for device in coordinator.devices.values():
if device.create_sensor:
entities.append(BermudaDeviceTracker(coordinator, entry, device.address))
async_add_devices(entities, True)
created_devices = [] # list of devices we've already created entities for

@callback
def device_new(address: str, scanners: [str]) -> None:
"""Create entities for newly-found device
Called from the data co-ordinator when it finds a new device that needs
to have sensors created. Not called directly, but via the dispatch
facility from HA.
Make sure you have a full list of scanners ready before calling this.
"""
if address not in created_devices:
entities = []
entities.append(BermudaDeviceTracker(coordinator, entry, address))
# We set update before add to False because we are being
# call(back(ed)) from the update, so causing it to call another would be... bad.
async_add_devices(entities, False)
created_devices.append(address)
else:
_LOGGER.debug(
"Ignoring create request for existing dev_tracker %s", address
)
# tell the co-ord we've done it.
coordinator.device_tracker_created(address)

# Connect device_new to a signal so the coordinator can call it
entry.async_on_unload(async_dispatcher_connect(hass, SIGNAL_DEVICE_NEW, device_new))

# Now we must tell the co-ord to do initial refresh, so that it will call our callback.
await coordinator.async_config_entry_first_refresh()


class BermudaDeviceTracker(BermudaEntity, BaseTrackerEntity):
Expand Down
Loading

0 comments on commit 15b4b7d

Please sign in to comment.