From 97fae83f2f1fd0bdc06324d5436d8ea5db501db2 Mon Sep 17 00:00:00 2001 From: Luke Date: Wed, 6 Nov 2024 23:42:35 -0500 Subject: [PATCH 01/35] some calc data updates --- .../bermuda/bermuda_device_scanner.py | 131 +++++++++--------- 1 file changed, 66 insertions(+), 65 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index ba0eed9..8b85b0b 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -11,7 +11,11 @@ from typing import TYPE_CHECKING -from homeassistant.components.bluetooth import MONOTONIC_TIME, BluetoothScannerDevice +from homeassistant.components.bluetooth import ( + MONOTONIC_TIME, + BaseHaRemoteScanner, + BluetoothScannerDevice, +) from .const import ( _LOGGER, @@ -76,7 +80,13 @@ def __init__( self.rssi_distance: float | None = None self.rssi_distance_raw: float | None = None self.adverts: dict[str, bytes] = {} - + self.cached_remote_scanners = set() + self.rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) + self.ref_power = self.options.get(CONF_REF_POWER) + self.attenuation = self.options.get(CONF_ATTENUATION) + self.max_velocity = self.options.get(CONF_MAX_VELOCITY) + self.smoothing_samples = self.options.get(CONF_SMOOTHING_SAMPLES) + self.hist_dist_count = 0 # Just pass the rest on to update... self.update_advertisement(device_address, scandata, area_id) @@ -90,18 +100,19 @@ def update_advertisement(self, device_address: str, scandata: BluetoothScannerDe claims to have data. """ # In case the scanner has changed it's details since startup: - self.name: str = scandata.scanner.name + scanner = scandata.scanner + self.name: str = scanner.name self.area_id: str = area_id - new_stamp: float | None = None - + new_stamp: float | None # Only remote scanners log timestamps here (local usb adaptors do not), - if hasattr(scandata.scanner, "_discovered_device_timestamps"): + if device_address in self.cached_remote_scanners or isinstance(scanner, BaseHaRemoteScanner): + self.cached_remote_scanners.add(device_address) # Found a remote scanner which has timestamp history... self.scanner_sends_stamps = True # There's no API for this, so we somewhat sneakily are accessing # what is intended to be a protected dict. # pylint: disable-next=protected-access - stamps = scandata.scanner._discovered_device_timestamps # type: ignore #noqa + stamps = scanner._discovered_device_timestamps # type: ignore #noqa # In this dict all MAC address keys are upper-cased uppermac = device_address.upper() @@ -117,7 +128,7 @@ def update_advertisement(self, device_address: str, scandata: BluetoothScannerDe # of this scanner if it hadn't seen this device. _LOGGER.error( "Scanner %s has no stamp for %s - very odd.", - scandata.scanner.source, + scanner.source, device_address, ) new_stamp = None @@ -145,9 +156,9 @@ def update_advertisement(self, device_address: str, scandata: BluetoothScannerDe self.rssi = scandata.advertisement.rssi self.hist_rssi.insert(0, self.rssi) self.rssi_distance_raw = rssi_to_metres( - self.rssi + self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0), - self.options.get(CONF_REF_POWER), - self.options.get(CONF_ATTENUATION), + self.rssi + self.rssi_offset, + self.ref_power, + self.attenuation, ) self.hist_distance.insert(0, self.rssi_distance_raw) @@ -171,8 +182,8 @@ def update_advertisement(self, device_address: str, scandata: BluetoothScannerDe # Safe to update these values regardless of stamps... 
- self.adapter: str = scandata.scanner.adapter - self.source: str = scandata.scanner.source + self.adapter: str = scanner.adapter + self.source: str = scanner.source if self.tx_power is not None and scandata.advertisement.tx_power != self.tx_power: # Not really an erorr, we just don't account for this happening - # I want to know if it does. @@ -189,7 +200,7 @@ def update_advertisement(self, device_address: str, scandata: BluetoothScannerDe self.new_stamp = new_stamp - def calculate_data(self): + def new_calculate_data(self): """ Filter and update distance estimates. @@ -234,14 +245,15 @@ def calculate_data(self): new_stamp = self.new_stamp # should have been set by update() self.new_stamp = None # Clear so we know if an update is missed next cycle - if new_stamp is not None and self.rssi_distance is None: + if self.rssi_distance is None and new_stamp is not None: # DEVICE HAS ARRIVED! # We have just newly come into range (or we're starting up) # accept the new reading as-is. self.rssi_distance = self.rssi_distance_raw # And ensure the smoothing history gets a fresh start - self.hist_distance_by_interval.insert(0, self.rssi_distance_raw) - del self.hist_distance_by_interval[1:] + + self.hist_distance_by_interval = [self.rssi_distance_raw] + self.hist_dist_count = 1 elif new_stamp is None and (self.stamp is None or self.stamp < MONOTONIC_TIME() - DISTANCE_TIMEOUT): # DEVICE IS AWAY! @@ -250,6 +262,7 @@ def calculate_data(self): # Clear the smoothing history if len(self.hist_distance_by_interval) > 0: self.hist_distance_by_interval.clear() + self.hist_dist_count = 0 else: # Add the current reading (whether new or old) to @@ -266,36 +279,28 @@ def calculate_data(self): peak_velocity = 0 # walk through the history of distances/stamps, and find # the peak - for i, old_distance in enumerate(self.hist_distance): - if i == 0: - # (skip the first entry since it's what we're comparing with) - continue - - if self.hist_stamp[i] is None: - continue # Skip this iteration if hist_stamp[i] is None - - delta_t = velo_newstamp - self.hist_stamp[i] - delta_d = velo_newdistance - old_distance - if delta_t <= 0: - # Additionally, skip if delta_t is zero or negative - # to avoid division by zero - continue - - velocity = delta_d / delta_t - - # Approach velocities are only interesting vs the previous - # reading, while retreats need to be sensible over time - if i == 1: - # on first round we want approach or retreat velocity - peak_velocity = velocity - if velocity < 0: - # if our new reading is an approach, we are done here - # (not so for == 0 since it might still be an invalid retreat) - break - - if velocity > peak_velocity: - # but on subsequent comparisons we only care if they're faster retreats - peak_velocity = velocity + delta_t = velo_newstamp - self.hist_stamp[1] + delta_d = velo_newdistance - self.hist_distance[1] + if delta_t > 0: + peak_velocity = delta_d / delta_t + # if our initial reading is an approach, we are done here + if peak_velocity >= 0: + for old_distance, hist_stamp in zip(self.hist_distance[2:], self.hist_stamp[2:], strict=False): + if hist_stamp is None: + continue # Skip this iteration if hist_stamp[i] is None + + delta_t = velo_newstamp - hist_stamp + if delta_t <= 0: + # Additionally, skip if delta_t is zero or negative + # to avoid division by zero + continue + delta_d = velo_newdistance - old_distance + + velocity = delta_d / delta_t + + if velocity > peak_velocity: + # but on subsequent comparisons we only care if they're faster retreats + peak_velocity = velocity # we've 
been through the history and have peak velo retreat, or the most recent # approach velo. velocity = peak_velocity @@ -305,7 +310,7 @@ def calculate_data(self): self.hist_velocity.insert(0, velocity) - if velocity > self.options.get(CONF_MAX_VELOCITY): + if velocity > self.max_velocity: if self.parent_device.upper() in self.options.get(CONF_DEVICES, []): _LOGGER.debug( "This sparrow %s flies too fast (%2fm/s), ignoring", @@ -314,12 +319,16 @@ def calculate_data(self): ) # Discard the bogus reading by duplicating the last. self.hist_distance_by_interval.insert(0, self.hist_distance_by_interval[0]) + self.hist_dist_count += 1 else: # Looks valid enough, add the current reading to the interval log self.hist_distance_by_interval.insert(0, self.rssi_distance_raw) + self.hist_dist_count += 1 # trim the log to length - del self.hist_distance_by_interval[self.options.get(CONF_SMOOTHING_SAMPLES) :] + if self.smoothing_samples < self.hist_dist_count: + del self.hist_distance_by_interval[self.smoothing_samples :] + self.hist_dist_count -= 1 # Calculate a moving-window average, that only includes # historical values if their "closer" (ie more reliable). @@ -331,21 +340,16 @@ def calculate_data(self): # helpful, but probably dependent on use-case. # dist_total: float = 0 - dist_count: int = 0 local_min: float = self.rssi_distance_raw or DISTANCE_INFINITE for distance in self.hist_distance_by_interval: if distance <= local_min: - dist_total += distance local_min = distance - else: - dist_total += local_min - dist_count += 1 + dist_total += local_min - if dist_count > 0: - movavg = dist_total / dist_count + if self.hist_dist_count > 0: + movavg = dist_total / self.hist_dist_count else: movavg = local_min - # The average is only helpful if it's lower than the actual reading. 
if self.rssi_distance_raw is None or movavg < self.rssi_distance_raw: self.rssi_distance = movavg @@ -353,14 +357,11 @@ def calculate_data(self): self.rssi_distance = self.rssi_distance_raw # Trim our history lists - for histlist in ( - self.hist_distance, - self.hist_interval, - self.hist_rssi, - self.hist_stamp, - self.hist_velocity, - ): - del histlist[HIST_KEEP_COUNT:] + del self.hist_distance[HIST_KEEP_COUNT:] + del self.hist_interval[HIST_KEEP_COUNT:] + del self.hist_rssi[HIST_KEEP_COUNT:] + del self.hist_stamp[HIST_KEEP_COUNT:] + del self.hist_velocity[HIST_KEEP_COUNT:] def to_dict(self): """Convert class to serialisable dict for dump_devices.""" From ee32807d0992b36f1fa9855e83d2ad4ef108637f Mon Sep 17 00:00:00 2001 From: Luke Date: Thu, 7 Nov 2024 17:44:16 -0500 Subject: [PATCH 02/35] Optimize redact_data() --- custom_components/bermuda/bermuda_device.py | 4 ++++ custom_components/bermuda/coordinator.py | 14 ++++++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/custom_components/bermuda/bermuda_device.py b/custom_components/bermuda/bermuda_device.py index 6a5ff8f..b0a16ca 100644 --- a/custom_components/bermuda/bermuda_device.py +++ b/custom_components/bermuda/bermuda_device.py @@ -186,3 +186,7 @@ def to_dict(self): val = scanout # noqa out[var] = val return out + + def __repr__(self) -> str: + """Help debug devices and figure out what device it is at a glance.""" + return self.prefname diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index aa46bdd..e5c400e 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -1196,8 +1196,18 @@ def redact_data(self, data): self.redaction_list_update() if isinstance(data, str): # the end of the recursive wormhole, do the actual work: - for find, fix in self.redactions.items(): - data = re.sub(find, fix, data, flags=re.IGNORECASE) + if ":" in data: + if data not in self.redactions: + if data.upper() not in self.redactions: + for find, fix in list(self.redactions.items()): + if find in data: + self.redactions[data] = re.sub(find, fix, data, flags=re.IGNORECASE) + data = self.redactions[data] + break + else: + data = self.redactions[data.upper()] + else: + data = self.redactions[data] # redactions done, now replace any remaining MAC addresses # We are only looking for xx:xx:xx... format. 
return self._redact_generic_re.sub(self._redact_generic_sub, data) From 8eab1f0f2e51ac9fb4482772f0661557c6fc5019 Mon Sep 17 00:00:00 2001 From: Luke Date: Thu, 7 Nov 2024 17:54:02 -0500 Subject: [PATCH 03/35] Merge in dev --- custom_components/bermuda/bermuda_device_scanner.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 891615b..3eecb0c 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -463,3 +463,7 @@ def to_dict(self): continue out[var] = val return out + + def __repr__(self) -> str: + """Help debugging by giving it a clear name instead of empty dict.""" + return self.address From ef2c9ab2b150ac58eddd51437e8e47b24f201b34 Mon Sep 17 00:00:00 2001 From: Luke Date: Fri, 8 Nov 2024 08:30:10 -0500 Subject: [PATCH 04/35] Clean up and remove self.hist_dist_count --- .../bermuda/bermuda_device_scanner.py | 20 ++++++------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 3eecb0c..4a7f178 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -89,18 +89,12 @@ def __init__( self.hist_distance_by_interval = [] # updated per-interval self.hist_interval = [] # WARNING: This is actually "age of ad when we polled" self.hist_velocity = [] # Effective velocity versus previous stamped reading - self.stale_update_count = 0 # How many times we did an update but no new stamps were found. - self.tx_power: float | None = None - self.rssi_distance: float | None = None - self.rssi_distance_raw: float | None = None - self.adverts: dict[str, bytes] = {} self.cached_remote_scanners = set() self.rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) self.ref_power = self.options.get(CONF_REF_POWER) self.attenuation = self.options.get(CONF_ATTENUATION) self.max_velocity = self.options.get(CONF_MAX_VELOCITY) self.smoothing_samples = self.options.get(CONF_SMOOTHING_SAMPLES) - self.hist_dist_count = 0 self.adverts: dict[str, list] = { "manufacturer_data": [], "service_data": [], @@ -331,7 +325,6 @@ def calculate_data(self): # And ensure the smoothing history gets a fresh start self.hist_distance_by_interval = [self.rssi_distance_raw] - self.hist_dist_count = 1 elif new_stamp is None and (self.stamp is None or self.stamp < MONOTONIC_TIME() - DISTANCE_TIMEOUT): # DEVICE IS AWAY! @@ -340,7 +333,6 @@ def calculate_data(self): # Clear the smoothing history if len(self.hist_distance_by_interval) > 0: self.hist_distance_by_interval.clear() - self.hist_dist_count = 0 else: # Add the current reading (whether new or old) to @@ -397,16 +389,16 @@ def calculate_data(self): ) # Discard the bogus reading by duplicating the last. 
self.hist_distance_by_interval.insert(0, self.hist_distance_by_interval[0]) - self.hist_dist_count += 1 else: # Looks valid enough, add the current reading to the interval log self.hist_distance_by_interval.insert(0, self.rssi_distance_raw) - self.hist_dist_count += 1 + dist_count = len(self.hist_distance_by_interval) # trim the log to length - if self.smoothing_samples < self.hist_dist_count: + if self.smoothing_samples < dist_count: del self.hist_distance_by_interval[self.smoothing_samples :] - self.hist_dist_count -= 1 + # It should only ever need to remove one + dist_count -= 1 # Calculate a moving-window average, that only includes # historical values if their "closer" (ie more reliable). @@ -424,8 +416,8 @@ def calculate_data(self): local_min = distance dist_total += local_min - if self.hist_dist_count > 0: - movavg = dist_total / self.hist_dist_count + if dist_count > 0: + movavg = dist_total / dist_count else: movavg = local_min # The average is only helpful if it's lower than the actual reading. From 4a0fa0949cd175797244698ddf3d09bac67c793c Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 10:32:24 -0500 Subject: [PATCH 05/35] Some MR comments --- custom_components/bermuda/bermuda_device_scanner.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 4a7f178..4b846db 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -389,16 +389,17 @@ def calculate_data(self): ) # Discard the bogus reading by duplicating the last. self.hist_distance_by_interval.insert(0, self.hist_distance_by_interval[0]) + elif len(self.hist_distance_by_interval) == 0: + self.hist_distance_by_interval = [self.rssi_distance_raw] else: - # Looks valid enough, add the current reading to the interval log - self.hist_distance_by_interval.insert(0, self.rssi_distance_raw) - dist_count = len(self.hist_distance_by_interval) + self.hist_distance_by_interval.insert(0, self.hist_distance_by_interval[0]) + dist_count = len(self.hist_distance_by_interval) # trim the log to length if self.smoothing_samples < dist_count: del self.hist_distance_by_interval[self.smoothing_samples :] - # It should only ever need to remove one - dist_count -= 1 + # Set equal to smoothing samples + dist_count = self.smoothing_samples # Calculate a moving-window average, that only includes # historical values if their "closer" (ie more reliable). From 0dd455228846dc8bf67be6474dc3bfe9187c0c67 Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Mon, 11 Nov 2024 10:33:31 -0500 Subject: [PATCH 06/35] Apply suggestions from code review Co-authored-by: Ashley Gittins --- custom_components/bermuda/bermuda_device_scanner.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 4b846db..2f7a4f8 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -402,7 +402,7 @@ def calculate_data(self): dist_count = self.smoothing_samples # Calculate a moving-window average, that only includes - # historical values if their "closer" (ie more reliable). + # historical values if they're "closer" (ie more reliable). 
# # This might be improved by weighting the values by age, but # already does a fairly reasonable job of hugging the bottom @@ -417,9 +417,9 @@ def calculate_data(self): local_min = distance dist_total += local_min - if dist_count > 0: - movavg = dist_total / dist_count - else: + if dist_total > 0: # Calculate the minimised-windowed-average + movavg = dist_total / len(self.hist_distance_by_interval) + else: # we have only a single measurement. movavg = local_min # The average is only helpful if it's lower than the actual reading. if self.rssi_distance_raw is None or movavg < self.rssi_distance_raw: From e6b413f1362f6a6dc562cfcd08242870c11fe94f Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 11:17:20 -0500 Subject: [PATCH 07/35] Address more comments --- .../bermuda/bermuda_device_scanner.py | 2 +- custom_components/bermuda/coordinator.py | 43 ++++++++++++------- 2 files changed, 29 insertions(+), 16 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 2f7a4f8..d68b00c 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -418,7 +418,7 @@ def calculate_data(self): dist_total += local_min if dist_total > 0: # Calculate the minimised-windowed-average - movavg = dist_total / len(self.hist_distance_by_interval) + movavg = dist_total / dist_count else: # we have only a single measurement. movavg = local_min # The average is only helpful if it's lower than the actual reading. diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 96f7f60..ddd729c 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -46,6 +46,7 @@ format_mac, ) from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_call_later from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util import slugify from homeassistant.util.dt import get_age, now @@ -212,6 +213,8 @@ def __init__( self.devices: dict[str, BermudaDevice] = {} # self.updaters: dict[str, BermudaPBDUCoordinator] = {} + # Run it once so it will schedule itself in the future. + hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)) self.area_reg = ar.async_get(hass) # Restore the scanners saved in config entry data. We maintain @@ -1239,13 +1242,15 @@ def redaction_list_update(self): i = len(self.redactions) # not entirely accurate but we don't care. # SCANNERS - for address in self.scanner_list: - if address.upper() not in self.redactions: + for non_lower_address in self.scanner_list: + address = non_lower_address.lower() + if address not in self.redactions: i += 1 - self.redactions[address.upper()] = f"{address[:2]}::SCANNER_{i}::{address[-2:]}" + self.redactions[address] = f"{address[:2]}::SCANNER_{i}::{address[-2:]}" # CONFIGURED DEVICES - for address in self.options.get(CONF_DEVICES, []): - if address.upper() not in self.redactions: + for non_lower_address in self.options.get(CONF_DEVICES, []): + address = non_lower_address.lower() + if address not in self.redactions: i += 1 if address.count("_") == 2: self.redactions[address] = f"{address[:4]}::CFG_iBea_{i}::{address[32:]}" @@ -1255,8 +1260,9 @@ def redaction_list_update(self): # Don't know what it is, but not a mac. 
self.redactions[address] = f"CFG_OTHER_{1}_{address}" # EVERYTHING ELSE - for address, device in self.devices.items(): - if address.upper() not in self.redactions: + for non_lower_address, device in self.devices.items(): + address = non_lower_address.lower() + if address not in self.redactions: # Only add if they are not already there. i += 1 if device.address_type == ADDR_TYPE_PRIVATE_BLE_DEVICE: @@ -1269,6 +1275,15 @@ def redaction_list_update(self): # Don't know what it is. self.redactions[address] = f"OTHER_{1}_{address}" + async def purge_redactions(self, hass: HomeAssistant): + """Empty redactions and free up some memory.""" + self.redactions = {} + async_call_later( + hass, + 1 * 1 * 10, + lambda _: hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)), + ) + def redact_data(self, data): """ Wash any collection of data of any MAC addresses. @@ -1281,17 +1296,15 @@ def redact_data(self, data): # Initialise the list of addresses if not already done. self.redaction_list_update() if isinstance(data, str): + data = data.lower() # the end of the recursive wormhole, do the actual work: if ":" in data: if data not in self.redactions: - if data.upper() not in self.redactions: - for find, fix in list(self.redactions.items()): - if find in data: - self.redactions[data] = re.sub(find, fix, data, flags=re.IGNORECASE) - data = self.redactions[data] - break - else: - data = self.redactions[data.upper()] + for find, fix in list(self.redactions.items()): + if find in data: + self.redactions[data] = data.replace(find, fix) + data = self.redactions[data] + break else: data = self.redactions[data] # redactions done, now replace any remaining MAC addresses From 20c0e0fbe5880be5dbb35c4d0c3e647470710c7c Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 11:18:17 -0500 Subject: [PATCH 08/35] change time to 8 hours --- custom_components/bermuda/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index ddd729c..6e000f9 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -1280,7 +1280,7 @@ async def purge_redactions(self, hass: HomeAssistant): self.redactions = {} async_call_later( hass, - 1 * 1 * 10, + 8 * 60 * 60, lambda _: hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)), ) From da6e06e356525c6a517fcf656ce4ad83b52dad92 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 11:23:04 -0500 Subject: [PATCH 09/35] Add cancel on shutdown --- custom_components/bermuda/coordinator.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 6e000f9..9aa29e1 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -22,6 +22,7 @@ from homeassistant.core import ( Event, EventStateChangedData, + HassJob, HomeAssistant, ServiceCall, ServiceResponse, @@ -1281,7 +1282,10 @@ async def purge_redactions(self, hass: HomeAssistant): async_call_later( hass, 8 * 60 * 60, - lambda _: hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)), + lambda _: HassJob( + hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)), + cancel_on_shutdown=True, + ), ) def redact_data(self, data): From d82f24646a1dfd2a46cdac68b8244862c053661f Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 
11:32:12 -0500 Subject: [PATCH 10/35] cancel on unload --- custom_components/bermuda/__init__.py | 1 + custom_components/bermuda/coordinator.py | 27 ++++++++++++++++++++---- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index 3c4df1f..d5b6c0d 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -87,6 +87,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> """Handle removal of an entry.""" if unload_result := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): _LOGGER.debug("Unloaded platforms.") + await entry.runtime_data.coordinator.stop_purging() return unload_result diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 9aa29e1..28b4a38 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -215,6 +215,7 @@ def __init__( # self.updaters: dict[str, BermudaPBDUCoordinator] = {} # Run it once so it will schedule itself in the future. + self._purge_task = None hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)) self.area_reg = ar.async_get(hass) @@ -333,13 +334,19 @@ def handle_devreg_changes(self, ev: Event[EventDeviceRegistryUpdatedData]): _LOGGER.debug("Trigger updating of Scanner Listings") self._do_full_scanner_init = True else: - _LOGGER.error("Received DR update/create but device id does not exist: %s", ev.data["device_id"]) + _LOGGER.error( + "Received DR update/create but device id does not exist: %s", + ev.data["device_id"], + ) elif ev.data["action"] == "remove": device_found = False for scanner in self.scanner_list: if self.devices[scanner].entry_id == ev.data["device_id"]: - _LOGGER.debug("Scanner %s removed, trigger update of scanners.", self.devices[scanner].name) + _LOGGER.debug( + "Scanner %s removed, trigger update of scanners.", + self.devices[scanner].name, + ) self._do_full_scanner_init = True device_found = True if not device_found: @@ -391,7 +398,13 @@ def _check_all_platforms_created(self, address): """Checks if all platforms have finished loading a device's entities.""" dev = self._get_device(address) if dev is not None: - if all([dev.create_sensor_done, dev.create_tracker_done, dev.create_number_done]): + if all( + [ + dev.create_sensor_done, + dev.create_tracker_done, + dev.create_number_done, + ] + ): dev.create_all_done = True def sensor_created(self, address): @@ -1279,7 +1292,7 @@ def redaction_list_update(self): async def purge_redactions(self, hass: HomeAssistant): """Empty redactions and free up some memory.""" self.redactions = {} - async_call_later( + self._purge_task = async_call_later( hass, 8 * 60 * 60, lambda _: HassJob( @@ -1288,6 +1301,12 @@ async def purge_redactions(self, hass: HomeAssistant): ), ) + async def stop_purging(self): + """Stop purging. There might be a better way to do this?.""" + if self._purge_task: + self._purge_task() # This cancels the async_call_later task + self._purge_task = None + def redact_data(self, data): """ Wash any collection of data of any MAC addresses. 
From c29f1fe3de91bffd8560aa0b3ab4011c75bec4dd Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 11:39:49 -0500 Subject: [PATCH 11/35] Add on unload --- custom_components/bermuda/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index d5b6c0d..b3dc1d5 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -50,6 +50,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) + entry.async_on_unload(coordinator.stop_purging) + return True From 19b54c99ecf9163c368e19c00c0be94eb762a9e2 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 11:46:00 -0500 Subject: [PATCH 12/35] Move to init so it only gets run after startup --- custom_components/bermuda/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index b3dc1d5..bd72f73 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -50,7 +50,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - entry.async_on_unload(coordinator.stop_purging) + + # Run it once so it will schedule itself in the future. + hass.loop.call_soon_threadsafe(hass.async_create_task, coordinator.purge_redactions(hass)) return True From 15dc1858f006e8e2114d23b6827593456d5a3745 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 11:50:39 -0500 Subject: [PATCH 13/35] Move to init so it only gets run after startup --- custom_components/bermuda/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index bd72f73..6ed780c 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -52,7 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): entry.async_on_unload(entry.add_update_listener(async_reload_entry)) # Run it once so it will schedule itself in the future. 
- hass.loop.call_soon_threadsafe(hass.async_create_task, coordinator.purge_redactions(hass)) + await coordinator.purge_redactions(hass) return True From 05c85123fcc12a22267be71e21da972a7c7c5f88 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 13:27:16 -0500 Subject: [PATCH 14/35] Try again --- .github/workflows/tests.yaml | 3 +-- custom_components/bermuda/__init__.py | 7 ++++++- requirements_test.txt | 6 ------ 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 649ad9e..2916eb1 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -29,8 +29,7 @@ jobs: - name: Install Python modules run: | - pip install --constraint=.github/workflows/constraints.txt pre-commit black - # flake8 isort + pip install --constraint=.github/workflows/constraints.txt pre-commit - name: "Install requirements" run: python3 -m pip install -r requirements.txt diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index 6ed780c..04e62d1 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -50,9 +50,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - # Run it once so it will schedule itself in the future. await coordinator.purge_redactions(hass) + entry.async_on_unload(coordinator.stop_purging) return True @@ -98,3 +98,8 @@ async def async_unload_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> async def async_reload_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> None: """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) + + +async def remove_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> None: + """Remove the entry.""" + await entry.runtime_data.coordinator.stop_purging() diff --git a/requirements_test.txt b/requirements_test.txt index 8a51f0a..51d85bd 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,11 +1,5 @@ -r requirements.txt -pytest-asyncio pytest-homeassistant-custom-component #==0.13.50 pre-commit -isort -black==24.10.0 # AJG 2024-04-07: github not completing tests due to this. 
-pyudev -pyserial-asyncio==0.6 -pyserial==3.5 From 7625a63e09f52cc55af6ce26066e292ca6178fdc Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 13:30:10 -0500 Subject: [PATCH 15/35] add back removed --- requirements.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements.txt b/requirements.txt index ba33afb..938fad1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,6 @@ colorlog==6.9.0 homeassistant>=2024.6.0 pip>=24.1.1,<24.4 ruff==0.7.2 +pyudev +pyserial-asyncio==0.6 +pyserial==3.5 From e70e01d8d9bb593597639f28bb889ba485fb9b0e Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 13:32:32 -0500 Subject: [PATCH 16/35] change to async_remove --- custom_components/bermuda/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index 04e62d1..bc91661 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -100,6 +100,6 @@ async def async_reload_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> await hass.config_entries.async_reload(entry.entry_id) -async def remove_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> None: +async def async_remove_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> None: """Remove the entry.""" await entry.runtime_data.coordinator.stop_purging() From 46e515686199d0e564895a65ff67f712702e20de Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 13:43:54 -0500 Subject: [PATCH 17/35] Try try again --- custom_components/bermuda/__init__.py | 8 -------- custom_components/bermuda/coordinator.py | 4 +--- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index bc91661..512f536 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -50,9 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - # Run it once so it will schedule itself in the future. - await coordinator.purge_redactions(hass) - entry.async_on_unload(coordinator.stop_purging) return True @@ -98,8 +95,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> async def async_reload_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> None: """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) - - -async def async_remove_entry(hass: HomeAssistant, entry: BermudaConfigEntry) -> None: - """Remove the entry.""" - await entry.runtime_data.coordinator.stop_purging() diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 28b4a38..5aa7e1b 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -214,9 +214,7 @@ def __init__( self.devices: dict[str, BermudaDevice] = {} # self.updaters: dict[str, BermudaPBDUCoordinator] = {} - # Run it once so it will schedule itself in the future. - self._purge_task = None - hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)) + self._purge_task = hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)) self.area_reg = ar.async_get(hass) # Restore the scanners saved in config entry data. 
We maintain From aa568bb054db40acc2faf353f40de1761beb1eab Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 13:58:57 -0500 Subject: [PATCH 18/35] Try try again --- custom_components/bermuda/coordinator.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 5aa7e1b..4580f65 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -213,7 +213,7 @@ def __init__( self.devices: dict[str, BermudaDevice] = {} # self.updaters: dict[str, BermudaPBDUCoordinator] = {} - + self._has_purged = False self._purge_task = hass.loop.call_soon_threadsafe(hass.async_create_task, self.purge_redactions(hass)) self.area_reg = ar.async_get(hass) @@ -1298,12 +1298,17 @@ async def purge_redactions(self, hass: HomeAssistant): cancel_on_shutdown=True, ), ) + self._has_purged = True async def stop_purging(self): """Stop purging. There might be a better way to do this?.""" if self._purge_task: - self._purge_task() # This cancels the async_call_later task - self._purge_task = None + if self._has_purged: + self._purge_task() # This cancels the async_call_later task + self._purge_task = None + else: + self._purge_task.cancel() + self._purge_task = None def redact_data(self, data): """ From f9340c71889f242da755f30fc8c0170117bea1a2 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 14:03:41 -0500 Subject: [PATCH 19/35] I think finally fixed --- custom_components/bermuda/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index 512f536..53231de 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -45,6 +45,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): if not coordinator.last_update_success: _LOGGER.debug("Coordinator last update failed, rasing ConfigEntryNotReady") + await coordinator.stop_purging() raise ConfigEntryNotReady await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) From 2ddaef0ad08161bc04d5293d495763eafef3b092 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 14:07:03 -0500 Subject: [PATCH 20/35] change error --- tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7b9aa75..75c9d1a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,6 +7,7 @@ import pytest from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from pytest_homeassistant_custom_component.common import MockConfigEntry @@ -69,7 +70,7 @@ def error_get_data_fixture(): """Simulate error when retrieving data from API.""" with patch( "custom_components.bermuda.BermudaDataUpdateCoordinator.async_refresh", - side_effect=Exception, + side_effect=HomeAssistantError, ): yield From b8acc4a8d094bceff18787cc626bf8bebdab38a2 Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 14:14:40 -0500 Subject: [PATCH 21/35] Change error again --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 75c9d1a..a5208fa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ import pytest from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from 
homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from pytest_homeassistant_custom_component.common import MockConfigEntry @@ -70,7 +70,7 @@ def error_get_data_fixture(): """Simulate error when retrieving data from API.""" with patch( "custom_components.bermuda.BermudaDataUpdateCoordinator.async_refresh", - side_effect=HomeAssistantError, + side_effect=UpdateFailed, ): yield From 7b746c94aa4c289ff6779e19ec581dda353ddede Mon Sep 17 00:00:00 2001 From: Luke Date: Mon, 11 Nov 2024 14:23:58 -0500 Subject: [PATCH 22/35] Change error again --- custom_components/bermuda/__init__.py | 12 +++++++++--- tests/conftest.py | 2 +- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/custom_components/bermuda/__init__.py b/custom_components/bermuda/__init__.py index 53231de..4a06ba2 100644 --- a/custom_components/bermuda/__init__.py +++ b/custom_components/bermuda/__init__.py @@ -41,13 +41,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: BermudaConfigEntry): coordinator = BermudaDataUpdateCoordinator(hass, entry) entry.runtime_data = BermudaData(coordinator) - await coordinator.async_refresh() - - if not coordinator.last_update_success: + async def on_failure(): _LOGGER.debug("Coordinator last update failed, rasing ConfigEntryNotReady") await coordinator.stop_purging() raise ConfigEntryNotReady + try: + await coordinator.async_refresh() + except Exception as ex: # noqa: BLE001 + _LOGGER.exception(ex) + await on_failure() + if not coordinator.last_update_success: + await on_failure() + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) diff --git a/tests/conftest.py b/tests/conftest.py index a5208fa..9b7e46b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,7 +70,7 @@ def error_get_data_fixture(): """Simulate error when retrieving data from API.""" with patch( "custom_components.bermuda.BermudaDataUpdateCoordinator.async_refresh", - side_effect=UpdateFailed, + side_effect=Exception, ): yield From 2c2e13574462c13884bfa2bb497fab6fc616c097 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Wed, 13 Nov 2024 16:16:33 +0000 Subject: [PATCH 23/35] fix: regression detecting local usb adaptor as scanner --- custom_components/bermuda/coordinator.py | 27 ++++++++++++++++++------ 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 034067d..f9595e3 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -10,6 +10,7 @@ import voluptuous as vol import yaml +from habluetooth import BaseHaRemoteScanner, BaseHaScanner from homeassistant.components import bluetooth from homeassistant.components.bluetooth import ( MONOTONIC_TIME, @@ -152,7 +153,9 @@ def __init__( update_interval=timedelta(seconds=UPDATE_INTERVAL), ) - self._manager: HomeAssistantBluetoothManager = _get_manager(hass) + self._manager: HomeAssistantBluetoothManager = _get_manager(hass) # instance of the bluetooth manager + self._hascanners: set[BaseHaScanner] # Links to the backend scanners + self._hascanner_timestamps: dict[str, dict[str, float]] # scanner_address, device_address, stamp self._entity_registry = er.async_get(self.hass) self._device_registry = dr.async_get(self.hass) @@ -1127,22 +1130,32 @@ def _refresh_scanners(self, scanners: list[BluetoothScannerDevice] | None = None # scanner_b: BermudaDevice entry # # Evil: 
We're accessing private members of bt manager to do it since there's no API call for it. - _allscanners = self._manager._connectable_scanners | self._manager._non_connectable_scanners # noqa: SLF001 - for scanner_ha in _allscanners: - scanner_address = format_mac(scanner_ha.source).lower() - scanner_devreg = self._device_registry.async_get_device(connections={("mac", scanner_address)}) + self._hascanners = self._manager._connectable_scanners | self._manager._non_connectable_scanners # noqa: SLF001 + for hascanner in self._hascanners: + scanner_address = format_mac(hascanner.source).lower() + scanner_devreg = self._device_registry.async_get_device( + connections={ + ("mac", scanner_address), # Matches ESPHome proxies, Shellys etc + ("bluetooth", scanner_address), # Matches local USB Bluetooth (hci0..) + } + ) if scanner_devreg is None: _LOGGER_SPAM_LESS.error( "scanner_not_in_devreg", "Failed to find scanner %s (%s) in Device Registry", - scanner_ha.name, - scanner_ha.source, + hascanner.name, + hascanner.source, ) continue # _LOGGER.info("Great! Found scanner: %s (%s)", scanner_ha.name, scanner_ha.source) # Since this scanner still exists, we won't purge it if scanner_address in _purge_scanners: _purge_scanners.remove(scanner_address) + + # Populate the local copy of timestamps, if applicable + if isinstance(hascanner, BaseHaRemoteScanner): + self._hascanner_timestamps[hascanner.source.lower()] = hascanner._discovered_device_timestamps # noqa: SLF001 + scanner_b = self._get_device(scanner_address) if scanner_b is None: # It's a new scanner, we will need to update our saved config. From 10d9d3c28e4a13e9bf179ee020890835bd84652c Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Thu, 14 Nov 2024 04:48:12 +1100 Subject: [PATCH 24/35] Fix: Global sensor update rate and refpower calibration update (#383) * fix: Slowed global diag entity updates fixes [Rate-limit] Global visible device count [and others] #377 - Added _cached_ratelimit to BermudaGlobalEntity and applied to sensors - Added ref_power_changed stamp and tied ref_power changes to cache invalidation in entities, so calibration that increases distances shows up immediately. - allowed specifying custom interval to main _cached_ratelimit as well as global. --- custom_components/bermuda/bermuda_device.py | 4 ++++ custom_components/bermuda/entity.py | 26 +++++++++++++++++++-- custom_components/bermuda/sensor.py | 8 +++---- 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/custom_components/bermuda/bermuda_device.py b/custom_components/bermuda/bermuda_device.py index ee98e9f..8650955 100644 --- a/custom_components/bermuda/bermuda_device.py +++ b/custom_components/bermuda/bermuda_device.py @@ -55,6 +55,7 @@ def __init__(self, address, options) -> None: self.prefname: str | None = None # "preferred" name - ideally local_name self.address: str = address self.ref_power: float = 0 # If non-zero, use in place of global ref_power. + self.ref_power_changed: float = 0 # Stamp for last change to ref_power, for cache zapping. self.options = options self.unique_id: str | None = None # mac address formatted. self.address_type = BDADDR_TYPE_UNKNOWN @@ -152,6 +153,9 @@ def set_ref_power(self, new_ref_power: float): # gets applied. # if nearest_scanner is not None: self.apply_scanner_selection(nearest_scanner) + # Update the stamp so that the BermudaEntity can clear the cache and show the + # new measurement(s) immediately.
+ self.ref_power_changed = MONOTONIC_TIME() def apply_scanner_selection(self, closest_scanner: BermudaDeviceScanner | None): """ diff --git a/custom_components/bermuda/entity.py b/custom_components/bermuda/entity.py index cc5e98f..42558b6 100644 --- a/custom_components/bermuda/entity.py +++ b/custom_components/bermuda/entity.py @@ -52,16 +52,22 @@ def __init__( self.bermuda_last_state: Any = 0 self.bermuda_last_stamp: float = 0 - def _cached_ratelimit(self, statevalue: Any, fast_falling=True, fast_rising=False): + def _cached_ratelimit(self, statevalue: Any, fast_falling=True, fast_rising=False, interval=None): """ Uses the CONF_UPDATE_INTERVAL and other logic to return either the given statevalue or an older, cached value. Helps to reduce excess sensor churn without compromising latency. - Only suitable for MEASUREMENTS, as numerical comparison is used. + Mostly suitable for MEASUREMENTS, but should work with strings, too. + If interval is specified the cache will use that (in seconds), otherwise the default is + the CONF_UPDATE_INTERVAL (typically suitable for fast-close slow-far sensors) """ + if interval is not None: + self.bermuda_update_interval = interval + nowstamp = MONOTONIC_TIME() if ( (self.bermuda_last_stamp < nowstamp - self.bermuda_update_interval) # Cache is stale + or (self._device.ref_power_changed > nowstamp + 2) # ref power changed in last 2sec or (self.bermuda_last_state is None) # Nothing compares to you. or (statevalue is None) # or you. or (fast_falling and statevalue < self.bermuda_last_state) # (like Distance) @@ -165,6 +171,9 @@ def __init__( super().__init__(coordinator) self.coordinator = coordinator self.config_entry = config_entry + self._cache_ratelimit_value = None + self._cache_ratelimit_stamp: float = 0 + self._cache_ratelimit_interval = 60 @callback def _handle_coordinator_update(self) -> None: @@ -175,6 +184,19 @@ def _handle_coordinator_update(self) -> None: """ self.async_write_ha_state() + def _cached_ratelimit(self, statevalue: Any, interval:int|None=None): + """A simple way to rate-limit sensor updates.""" + if interval is not None: + self._cache_ratelimit_interval = interval + nowstamp = MONOTONIC_TIME() + + if nowstamp > self._cache_ratelimit_stamp + self._cache_ratelimit_interval: + self._cache_ratelimit_stamp = nowstamp + self._cache_ratelimit_value = statevalue + return statevalue + else: + return self._cache_ratelimit_value + @property def device_info(self): """Implementing this creates an entry in the device registry.""" diff --git a/custom_components/bermuda/sensor.py b/custom_components/bermuda/sensor.py index 56a4120..95095f3 100644 --- a/custom_components/bermuda/sensor.py +++ b/custom_components/bermuda/sensor.py @@ -351,7 +351,7 @@ def unique_id(self): @property def native_value(self) -> int: """Gets the number of proxies we have access to.""" - return len(self.coordinator.scanner_list) + return self._cached_ratelimit(len(self.coordinator.scanner_list)) or 0 @property def name(self): @@ -375,7 +375,7 @@ def unique_id(self): @property def native_value(self) -> int: """Gets the number of proxies we have access to.""" - return self.coordinator.count_active_scanners() + return self._cached_ratelimit(self.coordinator.count_active_scanners()) or 0 @property def name(self): @@ -399,7 +399,7 @@ def unique_id(self): @property def native_value(self) -> int: """Gets the amount of devices we have seen.""" - return len(self.coordinator.devices) + return self._cached_ratelimit(len(self.coordinator.devices)) or 0 @property def name(self): @@ -423,7
+423,7 @@ def unique_id(self): @property def native_value(self) -> int: """Gets the amount of devices that are active.""" - return self.coordinator.count_active_devices() + return self._cached_ratelimit(self.coordinator.count_active_devices()) or 0 @property def name(self): From 915790155fff0269a9bed7ecc74bc0586e4a7225 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Wed, 13 Nov 2024 18:18:43 +0000 Subject: [PATCH 25/35] fix: daft typo --- custom_components/bermuda/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index f9595e3..8bf3bc7 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -155,7 +155,7 @@ def __init__( self._manager: HomeAssistantBluetoothManager = _get_manager(hass) # instance of the bluetooth manager self._hascanners: set[BaseHaScanner] # Links to the backend scanners - self._hascanner_timestamps: dict[str, dict[str, float]] # scanner_address, device_address, stamp + self._hascanner_timestamps: dict[str, dict[str, float]] = {} # scanner_address, device_address, stamp self._entity_registry = er.async_get(self.hass) self._device_registry = dr.async_get(self.hass) From aba9257c6bc8f9e180a37afdaa005de53a7aeb84 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Fri, 15 Nov 2024 04:52:34 +1100 Subject: [PATCH 26/35] fix: Local USB Bluetooth detection (#387) fixes #386 * fix: Local USB Bluetooth detection - fixes Failed to find Scanner (local USB Bluetooth) #386 - linting * - linting --- custom_components/bermuda/coordinator.py | 4 ++-- custom_components/bermuda/entity.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 8bf3bc7..e79542e 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -155,7 +155,7 @@ def __init__( self._manager: HomeAssistantBluetoothManager = _get_manager(hass) # instance of the bluetooth manager self._hascanners: set[BaseHaScanner] # Links to the backend scanners - self._hascanner_timestamps: dict[str, dict[str, float]] = {} # scanner_address, device_address, stamp + self._hascanner_timestamps: dict[str, dict[str, float]] = {} # scanner_address, device_address, stamp self._entity_registry = er.async_get(self.hass) self._device_registry = dr.async_get(self.hass) @@ -1136,7 +1136,7 @@ def _refresh_scanners(self, scanners: list[BluetoothScannerDevice] | None = None scanner_devreg = self._device_registry.async_get_device( connections={ ("mac", scanner_address), # Matches ESPHome proxies, Shellys etc - ("bluetooth", scanner_address), # Matches local USB Bluetooth (hci0..) + ("bluetooth", scanner_address.upper()), # Matches local USB Bluetooth (hci0..) 
} ) if scanner_devreg is None: diff --git a/custom_components/bermuda/entity.py b/custom_components/bermuda/entity.py index 42558b6..80b2918 100644 --- a/custom_components/bermuda/entity.py +++ b/custom_components/bermuda/entity.py @@ -184,7 +184,7 @@ def _handle_coordinator_update(self) -> None: """ self.async_write_ha_state() - def _cached_ratelimit(self, statevalue: Any, interval:int|None=None): + def _cached_ratelimit(self, statevalue: Any, interval: int | None = None): """A simple way to rate-limit sensor updates.""" if interval is not None: self._cache_ratelimit_interval = interval From dc6637f837c8d42669a937e3ae77782ebb5c00d0 Mon Sep 17 00:00:00 2001 From: J-D <16978110+createthisnl@users.noreply.github.com> Date: Sun, 1 Dec 2024 09:29:27 +0100 Subject: [PATCH 27/35] Add Dutch translation and reduce hardcoded language in scripts (#398) * Add Dutch translation * Update Greek translation * Update English (base) translation * Update config_flow.py to work better with translations --- custom_components/bermuda/config_flow.py | 9 +- .../bermuda/translations/el.json | 11 +- .../bermuda/translations/en.json | 11 +- .../bermuda/translations/nl.json | 122 ++++++++++++++++++ 4 files changed, 146 insertions(+), 7 deletions(-) create mode 100644 custom_components/bermuda/translations/nl.json diff --git a/custom_components/bermuda/config_flow.py b/custom_components/bermuda/config_flow.py index e76d90e..62a9c44 100644 --- a/custom_components/bermuda/config_flow.py +++ b/custom_components/bermuda/config_flow.py @@ -142,8 +142,11 @@ async def async_step_init(self, user_input=None): # pylint: disable=unused-argu active_devices = self.coordinator.count_active_devices() active_scanners = self.coordinator.count_active_scanners() - messages["device_count"] = f"{active_devices} active out of {len(self.devices)}" - messages["scanner_count"] = f"{active_scanners} active out of {len(self.coordinator.scanner_list)}" + messages["device_counter_active"] = f"{active_devices}" + messages["device_counter_devices"] = f"{len(self.devices)}" + messages["scanner_counter_active"] = f"{active_scanners}" + messages["scanner_counter_scanners"] = f"{len(self.coordinator.scanner_list)}" + if len(self.coordinator.scanner_list) == 0: messages["status"] = ( "You need to configure some bluetooth scanners before Bermuda will have anything to work with. " @@ -159,7 +162,7 @@ async def async_step_init(self, user_input=None): # pylint: disable=unused-argu messages["status"] = "You have at least some active devices, this is good." # Build a markdown table of scanners so the user can see what's up. 
- scanner_table = "\nStatus of scanners:\n\n|Scanner|Address|Last advertisement|\n|---|---|---:|\n" + scanner_table = "\n\nStatus of scanners:\n\n|Scanner|Address|Last advertisement|\n|---|---|---:|\n" # Use emoji to indicate if age is "good" for scanner in self.coordinator.get_active_scanner_summary(): age = int(scanner.get("last_stamp_age", 999)) diff --git a/custom_components/bermuda/translations/el.json b/custom_components/bermuda/translations/el.json index 316f349..fb4ed6b 100644 --- a/custom_components/bermuda/translations/el.json +++ b/custom_components/bermuda/translations/el.json @@ -3,7 +3,7 @@ "step": { "user": { "title": "Bermuda BLE Trilateration", - "description": "Αν χρειάζεστε βοήθεια για τη διαμόρφωση, ρίξτε μια ματιά εδώ: https://github.com/agittins/bermuda", + "description": "Αν χρειάζεστε βοήθεια για τη διαμόρφωση, ρίξτε μια ματιά στη σελίδα [GitHub](https://github.com/agittins/bermuda)", "data": { "username": "Όνομα Χρήστη", "password": "Κωδικός Πρόσβασης" @@ -18,11 +18,18 @@ } }, "options": { + "error": { + "some_active": "You have at least some active devices, this is good.", + "no_scanners": "You need to configure some bluetooth scanners before Bermuda will have anything to work with. \nAny one of esphome bluetooth_proxy, Shelly bluetooth proxy or local bluetooth adaptor should get you started.", + "no_devices": "No bluetooth devices are actively being reported from your scanners. \nYou will need to solve this before Bermuda can be of much help." + }, "step": { "init": { - "description": "Το Bermuda βλέπει επί του παρόντος:\n- {device_count} συσκευές Bluetooth.\n- {scanner_count} συσκευές ανιχνευτή Bluetooth.\n\n{status}" + "title": "Bermuda configuration", + "description": "Το Bermuda βλέπει επί του παρόντος:\n- {device_counter_active} ενεργή στις {device_counter_devices} συσκευές Bluetooth.\n- {scanner_counter_active} ενεργή στις {scanner_counter_scanners} συσκευές ανιχνευτή Bluetooth.\n\n{status}" }, "globalopts": { + "title": "Global settings", "data": { "max_area_radius": "Μέγιστη ακτίνα σε μέτρα για απλή ανίχνευση ΠΕΡΙΟΧΗΣ", "max_velocity": "Μέγιστη Ταχύτητα σε μέτρα ανά δευτερόλεπτο - αγνοεί μετρήσεις που υποδεικνύουν απομάκρυνση πιο γρήγορη από αυτό το όριο. 3m/s (10km/h) είναι καλό.", diff --git a/custom_components/bermuda/translations/en.json b/custom_components/bermuda/translations/en.json index d406c69..2c75244 100644 --- a/custom_components/bermuda/translations/en.json +++ b/custom_components/bermuda/translations/en.json @@ -3,7 +3,7 @@ "step": { "user": { "title": "Bermuda BLE Trilateration", - "description": "If you need help with the configuration have a look here: https://github.com/agittins/bermuda", + "description": "If you need help with the configuration have a look at our [GitHub page](https://github.com/agittins/bermuda)", "data": { "username": "Username", "password": "Password" @@ -18,11 +18,18 @@ } }, "options": { + "error": { + "some_active": "You have at least some active devices, this is good.", + "no_scanners": "You need to configure some bluetooth scanners before Bermuda will have anything to work with. \nAny one of esphome bluetooth_proxy, Shelly bluetooth proxy or local bluetooth adaptor should get you started.", + "no_devices": "No bluetooth devices are actively being reported from your scanners. \nYou will need to solve this before Bermuda can be of much help." 
+ }, "step": { "init": { - "description": "Bermuda can currently see:\n- {device_count} bluetooth devices.\n- {scanner_count} bluetooth scanner devices.\n\n{status}" + "title": "Configure Bermuda", + "description": "Bermuda can currently see:\n- {device_counter_active} active out of {device_counter_devices} bluetooth devices.\n- {scanner_counter_active} active out of {scanner_counter_scanners} bluetooth scanner devices.\n\n{status}" }, "globalopts": { + "title": "Global settings", "data": { "max_area_radius": "Max radius in metres for simple AREA detection", "max_velocity": "Max Velocity in metres per second - ignore readings that imply movement away faster than this limit. 3m/s (10km/h) is good.", diff --git a/custom_components/bermuda/translations/nl.json b/custom_components/bermuda/translations/nl.json new file mode 100644 index 0000000..fe870e6 --- /dev/null +++ b/custom_components/bermuda/translations/nl.json @@ -0,0 +1,122 @@ +{ + "title": "Bermuda BLE Trilateratie", + "config": { + "step": { + "user": { + "title": "Bermuda BLE Trilateration", + "description": "Als je hulp nodig hebt met de configuratie, kijk dan op onze [GitHub pagina](https://github.com/agittins/bermuda)", + "data": { + "username": "Gebruikersnaam", + "password": "Wachtwoord" + } + } + }, + "error": { + "auth": "Gebruikersnaam/wachtwoord is verkeerd." + }, + "abort": { + "single_instance_allowed": "Al geconfigureerd. Slechts één configuratie mogelijk." + } + }, + "options": { + "error": { + "some_active": "Er zijn ten minste enkele actieve apparaten, dat is goed.", + "no_scanners": "Je moet enkele Bluetooth-scanners configureren voordat Bermuda iets heeft om mee te werken. \nMet ESPhome bluetooth_proxy, Shelly bluetooth proxy of een lokale bluetooth adapter kun je aan de slag.", + "no_devices": "Er worden geen bluetooth-apparaten actief gemeld door uw scanners. \nDit moet eerst opgelost worden voordat Bermuda ergens mee kan helpen." + }, + "step": { + "init": { + "title": "Bermuda configureren", + "description": "Bermuda kan op dit moment het volgende zien:\n- {device_counter_active}, van de {device_counter_devices}, actieve Bluetooth-apparaten.\n- {scanner_counter_active}, van de {scanner_counter_scanners}, actieve Bluetooth-scanners.\n\n{status}" + }, + "globalopts": { + "title": "Globale instellingen", + "data": { + "max_area_radius": "Maximale radius in meters voor eenvoudige ruimte detectie", + "max_velocity": "Maximale snelheid in meter per seconde - negeer metingen met snellere afstandstoename dan deze limiet.", + "devtracker_nothome_timeout": "Devtracker Timeout - Tijd in seconden om een apparaat als \"Afwezig\" te beschouwen.", + "update_interval": "Update Interval - Hoe vaak (in seconden) de sensormetingen worden bijgewerkt.", + "smoothing_samples": "Smoothing Samples - Hoeveel samples er gebruikt worden voor het afvlakken van afstandsmetingen.", + "attenuation": "Attenuation - Omgevingsdempingsfactor voor afstandsberekening/kalibratie.", + "ref_power": "Referentievermogen - Standaard RSSI-waarde op 1 meter afstand, voor afstandskalibratie.", + "configured_devices": "Geconfigureerde apparaten - Selecteer welke Bluetooth-apparaten of beacons moeten worden gevolgd met sensoren." + }, + "data_description": { + "max_area_radius": "Met de eenvoudige `RUIMTE` functie wordt een apparaat gemarkeerd alszijnde in de ruimte van de dichtsbijzijnde scanner, als het zich binnen deze straal bevindt. 
\nAls deze waarde te klein wordt ingesteld zullen apparaten als `Onbekend` gemarkeerd worden bij verplaatsing tussen scanners. \nAls deze waarde te groot wordt ingesteld zullen apparaten altijd verschijnen als in hun dichtsbijzijnde ruimte.",
+        "max_velocity": "Als uit een meting blijkt dat een apparaat sneller weg beweegt dan dit, wordt die meting genegeerd. 3m/s (10km/h) is goed.\nMensen lopen normaal gesproken met een snelheid van 1,4 m/s. Maar als ze een schaar vasthouden, bewegen ze met een snelheid van 3 m/s.",
+        "devtracker_nothome_timeout": "Hoe snel device_tracker-entiteiten als `not_home` worden gemarkeerd nadat er geen advertenties meer worden gezien. 30 tot 300 seconden is waarschijnlijk goed.",
+        "update_interval": "Afstandsafnames worden nog steeds onmiddellijk geactiveerd, maar afstandstoenames worden hiermee beperkt om de groei van de database te verminderen.",
+        "smoothing_samples": "Hoeveel samples worden gebruikt om de gemiddelde afvlakking te berekenen. Een groter getal zorgt voor langzamere afstandstoename. Afstandsafnames worden hierdoor niet beïnvloed. 10 of 20 lijkt goed.",
+        "attenuation": "Na het instellen van het referentievermogen op 1 meter afstand, kan de dempingsfactor aangepast worden zodat andere afstanden, min of meer, correct gelezen worden.",
+        "ref_power": "Plaats het meest gebruikelijke Bluetooth-apparaat of beacon op 1 meter (3,28') afstand van de meest gebruikelijke scanner/proxy. Pas het referentievermogen aan totdat de afstandssensor een laagste (niet gemiddelde) afstand van 1 meter aangeeft."
+      }
+    },
+    "selectdevices": {
+      "title": "Selecteer apparaten",
+      "description": "Kies welke apparaten moeten worden gevolgd. Als er geen apparaten hieronder verschijnen, ziet Bermuda geen gegevens van Bluetooth-scanners. Zorg ervoor dat je een esphome ble_proxy-apparaat, Shelly-apparaten met Bluetooth-proxy geconfigureerd of een lokale Bluetooth-adapter hebt.",
+      "data": {
+        "configured_devices": "Apparaat"
+      }
+    },
+    "calibration1_global": {
+      "title": "Kalibratie 1: Globaal",
+      "description": "Deze stap is bedoeld om een aantal globale standaardwaarden voor afstandsberekeningen in te stellen.\n\n{details}\n{summary}\nKlik om uit te vouwen voor instructies!{summary_end}\n\nIn latere stappen kun je per apparaat overschrijvingen instellen, dus is het logisch om nu de meest voorkomende hardware te kiezen als 'referentiepaar' voor deze stap. Als de meeste van uw scanners bijvoorbeeld ESPHome zijn op een bepaald type bord, kies er dan een om te gebruiken als referentiescanner. Als je een handvol van een bepaald bakenmodel hebt, gebruik dan één daarvan als referentieapparaat.\n\n- Kies hieronder een apparaat en een scanner die je als 'referentiepaar' wilt gebruiken\n- Plaats uw gekozen apparaat fysiek op 1 meter van de gekozen scanner. Zorg ervoor dat ze een duidelijk zicht op elkaar hebben en vermijd dat ze zich in de buurt van organische levensvormen bevinden die het signaal kunnen verstoren.\n- Klik op 'Verzenden' en bekijk de RSSI-waarden in de tabel die hieronder verschijnt. Klik opnieuw op 'Verzenden' om de waarden te vernieuwen.\n- Zodra er een stabiele signaalsterkte is, vul je die waarde in het veld `Referentievermogen` in en klik daarna op 'Verzenden'.\n- De waarden worden bijgewerkt en de geschatte afstanden zouden dicht bij 1 meter moeten liggen. Herhaal dit zo nodig totdat je tevreden bent met het resultaat.\n- Verplaats het apparaat nu verder van de scanner en meet deze afstand met een meetlint.
Ongeveer 5 meter is bijvoorbeeld een goede afstand, de exacte afstand doet er niet toe maar het is belangrijk om de vrije zichtlijn te behouden, en je zult merken dat langere afstanden over het algemeen meer nauwkeurigheid geven.\n- Met het apparaat op de nieuwe afstand, klik nogmaals op 'Verzenden'. De meest recente metingen laten de nieuwe afstand zien, maar zullen waarschijnlijk onnauwkeurig zijn.\n- Experimenteer met verschillende waarden voor 'Demping' en klik op 'Verzenden', totdat er geschatte metingen zijn die overeenkomen met de fysieke afstand.\n- Eenmaal tevreden met de kalibratie, klik dan op 'Opslaan en sluiten' en klik daarna op 'Verzenden'.\n{details_end}\n{suffix}",
+      "suffix": "Nadat je op Verzenden hebt geklikt, worden hier de nieuwe afstanden weergegeven.",
+      "data": {
+        "configured_devices": "Apparaat",
+        "configured_scanners": "Scanner",
+        "save_and_close": "Opslaan en sluiten",
+        "attenuation": "Demping",
+        "ref_power": "Referentievermogen"
+      },
+      "data_description": {
+        "save_and_close": "Wanneer je tevreden bent met de kalibratie, vink dit vakje aan en klik op Verzenden. Uw wijzigingen worden opgeslagen en je kunt doorgaan naar de volgende kalibratiestap. Laat dit vakje uitgevinkt terwijl de instellingen nog worden aangepast en getest.",
+        "attenuation": "Na het aanpassen van de bovenstaande instellingen voor afstandsmetingen van 1 meter, plaats het apparaat verder weg (bijvoorbeeld 5 meter) en pas de demping opnieuw aan totdat de berekende afstanden overeenkomen met de fysieke afstand tussen de scanner en het apparaat. Klik op Verzenden om de nieuwe afstandsschattingen te bekijken.",
+        "ref_power": "Om deze instelling te kalibreren, plaats het apparaat op 1 meter van de scanner en pas de waarde aan totdat de bovenstaande getallen een afstand van 1 meter weergeven. \n\nLet op: De waarden worden pas opnieuw berekend nadat je op Verzenden hebt geklikt. Een negatieve waarde resulteert in een lagere afstand."
+      }
+    },
+    "calibration2_scanners": {
+      "title": "Kalibratie 2: Per-Scanner RSSI compensatie",
+      "description": "This step is optional but useful if your scanners have different sensitivities or varying antenna performance. Adjust the offset RSSI for each scanner until the calculated distance to the selected device is correct. Leave the scanner you used in your \"reference pair\" in step 1 at Zero.\n\n{suffix}",
+      "data": {
+        "configured_devices": "Apparaat",
+        "save_and_close": "Opslaan en sluiten",
+        "scanner_info": "Per-Scanner RSSI compensatie"
+      },
+      "data_description": {
+        "scanner_info": "Laat op 0 staan om de globale waarde te gebruiken, of voer een ander getal in om de RSSI die door die scanner wordt gerapporteerd te compenseren. Pas aan totdat de geschatte afstand hierboven overeenkomt met de werkelijke afstand tussen die scanner en het geselecteerde zendapparaat. Negatieve waarden vergroten de afstand, positieve waarden verkleinen deze."
+      }
+    }
+  }
+},
+"entity": {
+  "sensor": {
+    "distance": {
+      "name": "Afstand"
+    },
+    "area": {
+      "name": "Ruimte"
+    }
+  }
+},
+"services": {
+  "dump_devices": {
+    "name": "Dump Devices",
+    "description": "Haal de interne gegevensstructuur op, optioneel beperkt tot de opgegeven adressen. Bevat de RSSI en andere informatie van elke scanner",
+    "fields": {
+      "addresses": {
+        "name": "Addresses",
+        "description": "Een optionele, door spaties gescheiden, lijst met MAC-adressen om informatie over op te halen. Als leeg, worden alle adressen opgehaald."
+ }, + "configured_devices": { + "name": "Geconfigureerde apparaten", + "description": "Selecteer deze optie om alleen scanners en geconfigureerde apparaten in de uitvoer op te nemen." + }, + "redact": { + "name": "Redact", + "description": "Stel deze waarde in op `TRUE` om ervoor te zorgen dat MAC-adressen in de uitvoer worden onleesbaar gemaakt vanwege de privacy." + } + } + } + } +} From 9ad61b14e0be990c0d15b472d449ab19bf379811 Mon Sep 17 00:00:00 2001 From: Luke Date: Wed, 6 Nov 2024 23:42:35 -0500 Subject: [PATCH 28/35] some calc data updates --- .../bermuda/bermuda_device_scanner.py | 57 +++++++++++-------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index dcd7324..8ec7870 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -65,6 +65,7 @@ def __init__( # which is a bit silly, I suspect. self.name: str = scandata.scanner.name self.scanner_device_name = scanner_device.name + self.scanner_device = scanner_device # links to the source device self.adapter: str = scandata.scanner.adapter self.address = scanner_device.address self.source: str = scandata.scanner.source @@ -72,7 +73,6 @@ def __init__( self.area_name: str | None = scanner_device.area_name self.parent_device = parent_device self.parent_device_address = parent_device.address - self.scanner_device = scanner_device # links to the source device self.options = options self.stamp: float | None = 0 self.scanner_sends_stamps: bool = False @@ -102,6 +102,13 @@ def __init__( "platform_data": [], } + self.cached_remote_scanners = set() + self.conf_rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) + self.conf_ref_power = self.options.get(CONF_REF_POWER) + self.conf_attenuation = self.options.get(CONF_ATTENUATION) + self.conf_max_velocity = self.options.get(CONF_MAX_VELOCITY) + self.conf_smoothing_samples = self.options.get(CONF_SMOOTHING_SAMPLES) + # Just pass the rest on to update... self.update_advertisement(scandata) @@ -115,14 +122,19 @@ def update_advertisement(self, scandata: BluetoothScannerDevice): claims to have data. """ # In case the scanner has changed it's details since startup: + # FIXME: This should probably be a separate function that the refresh_scanners + # calls if necessary, rather than re-doing it every cycle. scanner = scandata.scanner - self.name: str = scanner.name - self.area_id: str = self.scanner_device.area_id + self.name = scanner.name + self.area_id = self.scanner_device.area_id self.area_name = self.scanner_device.area_name - new_stamp: float | None + new_stamp: float | None = None + + scanner_address = scandata.scanner.source + # Only remote scanners log timestamps here (local usb adaptors do not), - if scanner.source in self.cached_remote_scanners or isinstance(scanner, BaseHaRemoteScanner): - self.cached_remote_scanners.add(scanner.source) + if scanner_address in self.cached_remote_scanners or isinstance(scanner, BaseHaRemoteScanner): + self.cached_remote_scanners.add(scanner_address) # Found a remote scanner which has timestamp history... self.scanner_sends_stamps = True # There's no API for this, so we somewhat sneakily are accessing @@ -236,15 +248,16 @@ def _update_raw_distance(self, reading_is_new=True) -> float: setting change (such as altering a device's ref_power setting). 
""" # Check if we should use a device-based ref_power - if self.ref_power == 0: - ref_power = self.options.get(CONF_REF_POWER) + if self.ref_power == 0: # No user-supplied per-device value + # use global default + ref_power = self.conf_ref_power else: ref_power = self.ref_power distance = rssi_to_metres( - self.rssi + self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0), + self.rssi + self.conf_rssi_offset, ref_power, - self.options.get(CONF_ATTENUATION), + self.conf_attenuation ) self.rssi_distance_raw = distance if reading_is_new: @@ -371,11 +384,11 @@ def calculate_data(self): peak_velocity = delta_d / delta_t # if our initial reading is an approach, we are done here if peak_velocity >= 0: - for old_distance, hist_stamp in zip(self.hist_distance[2:], self.hist_stamp[2:], strict=False): - if hist_stamp is None: + for old_distance, old_stamp in zip(self.hist_distance[2:], self.hist_stamp[2:], strict=False): + if old_stamp is None: continue # Skip this iteration if hist_stamp[i] is None - delta_t = velo_newstamp - hist_stamp + delta_t = velo_newstamp - old_stamp if delta_t <= 0: # Additionally, skip if delta_t is zero or negative # to avoid division by zero @@ -384,7 +397,7 @@ def calculate_data(self): velocity = delta_d / delta_t - if velocity > peak_velocity: + if velocity > peak_velocity: # noqa: PLR1730 - max() is slower. # but on subsequent comparisons we only care if they're faster retreats peak_velocity = velocity # we've been through the history and have peak velo retreat, or the most recent @@ -396,7 +409,7 @@ def calculate_data(self): self.hist_velocity.insert(0, velocity) - if velocity > self.max_velocity: + if velocity > self.conf_max_velocity: if self.parent_device_address.upper() in self.options.get(CONF_DEVICES, []): _LOGGER.debug( "This sparrow %s flies too fast (%2fm/s), ignoring", @@ -413,10 +426,8 @@ def calculate_data(self): dist_count = len(self.hist_distance_by_interval) # trim the log to length - if self.smoothing_samples < dist_count: - del self.hist_distance_by_interval[self.smoothing_samples :] - # Set equal to smoothing samples - dist_count = self.smoothing_samples + if len(self.hist_distance_by_interval) > self.conf_smoothing_samples: + del self.hist_distance_by_interval[self.conf_smoothing_samples :] # Calculate a moving-window average, that only includes # historical values if they're "closer" (ie more reliable). @@ -430,13 +441,13 @@ def calculate_data(self): dist_total: float = 0 local_min: float = self.rssi_distance_raw or DISTANCE_INFINITE for distance in self.hist_distance_by_interval: - if distance <= local_min: + if distance is not None and distance <= local_min: local_min = distance dist_total += local_min - if dist_total > 0: # Calculate the minimised-windowed-average - movavg = dist_total / dist_count - else: # we have only a single measurement. + if (_hist_dist_len := len(self.hist_distance_by_interval)) > 0: + movavg = dist_total / _hist_dist_len + else: movavg = local_min # The average is only helpful if it's lower than the actual reading. 
if self.rssi_distance_raw is None or movavg < self.rssi_distance_raw: From 26296a2346f8033b62603399070ce3359cd2a70c Mon Sep 17 00:00:00 2001 From: Luke Date: Fri, 8 Nov 2024 08:30:10 -0500 Subject: [PATCH 29/35] Clean up and remove self.hist_dist_count --- .../bermuda/bermuda_device_scanner.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 8ec7870..56635da 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -89,12 +89,13 @@ def __init__( self.hist_distance_by_interval = [] # updated per-interval self.hist_interval = [] # WARNING: This is actually "age of ad when we polled" self.hist_velocity = [] # Effective velocity versus previous stamped reading + self.tx_power: float | None = None self.cached_remote_scanners = set() - self.rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) - self.ref_power = self.options.get(CONF_REF_POWER) - self.attenuation = self.options.get(CONF_ATTENUATION) - self.max_velocity = self.options.get(CONF_MAX_VELOCITY) - self.smoothing_samples = self.options.get(CONF_SMOOTHING_SAMPLES) + self.conf_rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) + self.conf_ref_power = self.options.get(CONF_REF_POWER) + self.conf_attenuation = self.options.get(CONF_ATTENUATION) + self.conf_max_velocity = self.options.get(CONF_MAX_VELOCITY) + self.conf_smoothing_samples = self.options.get(CONF_SMOOTHING_SAMPLES) self.adverts: dict[str, list] = { "manufacturer_data": [], "service_data": [], @@ -102,13 +103,6 @@ def __init__( "platform_data": [], } - self.cached_remote_scanners = set() - self.conf_rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) - self.conf_ref_power = self.options.get(CONF_REF_POWER) - self.conf_attenuation = self.options.get(CONF_ATTENUATION) - self.conf_max_velocity = self.options.get(CONF_MAX_VELOCITY) - self.conf_smoothing_samples = self.options.get(CONF_SMOOTHING_SAMPLES) - # Just pass the rest on to update... 
self.update_advertisement(scandata) From 7f7c652b1109e692900000d44077454e5c2bf38b Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Sun, 1 Dec 2024 15:39:36 +0000 Subject: [PATCH 30/35] feat: Redaction improvements - fix: IRK and iBeacon redactions (no ":") - Catch raw iBeacon uuid in advert hex strings - Ensure redaction list is refreshed before processing data (first_run on recursive func) --- custom_components/bermuda/coordinator.py | 37 ++++++++++++++---------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index e79542e..904dbf3 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -1288,7 +1288,6 @@ async def service_dump_devices(self, call: ServiceCall) -> ServiceResponse: # p out[address] = device.to_dict() if redact: - self.redaction_list_update() out = cast(ServiceResponse, self.redact_data(out)) return out @@ -1314,6 +1313,8 @@ def redaction_list_update(self): i += 1 if address.count("_") == 2: self.redactions[address] = f"{address[:4]}::CFG_iBea_{i}::{address[32:]}" + # Raw uuid in advert + self.redactions[address.split("_")[0]] = f"{address[:4]}::CFG_iBea_{i}_{address[32:]}::" elif len(address) == 17: self.redactions[address] = f"{address[:2]}::CFG_MAC_{i}::{address[-2:]}" else: @@ -1326,9 +1327,11 @@ def redaction_list_update(self): # Only add if they are not already there. i += 1 if device.address_type == ADDR_TYPE_PRIVATE_BLE_DEVICE: - self.redactions[address] = f"{address[:2]}::IRK_DEV_{i}" + self.redactions[address] = f"{address[:4]}::IRK_DEV_{i}" elif address.count("_") == 2: self.redactions[address] = f"{address[:4]}::OTHER_iBea_{i}::{address[32:]}" + # Raw uuid in advert + self.redactions[address.split("_")[0]] = f"{address[:4]}::OTHER_iBea_{i}_{address[32:]}::" elif len(address) == 17: # a MAC self.redactions[address] = f"{address[:2]}::OTHER_MAC_{i}::{address[-2:]}" else: @@ -1358,7 +1361,7 @@ async def stop_purging(self): self._purge_task.cancel() self._purge_task = None - def redact_data(self, data): + def redact_data(self, data, first_run=True): """ Wash any collection of data of any MAC addresses. @@ -1366,27 +1369,29 @@ def redact_data(self, data): washes any remaining mac-like addresses. This routine is recursive, so if you're changing it bear that in mind! """ - if len(self.redactions) == 0: - # Initialise the list of addresses if not already done. + if first_run: + # On first/outer call, refresh the redaction list to ensure + # we don't let any new addresses slip through. Might be expensive + # on first call, but will be much cheaper for subsequent calls. self.redaction_list_update() + first_run = False if isinstance(data, str): data = data.lower() # the end of the recursive wormhole, do the actual work: - if ":" in data: - if data not in self.redactions: - for find, fix in list(self.redactions.items()): - if find in data: - self.redactions[data] = data.replace(find, fix) - data = self.redactions[data] - break - else: - data = self.redactions[data] + if data not in self.redactions: + for find, fix in list(self.redactions.items()): + if find in data: + self.redactions[data] = data.replace(find, fix) + data = self.redactions[data] + break + else: + data = self.redactions[data] # redactions done, now replace any remaining MAC addresses # We are only looking for xx:xx:xx... format. 
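            # (e.g. a stray "aa:bb:cc:dd:ee:ff" - hypothetical address - that never
            # made it into self.redactions still gets masked by this generic pattern)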
return self._redact_generic_re.sub(self._redact_generic_sub, data) elif isinstance(data, dict): - return {self.redact_data(k): self.redact_data(v) for k, v in data.items()} + return {self.redact_data(k, False): self.redact_data(v, False) for k, v in data.items()} elif isinstance(data, list): - return [self.redact_data(v) for v in data] + return [self.redact_data(v, False) for v in data] else: return data From 92d4b45d617eda166f2c9c74de14be6abd790ad1 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Sun, 1 Dec 2024 16:49:31 +0000 Subject: [PATCH 31/35] perf: add lru_cache to rssi cals and clean_charbuf --- custom_components/bermuda/util.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/custom_components/bermuda/util.py b/custom_components/bermuda/util.py index 83d8564..d3ceebb 100644 --- a/custom_components/bermuda/util.py +++ b/custom_components/bermuda/util.py @@ -2,7 +2,9 @@ from __future__ import annotations +from functools import lru_cache +@lru_cache(1024) def rssi_to_metres(rssi, ref_power=None, attenuation=None): """ Convert instant rssi value to a distance in metres. @@ -15,11 +17,6 @@ def rssi_to_metres(rssi, ref_power=None, attenuation=None): ref_power: db. measured rssi when at 1m distance from rx. The will be affected by both receiver sensitivity and transmitter calibration, antenna design and orientation etc. - - TODO: the ref_power and attenuation figures can/should probably be mapped - against each receiver and transmitter for variances. We could also fine- - tune the attenuation in real time based on changing values coming from - known-fixed beacons (eg thermometers, window sensors etc) """ if ref_power is None: return False @@ -30,7 +27,7 @@ def rssi_to_metres(rssi, ref_power=None, attenuation=None): return 10 ** ((ref_power - rssi) / (10 * attenuation)) - +@lru_cache(256) def clean_charbuf(instring: str | None) -> str: """ Some people writing C on bluetooth devices seem to From 07dd6dc3170a4643527b22d5947b3450372a0216 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Sun, 1 Dec 2024 16:50:06 +0000 Subject: [PATCH 32/35] linting --- custom_components/bermuda/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/custom_components/bermuda/util.py b/custom_components/bermuda/util.py index d3ceebb..79cb8d2 100644 --- a/custom_components/bermuda/util.py +++ b/custom_components/bermuda/util.py @@ -4,6 +4,7 @@ from functools import lru_cache + @lru_cache(1024) def rssi_to_metres(rssi, ref_power=None, attenuation=None): """ From f565d203dc40120993dd01be316e9eb2c82b1219 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Sun, 1 Dec 2024 17:09:52 +0000 Subject: [PATCH 33/35] Log redact timing in dump_devices --- custom_components/bermuda/coordinator.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 904dbf3..68fe2bb 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -1288,7 +1288,13 @@ async def service_dump_devices(self, call: ServiceCall) -> ServiceResponse: # p out[address] = device.to_dict() if redact: + _stamp_redact = MONOTONIC_TIME() out = cast(ServiceResponse, self.redact_data(out)) + _stamp_redact_elapsed = MONOTONIC_TIME() - _stamp_redact + if _stamp_redact_elapsed > 3: # It should be fast now. 
+ _LOGGER.warning("Dump devices redaction took %2f seconds", _stamp_redact_elapsed) + else: + _LOGGER.debug("Dump devices redaction took %2f seconds", _stamp_redact_elapsed) return out def redaction_list_update(self): From 6b062be4e25ae1ae99760f8d742bb78e5bbd5c41 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Sun, 1 Dec 2024 17:54:23 +0000 Subject: [PATCH 34/35] Cleanups: BermudaDeviceScanner scanner - Remove redundant scanner data - Tidy check for scanner_sends_stamps - Removed doubled-up fields from merge conflict chaos. - Clean last bit of dist_count --- .../bermuda/bermuda_device_scanner.py | 58 +++++++------------ 1 file changed, 21 insertions(+), 37 deletions(-) diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 56635da..49ef474 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -63,8 +63,7 @@ def __init__( ) -> None: # I am declaring these just to control their order in the dump, # which is a bit silly, I suspect. - self.name: str = scandata.scanner.name - self.scanner_device_name = scanner_device.name + self.name: str = scanner_device.name or scandata.scanner.name self.scanner_device = scanner_device # links to the source device self.adapter: str = scandata.scanner.adapter self.address = scanner_device.address @@ -75,7 +74,8 @@ def __init__( self.parent_device_address = parent_device.address self.options = options self.stamp: float | None = 0 - self.scanner_sends_stamps: bool = False + # Only remote scanners log timestamps, local usb adaptors do not. + self.scanner_sends_stamps = isinstance(scanner_device, BaseHaRemoteScanner) self.new_stamp: float | None = None # Set when a new advert is loaded from update self.rssi: float | None = None self.tx_power: float | None = None @@ -89,8 +89,6 @@ def __init__( self.hist_distance_by_interval = [] # updated per-interval self.hist_interval = [] # WARNING: This is actually "age of ad when we polled" self.hist_velocity = [] # Effective velocity versus previous stamped reading - self.tx_power: float | None = None - self.cached_remote_scanners = set() self.conf_rssi_offset = self.options.get(CONF_RSSI_OFFSETS, {}).get(self.address, 0) self.conf_ref_power = self.options.get(CONF_REF_POWER) self.conf_attenuation = self.options.get(CONF_ATTENUATION) @@ -124,13 +122,8 @@ def update_advertisement(self, scandata: BluetoothScannerDevice): self.area_name = self.scanner_device.area_name new_stamp: float | None = None - scanner_address = scandata.scanner.source - - # Only remote scanners log timestamps here (local usb adaptors do not), - if scanner_address in self.cached_remote_scanners or isinstance(scanner, BaseHaRemoteScanner): - self.cached_remote_scanners.add(scanner_address) + if self.scanner_sends_stamps: # Found a remote scanner which has timestamp history... - self.scanner_sends_stamps = True # There's no API for this, so we somewhat sneakily are accessing # what is intended to be a protected dict. # pylint: disable-next=protected-access @@ -199,24 +192,19 @@ def update_advertisement(self, scandata: BluetoothScannerDevice): self.stamp = new_stamp self.hist_stamp.insert(0, self.stamp) - # Safe to update these values regardless of stamps... - - self.adapter: str = scanner.adapter - self.source: str = scanner.source - if self.tx_power is not None and scandata.advertisement.tx_power != self.tx_power: - # Not really an erorr, we just don't account for this happening - - # I want to know if it does. 
- # AJG 2024-01-11: This does happen. Looks like maybe apple devices? - # Changing from warning to debug to quiet users' logs. - # Also happens with esphome set with long beacon interval tx, as it alternates - # between sending some generic advert and the iBeacon advert. ie, it's bogus for that - # case. - # _LOGGER.debug( - # "Device changed TX-POWER! That was unexpected: %s %sdB", - # self.parent_device_address, - # scandata.advertisement.tx_power, - # ) - pass + # if self.tx_power is not None and scandata.advertisement.tx_power != self.tx_power: + # # Not really an erorr, we just don't account for this happening - + # # I want to know if it does. + # # AJG 2024-01-11: This does happen. Looks like maybe apple devices? + # # Changing from warning to debug to quiet users' logs. + # # Also happens with esphome set with long beacon interval tx, as it alternates + # # between sending some generic advert and the iBeacon advert. ie, it's bogus for that + # # case. + # _LOGGER.debug( + # "Device changed TX-POWER! That was unexpected: %s %sdB", + # self.parent_device_address, + # scandata.advertisement.tx_power, + # ) self.tx_power = scandata.advertisement.tx_power # Track each advertisement element as or if they change. @@ -242,17 +230,13 @@ def _update_raw_distance(self, reading_is_new=True) -> float: setting change (such as altering a device's ref_power setting). """ # Check if we should use a device-based ref_power - if self.ref_power == 0: # No user-supplied per-device value + if self.ref_power == 0: # No user-supplied per-device value # use global default ref_power = self.conf_ref_power else: ref_power = self.ref_power - distance = rssi_to_metres( - self.rssi + self.conf_rssi_offset, - ref_power, - self.conf_attenuation - ) + distance = rssi_to_metres(self.rssi + self.conf_rssi_offset, ref_power, self.conf_attenuation) self.rssi_distance_raw = distance if reading_is_new: # Add a new historical reading @@ -391,7 +375,8 @@ def calculate_data(self): velocity = delta_d / delta_t - if velocity > peak_velocity: # noqa: PLR1730 - max() is slower. + # Don't use max() as it's slower. + if velocity > peak_velocity: # but on subsequent comparisons we only care if they're faster retreats peak_velocity = velocity # we've been through the history and have peak velo retreat, or the most recent @@ -418,7 +403,6 @@ def calculate_data(self): else: self.hist_distance_by_interval.insert(0, self.hist_distance_by_interval[0]) - dist_count = len(self.hist_distance_by_interval) # trim the log to length if len(self.hist_distance_by_interval) > self.conf_smoothing_samples: del self.hist_distance_by_interval[self.conf_smoothing_samples :] From f64e4921add9b1a5bc8cfc58b46a5ff6301ef473 Mon Sep 17 00:00:00 2001 From: Ashley Gittins Date: Sun, 1 Dec 2024 17:57:43 +0000 Subject: [PATCH 35/35] linting --- custom_components/bermuda/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/custom_components/bermuda/util.py b/custom_components/bermuda/util.py index 79cb8d2..bb961a3 100644 --- a/custom_components/bermuda/util.py +++ b/custom_components/bermuda/util.py @@ -28,6 +28,7 @@ def rssi_to_metres(rssi, ref_power=None, attenuation=None): return 10 ** ((ref_power - rssi) / (10 * attenuation)) + @lru_cache(256) def clean_charbuf(instring: str | None) -> str: """
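
As a point of reference for the rssi_to_metres() changes above, here is a minimal sketch of how the log-distance conversion behaves once memoised with lru_cache. The dB and attenuation figures below are made-up calibration values chosen for illustration, not defaults shipped by the integration; only the formula itself comes from util.py.

from functools import lru_cache

@lru_cache(1024)  # identical (rssi, ref_power, attenuation) readings are converted only once
def rssi_to_metres(rssi: float, ref_power: float, attenuation: float) -> float:
    # Log-distance path-loss conversion, mirroring the formula in util.py.
    return 10 ** ((ref_power - rssi) / (10 * attenuation))

print(rssi_to_metres(-55, -55, 3.0))   # 1.0   - at ref_power the estimate is exactly 1 metre
print(rssi_to_metres(-70, -55, 3.0))   # ~3.16 - a reading 15 dB weaker works out to roughly 3 metres
print(rssi_to_metres.cache_info())     # repeated identical readings are served from the cache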