From e398f98b93a2075331109a5bcfe8d1010e75ba50 Mon Sep 17 00:00:00 2001 From: Phil Bruckner Date: Wed, 28 Feb 2024 08:51:49 -0600 Subject: [PATCH] Replace locationsharinglib, reorg code & other misc improvements (#19) Create replacement for locationsharinglib which more directly implements capabilities needed/used by integration. Migrate configuration entry from version 1 to 2. Add note to README.md about purging history of legacy trackers. Remove support for initial beta versions when it comes to how entities are restored. Write cookies & parsed data to log when invalid session indicated. When loading new cookies file, store in newly named file in `.storage/google_maps` instead of overwriting existing file. This makes code much cleaner. Existing file will be deleted when config is reloaded. Reorganize code to simplify, creating new helpers & coordinator modules. --- README.md | 4 + custom_components/google_maps/__init__.py | 622 +----------------- custom_components/google_maps/config_flow.py | 135 ++-- custom_components/google_maps/const.py | 4 - custom_components/google_maps/coordinator.py | 212 ++++++ .../google_maps/device_tracker.py | 63 +- .../google_maps/gm_loc_sharing.py | 315 +++++++++ custom_components/google_maps/helpers.py | 232 +++++++ custom_components/google_maps/manifest.json | 8 +- 9 files changed, 893 insertions(+), 702 deletions(-) create mode 100644 custom_components/google_maps/coordinator.py create mode 100644 custom_components/google_maps/gm_loc_sharing.py create mode 100644 custom_components/google_maps/helpers.py diff --git a/README.md b/README.md index 26f94d3..ffd4400 100644 --- a/README.md +++ b/README.md @@ -324,3 +324,7 @@ If/when any "legacy" trackers are no longer desired, they can be removed from th 2. Removing associated entries in `known_devices.yaml`. If that would make the file empty, then the file can simply be deleted instead. 3. Restarting Home Assistant. 
+ +Note that there will still be history for the legacy trackers, at least for a while. +You can purge that history via the `recorder.purge_entities` service. +See the [Recorder](https://www.home-assistant.io/integrations/recorder/) integration documentation for more details. diff --git a/custom_components/google_maps/__init__.py b/custom_components/google_maps/__init__.py index 13a12c1..c96b054 100644 --- a/custom_components/google_maps/__init__.py +++ b/custom_components/google_maps/__init__.py @@ -1,508 +1,66 @@ """The google_maps component.""" from __future__ import annotations -from collections.abc import Callable, Mapping -from dataclasses import asdict as dc_asdict, dataclass, field -from datetime import datetime, timedelta from functools import partial -from http.cookiejar import LoadError, MozillaCookieJar import logging -from os import PathLike -from pathlib import Path -from typing import Any, NewType, Self, cast - -from locationsharinglib import Person, Service -from locationsharinglib.locationsharinglib import VALID_COOKIE_NAMES -from locationsharinglib.locationsharinglibexceptions import ( - InvalidCookieFile, - InvalidCookies, - InvalidData, -) -from requests import RequestException, Response, Session -from requests.adapters import HTTPAdapter -from urllib3 import Retry -from urllib3.exceptions import MaxRetryError +from typing import cast from homeassistant.components.device_tracker import DOMAIN as DT_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_CHARGING, - ATTR_BATTERY_LEVEL, - ATTR_ENTITY_PICTURE, - ATTR_GPS_ACCURACY, - ATTR_LATITUDE, - ATTR_LONGITUDE, - CONF_SCAN_INTERVAL, - CONF_USERNAME, - EVENT_HOMEASSISTANT_FINAL_WRITE, - Platform, -) -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.const import CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant from 
homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.event import async_track_point_in_time -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - async_delete_issue, -) -from homeassistant.helpers.restore_state import ExtraStoredData -from homeassistant.helpers.storage import STORAGE_DIR -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util import dt as dt_util, slugify -from .const import ( - ATTR_ADDRESS, - ATTR_LAST_SEEN, - ATTR_NICKNAME, - CONF_COOKIES_FILE, - CONF_CREATE_ACCT_ENTITY, - COOKIE_WARNING_PERIOD, - CREDENTIALS_FILE, - DOMAIN, - NAME_PREFIX, - RETRIES_BACKOFF, - RETRIES_STATUSES, - RETRIES_TOTAL, -) +from .const import CONF_COOKIES_FILE, CONF_CREATE_ACCT_ENTITY, DOMAIN, NAME_PREFIX +from .coordinator import GMDataUpdateCoordinator, GMIntegData +from .helpers import ConfigID, ConfigUniqueIDs, cookies_file_path _LOGGER = logging.getLogger(__name__) _PLATFORMS = [Platform.DEVICE_TRACKER] -_UNAUTHORIZED = 401 -_FORBIDDEN = 403 -_TOO_MANY_REQUESTS = 429 -_AUTH_ERRORS = (_UNAUTHORIZED, _FORBIDDEN) - - -def old_cookies_file_path(hass: HomeAssistant, username: str) -> Path: - """Return path to cookies file from legacy implementation.""" - return Path(hass.config.path()) / f"{CREDENTIALS_FILE}.{slugify(username)}" - - -def cookies_file_path(hass: HomeAssistant, cookies_file: str) -> Path: - """Return path to cookies file.""" - return Path(hass.config.path()) / STORAGE_DIR / DOMAIN / cookies_file - - -def get_expiration(cookies: str) -> datetime | None: - """Return expiration of cookies.""" - return min( - [ - dt_util.as_local(dt_util.utc_from_timestamp(int(cookie_data[4]))) - for cookie_data in [ - line.strip().split() - for line in cookies.splitlines() - if line.strip() and not line.strip().startswith("#") - ] - if cookie_data[5] in VALID_COOKIE_NAMES - ], - default=None, - ) - - -def exp_2_str(expiration: 
datetime | None) -> str: - """Convert expiration to a string.""" - return str(expiration) if expiration is not None else "unknown" - - -def expiring_soon(expiration: datetime | None) -> bool: - """Return if cookies are expiring soon.""" - return expiration is not None and expiration - dt_util.now() < COOKIE_WARNING_PERIOD - - -class FromAttributesError(Exception): - """Cannot create object from state attributes.""" - - -@dataclass(frozen=True) -class LocationData: - """Location data.""" - - address: str - gps_accuracy: int - last_seen: datetime - latitude: float - longitude: float - - def as_dict(self) -> dict[str, Any]: - """Return a dict representation of the data.""" - return dc_asdict(self) - - @staticmethod - def _last_seen(data: Mapping[str, Any], key: str) -> datetime: - """Get last_seen from mapping, converting to datetime if necessary.""" - last_seen: datetime | str | None - try: - last_seen = cast(datetime | str, data[key]) - if isinstance(last_seen, datetime): - return last_seen - last_seen = dt_util.parse_datetime(last_seen) - except (KeyError, TypeError) as err: - raise FromAttributesError from err - if last_seen is None: - raise FromAttributesError - return last_seen - - @classmethod - def from_dict(cls, restored: dict[str, Any]) -> Self | None: - """Initialize location data from a dict.""" - try: - last_seen = cls._last_seen(restored, "last_seen") - except FromAttributesError: - return None - try: - return cls( - restored["address"], - restored["gps_accuracy"], - last_seen, - restored["latitude"], - restored["longitude"], - ) - except KeyError: - return None - - @classmethod - def from_person(cls, person: Person) -> Self: - """Initialize location data from Person object.""" - return cls( - cast(str, person.address), - cast(int, person.accuracy), - person.datetime, - cast(float, person.latitude), - cast(float, person.longitude), - ) - - @classmethod - def from_attributes(cls, attrs: Mapping[str, Any]) -> Self: - """Initialize location data from state 
attributes.""" - last_seen = cls._last_seen(attrs, ATTR_LAST_SEEN) - try: - return cls( - attrs[ATTR_ADDRESS], - attrs[ATTR_GPS_ACCURACY], - last_seen, - attrs[ATTR_LATITUDE], - attrs[ATTR_LONGITUDE], - ) - except KeyError as err: - raise FromAttributesError from err +async def entry_updated(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle config entry update.""" + await hass.config_entries.async_reload(entry.entry_id) -@dataclass(frozen=True) -class MiscData: - """Miscellaneous data.""" - - battery_charging: bool - battery_level: int | None - entity_picture: str - full_name: str - nickname: str - - def as_dict(self) -> dict[str, Any]: - """Return a dict representation of the data.""" - return dc_asdict(self) - - @classmethod - def from_dict(cls, restored: dict[str, Any]) -> Self | None: - """Initialize miscellaneous data from a dict.""" - try: - return cls( - restored["battery_charging"], - restored["battery_level"], - restored["entity_picture"], - restored["full_name"], - restored["nickname"], - ) - except KeyError: - return None - - @classmethod - def from_person(cls, person: Person) -> Self: - """Initialize miscellaneous data from Person object.""" - return cls( - person.charging, - cast(int | None, person.battery_level), - cast(str, person.picture_url), - cast(str, person.full_name), - cast(str, person.nickname), - ) - - @classmethod - def from_attributes(cls, attrs: Mapping[str, Any], full_name: str) -> Self: - """Initialize miscellaneous data from state attributes.""" - try: - return cls( - attrs[ATTR_BATTERY_CHARGING], - attrs.get(ATTR_BATTERY_LEVEL), - attrs[ATTR_ENTITY_PICTURE], - full_name, - attrs[ATTR_NICKNAME], - ) - except KeyError as err: - raise FromAttributesError from err - - -@dataclass(frozen=True) -class PersonData(ExtraStoredData): - """Shared person data.""" - - loc: LocationData | None - misc: MiscData | None - - def as_dict(self) -> dict[str, Any]: - """Return a dict representation of the data.""" - return dc_asdict(self) - - 
@classmethod - def from_dict(cls, restored: dict[str, Any]) -> Self | None: - """Return PersonData created from a dict.""" - if (loc := restored.get("loc")) is not None: - loc = LocationData.from_dict(loc) - if (misc := restored.get("misc")) is not None: - misc = MiscData.from_dict(misc) - return cls(loc, misc) - - @classmethod - def from_person(cls, person: Person) -> Self: - """Initialize shared person data from Person object.""" - return cls( - LocationData.from_person(person), - MiscData.from_person(person), - ) - - -ConfigID = NewType("ConfigID", str) -UniqueID = NewType("UniqueID", str) -GMData = dict[UniqueID, PersonData] -GMDataUpdateCoordinator = DataUpdateCoordinator[GMData] - - -class ConfigUniqueIDs: - """Unique ID config assignments. - - Since multiple Google accounts might be be added, and it's possible for people to - have shared their location with more than one of those accounts, to avoid having the - same Entity being created by more than one account (i.e., ConfigEntry), keep a - record of which config each entity is, or will be, associated with. This will not - only avoid having to keep querying the Entity Registry, it will also avoid race - conditions where multiple configs might try to create an Entity for the same shared - person at the same time. 
- """ - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize assignments from Entity Registry.""" - self._all_uids: set[UniqueID] = set() - self._cfg_uids: dict[ConfigID, set[UniqueID]] = {} - ent_reg = er.async_get(hass) - for cfg in hass.config_entries.async_entries(DOMAIN): - cid = cast(ConfigID, cfg.entry_id) - cfg_uids = { - cast(UniqueID, ent.unique_id) - for ent in er.async_entries_for_config_entry(ent_reg, cid) - } - self._all_uids.update(cfg_uids) - self._cfg_uids[cid] = cfg_uids +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate config entry.""" + _LOGGER.debug("%s: Migrating from version %s", entry.title, entry.version) - @property - def empty(self) -> bool: - """Return if no unique IDs are assigned to any config.""" - if not self._all_uids: - assert not self._cfg_uids - return True + if entry.version > 2: + # Can't downgrade from some unknown future version. return False - def own(self, cid: ConfigID, uid: UniqueID) -> bool: - """Return if config already owns unique ID.""" - return uid in self.owned(cid) - - def owned(self, cid: ConfigID) -> frozenset[UniqueID]: - """Return unique IDs owned by config.""" - return frozenset(self._cfg_uids.get(cid, set())) - - def owned_by_others(self, cid: ConfigID) -> set[UniqueID]: - """Return unique IDs that are owned by other configs.""" - return self._all_uids - self.owned(cid) - - def take(self, cid: ConfigID, uids: set[UniqueID]) -> set[UniqueID]: - """Take ownership of a set of unique IDs. - - Returns set of unique IDs actually taken; - i.e., that did not already belong to other configs. 
- """ - uids = uids - self.owned_by_others(cid) - self._all_uids.update(uids) - self._cfg_uids.setdefault(cid, set()).update(uids) - return uids - - def release(self, cid: ConfigID, uid: UniqueID) -> None: - """Release ownership of a single unique ID if not owned by another config.""" - if uid in self.owned_by_others(cid): - return - self._all_uids.discard(uid) - self._cfg_uids[cid].discard(uid) - - def remove(self, cid: ConfigID) -> None: - """Remove config, releasing any unique IDs it owned.""" - self._all_uids.difference_update(self._cfg_uids.pop(cid, set())) - - -@dataclass -class GMIntegData: - """Google Maps integration data.""" - - unique_ids: ConfigUniqueIDs - coordinators: dict[ConfigID, GMDataUpdateCoordinator] = field(default_factory=dict) - - -class GMService(Service): # type: ignore[misc] - """Service class with better error detection, handling & reporting.""" - - _data: list[str] - saved_cookies: dict[str, tuple[int | None, str | None]] - - def __init__( # pylint: disable=useless-parent-delegation - self, cookies_file: str | PathLike, authenticating_account: str - ) -> None: - """Initialize service.""" - try: - super().__init__(cookies_file, authenticating_account) - finally: - self._dump_cookies() - - @property - def cookies(self) -> MozillaCookieJar: - """Return session's cookies.""" - return cast(MozillaCookieJar, self._session.cookies) - - @cookies.setter - def cookies(self, cookies: MozillaCookieJar) -> None: - """Set session's cookies.""" - self._session.cookies = cookies # type: ignore[assignment] - - @staticmethod - def _get_server_response(session: Session) -> Response: - """Get response from server using session and check for unauthorized error.""" - resp = None + if entry.version == 1: + data = dict(entry.data) + options = dict(entry.options) + options[CONF_COOKIES_FILE] = data.pop(CONF_COOKIES_FILE) try: - resp = cast(Response, Service._get_server_response(session)) - resp.raise_for_status() - except RequestException as err: - if resp and 
resp.status_code in _AUTH_ERRORS: - _LOGGER.debug( - "Error: %s: %i %s; reauthorize", - err.__class__.__name__, - resp.status_code, - resp.reason, - ) - raise InvalidCookies(f"{err.__class__.__name__}: {err}") from err - raise - return resp - - def _get_authenticated_session(self, cookies_file: str | PathLike) -> Session: - """Get authenticated session.""" - adapter = HTTPAdapter( - max_retries=Retry( - total=RETRIES_TOTAL, - status_forcelist=RETRIES_STATUSES, - backoff_factor=RETRIES_BACKOFF, + hass.config_entries.async_update_entry( + entry, data=data, options=options, version=2 ) - ) - self._session = Session() - self._session.mount("https://", adapter) - self.cookies = MozillaCookieJar(cookies_file) - try: - self.cookies.load() - except (FileNotFoundError, LoadError) as err: - raise InvalidCookieFile(str(err)) from None - if not {cookie.name for cookie in self.cookies} & VALID_COOKIE_NAMES: - raise InvalidCookies(f"Missing either of {VALID_COOKIE_NAMES} cookies!") - self._update_saved_cookies(self.cookies) - return self._session - - def get_resp_and_parse(self) -> None: - """Get server response, parse and check for invalid session.""" - try: - self._data = cast( - list[str], - self._parse_location_data( - self._get_server_response(self._session).text - ), - ) - try: - if self._data[6] == "GgA=": - raise InvalidCookies("Invalid session indicated") - except IndexError: - raise InvalidData(f"Unexpected data: {self._data}") from None - except InvalidCookies: - self._dump_cookies() - raise - - def _get_data(self) -> list[str]: - """Get last received & parsed data.""" - return self._data + except TypeError: + # 2024.2 and earlier did not accept version as a parameter. 
+ entry.version = 2 + hass.config_entries.async_update_entry(entry, data=data, options=options) - def get_all_people(self) -> list[Person]: - """Retrieve all people sharing their location.""" - people = cast(list[Person], self.get_shared_people()) - if auth_person := self.get_authenticated_person(): - people.append(auth_person) - return people - - def _update_saved_cookies(self, cookies: MozillaCookieJar) -> None: - """Get data for saved cookies.""" - self.saved_cookies = { - cookie.name: (cookie.expires, cookie.value) for cookie in cookies - } - - def save_cookies(self) -> None: - """Save session's cookies.""" - self.cookies.save(ignore_discard=True) - self._update_saved_cookies(self.cookies) - - def _dump_cookies(self) -> None: - """Dump cookies & expiration dates to log.""" - data: list[tuple[str, datetime | None]] = [] - for cookie in self.cookies: - if cookie.expires: - expiration = dt_util.as_local( - dt_util.utc_from_timestamp(cookie.expires) - ) - else: - expiration = None - data.append((cookie.name, expiration)) - data.sort(key=lambda d: d[0]) - data.sort(key=lambda d: datetime.min if d[1] is None else d[1]) - _LOGGER.debug( - "%s: cookies: %s", - self.email, - ", ".join([f"{name}: {exp}" for name, exp in data]), - ) - - -async def entry_updated(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle config entry update.""" - await hass.config_entries.async_reload(entry.entry_id) - - -PeopleFunc = Callable[[], list[Person]] + _LOGGER.debug("%s: Migration to version %s successful", entry.title, entry.version) + return True async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up config entry.""" - if not (gmi_data := cast(GMIntegData | None, hass.data.get(DOMAIN))): - hass.data[DOMAIN] = gmi_data = GMIntegData(ConfigUniqueIDs(hass)) - unique_ids = gmi_data.unique_ids - cid = ConfigID(entry.entry_id) - cf_path = cookies_file_path(hass, entry.data[CONF_COOKIES_FILE]) username = entry.data[CONF_USERNAME] create_acct_entity 
= entry.options[CONF_CREATE_ACCT_ENTITY] - scan_interval = entry.options[CONF_SCAN_INTERVAL] + + if not (gmi_data := cast(GMIntegData | None, hass.data.get(DOMAIN))): + hass.data[DOMAIN] = gmi_data = GMIntegData(ConfigUniqueIDs(hass)) # For "account person", unique ID is username (which is also returned in person.id.) ent_reg = er.async_get(hass) + unique_ids = gmi_data.unique_ids if create_acct_entity: if not unique_ids.own(cid, username) and unique_ids.take(cid, {username}): ent_reg.async_get_or_create( @@ -520,121 +78,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: dev_reg.async_remove_device(device.id) unique_ids.release(cid, username) - service: GMService | None = None - get_people_func: PeopleFunc - cookies_last_saved: datetime - - @callback - def save_cookies_if_changed(event: Event | None = None) -> None: - """Save session's cookies.""" - nonlocal cookies_last_saved - - if not service: - return - cur_cookies = { - cookie.name: (cookie.expires, cookie.value) for cookie in service.cookies - } - if ( - cur_cookies == service.saved_cookies - or event is None - and dt_util.now() - cookies_last_saved < timedelta(minutes=15) - ): - return - - msg: list[str] = [] - cur_names = set(cur_cookies) - saved_names = set(service.saved_cookies) - if dropped := saved_names - cur_names: - msg.append(f"dropped: {', '.join(dropped)}") - diff = { - name - for name in cur_names & saved_names - if cur_cookies[name] != service.saved_cookies[name] - } - if diff: - msg.append(f"updated: {', '.join(diff)}") - if new := cur_names - saved_names: - msg.append(f"new: {', '.join(new)}") - _LOGGER.debug("%s: Saving cookies, changes: %s", entry.title, ", ".join(msg)) - cookies_last_saved = dt_util.now() - hass.async_add_executor_job(service.save_cookies) - - async def update_method() -> GMData: - """Get shared location data.""" - nonlocal service, get_people_func, cookies_last_saved - - try: - if not service: - service = cast( - GMService, - await 
hass.async_add_executor_job(GMService, cf_path, username), - ) - if create_acct_entity: - get_people_func = service.get_all_people - else: - get_people_func = cast(PeopleFunc, service.get_shared_people) - cookies_last_saved = dt_util.now() - await hass.async_add_executor_job(service.get_resp_and_parse) - save_cookies_if_changed() - people = get_people_func() - except (InvalidCookieFile, InvalidCookies) as err: - raise ConfigEntryAuthFailed(f"{err.__class__.__name__}: {err}") from err - except (MaxRetryError, RequestException, InvalidData) as err: - raise UpdateFailed(f"{err.__class__.__name__}: {err}") from err - return { - UniqueID(cast(str, person.id)): PersonData.from_person(person) - for person in people - } - - coordinator = GMDataUpdateCoordinator( - hass, - _LOGGER, - name=f"Google Maps ({entry.title})", - update_interval=timedelta(seconds=scan_interval), - update_method=update_method, - ) + coordinator = GMDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() gmi_data.coordinators[cid] = coordinator - # Since we got past async_config_entry_first_refresh we know cookies haven't expired - # yet. Create a repair issue if/when they will expire "soon." 
- expiration = get_expiration(await hass.async_add_executor_job(cf_path.read_text)) - - @callback - def create_issue(_now: datetime | None = None) -> None: - """Create repair issue for cookies which are expiring soon.""" - async_create_issue( - hass, - DOMAIN, - entry.entry_id, - is_fixable=False, - is_persistent=False, - severity=IssueSeverity.WARNING, - translation_key="expiring_soon", - translation_placeholders={ - "entry_id": entry.entry_id, - "expiration": exp_2_str(expiration), - "username": username, - }, - ) - - if expiring_soon(expiration): - create_issue() - else: - async_delete_issue(hass, DOMAIN, entry.entry_id) - if expiration: - entry.async_on_unload( - async_track_point_in_time( - hass, create_issue, expiration - COOKIE_WARNING_PERIOD - ) - ) - entry.async_on_unload(entry.add_update_listener(entry_updated)) - entry.async_on_unload( - hass.bus.async_listen_once( - EVENT_HOMEASSISTANT_FINAL_WRITE, save_cookies_if_changed - ) - ) await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS) return True @@ -643,18 +91,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) if unload_ok: - # TODO: Save cookies & close session??? 
gmi_data = cast(GMIntegData, hass.data[DOMAIN]) - del gmi_data.coordinators[cast(ConfigID, entry.entry_id)] + cid = ConfigID(entry.entry_id) + del gmi_data.coordinators[cid] return unload_ok async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Remove a config entry.""" gmi_data = cast(GMIntegData, hass.data[DOMAIN]) - gmi_data.unique_ids.remove(cast(ConfigID, entry.entry_id)) - if not gmi_data.coordinators and gmi_data.unique_ids.empty: - del hass.data[DOMAIN] + gmi_data.unique_ids.remove(ConfigID(entry.entry_id)) hass.async_add_executor_job( partial( cookies_file_path(hass, entry.data[CONF_COOKIES_FILE]).unlink, diff --git a/custom_components/google_maps/config_flow.py b/custom_components/google_maps/config_flow.py index d0460ac..d0ec16d 100644 --- a/custom_components/google_maps/config_flow.py +++ b/custom_components/google_maps/config_flow.py @@ -2,18 +2,14 @@ from __future__ import annotations from abc import abstractmethod +from asyncio import Lock from collections.abc import Mapping +from datetime import datetime import logging from os import PathLike from pathlib import Path -from typing import Any +from typing import Any, cast -from locationsharinglib.locationsharinglibexceptions import ( - InvalidCookieFile, - InvalidCookies, - InvalidData, -) -from requests import RequestException import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file @@ -41,14 +37,6 @@ from homeassistant.loader import async_get_integration from homeassistant.util.uuid import random_uuid_hex -from . 
import ( - GMService, - cookies_file_path, - exp_2_str, - expiring_soon, - get_expiration, - old_cookies_file_path, -) from .const import ( CONF_COOKIES_FILE, CONF_CREATE_ACCT_ENTITY, @@ -57,21 +45,36 @@ DOMAIN, ) from .cookies import CHROME_PROCEDURE, EDGE_PROCEDURE, FIREFOX_PROCEDURE +from .coordinator import GMIntegData +from .gm_loc_sharing import ( + GMLocSharing, + InvalidCookies, + InvalidCookiesFile, + InvalidData, + RequestFailed, +) +from .helpers import ( + ConfigID, + cookies_file_path, + exp_2_str, + expiring_soon, + old_cookies_file_path, +) _LOGGER = logging.getLogger(__name__) _CONF_UPDATE_COOKIES = "update_cookies" _CONF_USE_EXISTING_COOKIES = "use_existing_cookies" -_GMSERVICE_ERRORS = (InvalidCookieFile, InvalidCookies, InvalidData, RequestException) +_GMSERVICE_ERRORS = (InvalidCookies, InvalidCookiesFile, InvalidData, RequestFailed) class GoogleMapsFlow(FlowHandler): """Google Maps flow mixin.""" _username: str - _cookies: str - # The following are only used in the reauth flow. + _api: GMLocSharing + _expiration: datetime | None + # The following is only used in the reauth flow. _reauth_entry: ConfigEntry | None = None - _cookies_file: str @property @abstractmethod @@ -84,7 +87,9 @@ def _cookies_file_ok(self, cookies_file: str | PathLike) -> bool: Must be called in an executor. """ try: - GMService(cookies_file, self._username) + self._api.load_cookies(str(cookies_file)) + self._expiration = self._api.cookies_expiration + self._api.get_new_data() except _GMSERVICE_ERRORS as err: _LOGGER.debug( "Error while validating cookies file %s: %r", cookies_file, err @@ -92,15 +97,13 @@ def _cookies_file_ok(self, cookies_file: str | PathLike) -> bool: return False return True - def _get_uploaded_cookies(self, uploaded_file_id: str) -> str | None: - """Validate and read cookies from uploaded cookies file. + def _uploaded_cookies_ok(self, uploaded_file_id: str) -> bool: + """Determine if cookies in uploaded cookies file are ok. 
Must be called in an executor. """ with process_uploaded_file(self.hass, uploaded_file_id) as cf_path: - if self._cookies_file_ok(cf_path): - return cf_path.read_text() - return None + return self._cookies_file_ok(cf_path) def _save_cookies(self, cookies_file: str) -> None: """Save cookies. @@ -109,7 +112,12 @@ def _save_cookies(self, cookies_file: str) -> None: """ cf_path = cookies_file_path(self.hass, cookies_file) cf_path.parent.mkdir(exist_ok=True) - cf_path.write_text(self._cookies) + self._api.save_cookies(str(cf_path)) + + async def _save_new_cookies(self) -> None: + """Save new cookies to newly named file.""" + self.options[CONF_COOKIES_FILE] = cookies_file = random_uuid_hex() + await self.hass.async_add_executor_job(self._save_cookies, cookies_file) async def async_step_cookies( self, user_input: dict[str, Any] | None = None @@ -117,7 +125,6 @@ async def async_step_cookies( """Get a cookies file.""" if user_input is not None: if not user_input[_CONF_USE_EXISTING_COOKIES]: - del self._cookies return await self.async_step_get_cookies_procedure_menu() if self._reauth_entry: return await self.async_step_reauth_done() @@ -129,8 +136,6 @@ async def async_step_cookies( if not await self.hass.async_add_executor_job(self._cookies_file_ok, cf_path): return await self.async_step_old_cookies_invalid(cf_path=cf_path) - self._cookies = await self.hass.async_add_executor_job(cf_path.read_text) - data_schema = vol.Schema( {vol.Required(_CONF_USE_EXISTING_COOKIES): BooleanSelector()} ) @@ -143,7 +148,7 @@ async def async_step_cookies( description_placeholders={ "username": self._username, "cookies_file": str(cf_path.name), - "expiration": exp_2_str(get_expiration(self._cookies)), + "expiration": exp_2_str(self._expiration), }, last_step=False, ) @@ -220,11 +225,9 @@ async def async_step_cookies_upload( errors = {} if user_input is not None: - cookies = await self.hass.async_add_executor_job( - self._get_uploaded_cookies, user_input[CONF_COOKIES_FILE] - ) - if cookies: - 
self._cookies = cookies + if await self.hass.async_add_executor_job( + self._uploaded_cookies_ok, user_input[CONF_COOKIES_FILE] + ): return await self.async_step_uploaded_cookie_menu() errors[CONF_COOKIES_FILE] = "invalid_cookies_file" @@ -256,7 +259,7 @@ async def async_step_uploaded_cookie_menu( menu_options=menu_options, description_placeholders={ "username": self._username, - "expiration": exp_2_str(get_expiration(self._cookies)), + "expiration": exp_2_str(self._expiration), }, ) @@ -346,11 +349,9 @@ async def async_step_reauth_done( class GoogleMapsConfigFlow(ConfigFlow, GoogleMapsFlow, domain=DOMAIN): """Google Maps config flow.""" - VERSION = 1 + VERSION = 2 - def __init__(self) -> None: - """Initialize config flow.""" - self._options: dict[str, Any] = {} + _options: dict[str, Any] @staticmethod @callback @@ -369,6 +370,8 @@ async def async_step_user( """Start user config flow.""" if user_input is not None: self._username = user_input[CONF_USERNAME] + self._api = GMLocSharing(self._username) + self._options = {} return await self.async_step_cookies() data_schema = vol.Schema( @@ -384,21 +387,22 @@ async def async_step_user( async def async_step_reauth(self, data: Mapping[str, Any]) -> FlowResult: """Start reauthorization flow.""" - self._cookies_file = data[CONF_COOKIES_FILE] self._username = data[CONF_USERNAME] + self._api = GMLocSharing(self._username) self._reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] ) + assert self._reauth_entry + self._options = dict(self._reauth_entry.options) return await self.async_step_cookies() async def async_step_done(self, _: dict[str, Any] | None = None) -> FlowResult: """Finish the user config flow.""" # Save cookies. 
- cookies_file = random_uuid_hex() - await self.hass.async_add_executor_job(self._save_cookies, cookies_file) + await self._save_new_cookies() return self.async_create_entry( title=self._username, - data={CONF_COOKIES_FILE: cookies_file, CONF_USERNAME: self._username}, + data={CONF_USERNAME: self._username}, options=self.options, ) @@ -407,64 +411,61 @@ async def async_step_reauth_done( ) -> FlowResult: """Finish the reauthorization flow.""" # Save cookies. + await self._save_new_cookies() assert self._reauth_entry - await self.hass.async_add_executor_job(self._save_cookies, self._cookies_file) - _LOGGER.debug("Reauthorization successful") - self.hass.async_create_task( - self.hass.config_entries.async_reload(self._reauth_entry.entry_id) + self.hass.config_entries.async_update_entry( + self._reauth_entry, options=self.options ) + _LOGGER.debug("Reauthorization successful") return self.async_abort(reason="reauth_successful") class GoogleMapsOptionsFlow(OptionsFlowWithConfigEntry, GoogleMapsFlow): """Google Maps options flow.""" + _update_cookies = False + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Start options flow.""" if user_input is not None: if user_input[_CONF_UPDATE_COOKIES]: + self._update_cookies = True return await self.async_step_cookies() return await self.async_step_account_entity() self._username = self.config_entry.data[CONF_USERNAME] cf_path = cookies_file_path( - self.hass, self.config_entry.data[CONF_COOKIES_FILE] + self.hass, self.config_entry.options[CONF_COOKIES_FILE] ) - cookies = await self.hass.async_add_executor_job(cf_path.read_text) - expiration = get_expiration(cookies) + self._api = GMLocSharing(self._username) + gmi_data = cast(GMIntegData, self.hass.data[DOMAIN]) + coordinator = gmi_data.coordinators.get(ConfigID(self.config_entry.entry_id)) + async with coordinator.cookie_lock if coordinator else Lock(): + file_ok = await self.hass.async_add_executor_job( + self._cookies_file_ok, 
cf_path + ) data_schema = vol.Schema( {vol.Required(_CONF_UPDATE_COOKIES): BooleanSelector()} ) data_schema = self.add_suggested_values_to_schema( - data_schema, {_CONF_UPDATE_COOKIES: expiring_soon(expiration)} + data_schema, + {_CONF_UPDATE_COOKIES: not file_ok or expiring_soon(self._expiration)}, ) return self.async_show_form( step_id="init", data_schema=data_schema, description_placeholders={ "username": self._username, - "expiration": exp_2_str(expiration), + "expiration": exp_2_str(self._expiration), }, last_step=False, ) async def async_step_done(self, _: dict[str, Any] | None = None) -> FlowResult: """Finish the flow.""" - if hasattr(self, "_cookies"): - await self.hass.async_add_executor_job( - self._save_cookies, self.config_entry.data[CONF_COOKIES_FILE] - ) - # Cookies file content has been updated, so config entry needs to be - # reloaded to use the new cookies. However, if none of the (other) options - # have actually changed, the entry update listeners won't be called, and the - # entry will therefore not get reloaded. If this is the case, initiate a - # reload from here. We don't have to worry about the flow being completely - # finished because neither the config data nor options are changing. 
- if self.options == self.config_entry.options: - self.hass.async_create_task( - self.hass.config_entries.async_reload(self.config_entry.entry_id) - ) + if self._update_cookies: + await self._save_new_cookies() return self.async_create_entry(title="", data=self.options) diff --git a/custom_components/google_maps/const.py b/custom_components/google_maps/const.py index c690cfa..fec987c 100644 --- a/custom_components/google_maps/const.py +++ b/custom_components/google_maps/const.py @@ -11,10 +11,6 @@ DEF_SCAN_INTERVAL = timedelta(seconds=DEF_SCAN_INTERVAL_SEC) COOKIE_WARNING_PERIOD = timedelta(weeks=4) -RETRIES_TOTAL = 5 -RETRIES_STATUSES = frozenset({500, 502, 503}) -RETRIES_BACKOFF = 0.25 - ATTR_ADDRESS = "address" ATTR_FULL_NAME = "full_name" ATTR_LAST_SEEN = "last_seen" diff --git a/custom_components/google_maps/coordinator.py b/custom_components/google_maps/coordinator.py new file mode 100644 index 0000000..ce95215 --- /dev/null +++ b/custom_components/google_maps/coordinator.py @@ -0,0 +1,212 @@ +"""DataUpdateCoordinator for the Google Maps integration.""" +from __future__ import annotations + +from asyncio import Lock +from collections.abc import Callable +from dataclasses import dataclass, field +from datetime import datetime, timedelta +import logging +from pathlib import Path + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_SCAN_INTERVAL, + CONF_USERNAME, + EVENT_HOMEASSISTANT_FINAL_WRITE, +) +from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.event import async_track_point_in_time +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util import dt as dt_util + +from .const import ( + CONF_COOKIES_FILE, + CONF_CREATE_ACCT_ENTITY, + 
COOKIE_WARNING_PERIOD, + DOMAIN, +) +from .gm_loc_sharing import ( + GMLocSharing, + InvalidCookies, + InvalidCookiesFile, + InvalidData, + RequestFailed, +) +from .helpers import ( + ConfigID, + ConfigUniqueIDs, + PersonData, + UniqueID, + cookies_file_path, + expiring_soon, +) + +_LOGGER = logging.getLogger(__name__) + + +GMData = dict[UniqueID, PersonData] + + +class GMDataUpdateCoordinator(DataUpdateCoordinator[GMData]): + """Google Maps data update coordinator.""" + + config_entry: ConfigEntry + _cookies_last_synced: datetime + _unsub_exp: Callable[[], None] | None = None + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + """Initialize coordinator.""" + self._cid = ConfigID(entry.entry_id) + self._username = entry.data[CONF_USERNAME] + self._cookies_file = str( + cookies_file_path(hass, entry.options[CONF_COOKIES_FILE]) + ) + self._create_acct_entity = entry.options[CONF_CREATE_ACCT_ENTITY] + + self._api = GMLocSharing(self._username) + self._cookie_lock = Lock() + self._unsub_final_write = hass.bus.async_listen_once( + EVENT_HOMEASSISTANT_FINAL_WRITE, self._save_cookies_if_changed + ) + + scan_interval = timedelta(seconds=entry.options[CONF_SCAN_INTERVAL]) + super().__init__(hass, _LOGGER, name=entry.title, update_interval=scan_interval) + # always_update added in 2023.9.0b0. + if hasattr(self, "always_update"): + self.always_update = False + + @property + def cookie_lock(self) -> Lock: + """Return cookie lock.""" + return self._cookie_lock + + async def async_shutdown(self) -> None: + """Cancel listeners, save cookies & close API.""" + await super().async_shutdown() + self._unsub_all() + cur_cookies_file = str( + cookies_file_path(self.hass, self.config_entry.options[CONF_COOKIES_FILE]) + ) + # Has cookies file name changed, e.g., due to reauth or user reconfiguration? + # If not, save cookies to existing file. If so, delete file that was being used + # because there's a new one to be used after reload/restart. 
+ if cur_cookies_file == self._cookies_file: + await self._save_cookies_if_changed(shutting_down=True) + else: + await self.hass.async_add_executor_job(Path(self._cookies_file).unlink) + self._api.close() + + def _unsub_all(self) -> None: + """Run removers.""" + self._unsub_final_write() + self._unsub_expiration() + + def _unsub_expiration(self) -> None: + """Remove expiration listener.""" + if self._unsub_exp: + self._unsub_exp() + self._unsub_exp = None + + async def async_config_entry_first_refresh(self) -> None: + """Refresh data for the first time when a config entry is setup.""" + # Load the cookies before first update. + async with self.cookie_lock: + try: + await self.hass.async_add_executor_job( + self._api.load_cookies, self._cookies_file + ) + except (InvalidCookiesFile, InvalidCookies) as err: + raise ConfigEntryAuthFailed(f"{err.__class__.__name__}: {err}") from err + self._cookies_file_synced() + + await super().async_config_entry_first_refresh() + + async def _async_update_data(self) -> GMData: + """Fetch the latest data from the source.""" + async with self.cookie_lock: + try: + await self.hass.async_add_executor_job(self._api.get_new_data) + people = self._api.get_people(self._create_acct_entity) + except InvalidCookies as err: + raise ConfigEntryAuthFailed(f"{err.__class__.__name__}: {err}") from err + except (RequestFailed, InvalidData) as err: + raise UpdateFailed(f"{err.__class__.__name__}: {err}") from err + + await self.hass.async_create_task(self._save_cookies_if_changed()) + return { + UniqueID(person.id): PersonData.from_person(person) for person in people + } + + async def _save_cookies_if_changed( + self, + event: Event | None = None, + shutting_down: bool = False, + ) -> None: + """Save session's cookies if changed.""" + shutting_down |= bool(event) + async with self.cookie_lock: + if not ( + self._api.cookies_changed + and ( + shutting_down + or dt_util.now() - self._cookies_last_synced # noqa: F821 + >= timedelta(minutes=15) + ) + ): 
+ return + try: + await self.hass.async_add_executor_job( + self._api.save_cookies, self._cookies_file + ) + except OSError as err: + self.logger.error( + "Error while saving cookies: %s: %s", err.__class__.__name__, err + ) + self._cookies_file_synced(shutting_down) + + def _cookies_file_synced(self, shutting_down: bool = False) -> None: + """Cookies file synced with current cookies.""" + cookies_expiration = self._api.cookies_expiration + self._cookies_last_synced = dt_util.now() + if expiring_soon(cookies_expiration): + self._create_issue() + else: + async_delete_issue(self.hass, DOMAIN, self._cid) + if cookies_expiration and not shutting_down: + self._unsub_expiration() + self._unsub_exp = async_track_point_in_time( + self.hass, + self._create_issue, + cookies_expiration - COOKIE_WARNING_PERIOD, + ) + + @callback + def _create_issue(self, _now: datetime | None = None) -> None: + """Create repair issue for cookies which are expiring soon.""" + async_create_issue( + self.hass, + DOMAIN, + self._cid, + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.WARNING, + translation_key="expiring_soon", + translation_placeholders={ + "entry_id": self._cid, + "username": self._username, + }, + ) + + +@dataclass +class GMIntegData: + """Google Maps integration data.""" + + unique_ids: ConfigUniqueIDs + coordinators: dict[ConfigID, GMDataUpdateCoordinator] = field(default_factory=dict) diff --git a/custom_components/google_maps/device_tracker.py b/custom_components/google_maps/device_tracker.py index bce07bc..e392530 100644 --- a/custom_components/google_maps/device_tracker.py +++ b/custom_components/google_maps/device_tracker.py @@ -2,13 +2,14 @@ from __future__ import annotations from collections.abc import Mapping -from contextlib import suppress from copy import copy import logging from typing import Any, cast from locationsharinglib import Service -from locationsharinglib.locationsharinglibexceptions import InvalidCookies +from 
locationsharinglib.locationsharinglibexceptions import ( + InvalidCookies as lsl_InvalidCookies, +) import voluptuous as vol from homeassistant.components.device_tracker import ( @@ -46,17 +47,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util, slugify -from . import ( - ConfigID, - FromAttributesError, - GMDataUpdateCoordinator, - GMIntegData, - LocationData, - MiscData, - PersonData, - UniqueID, - old_cookies_file_path, -) from .const import ( ATTR_ADDRESS, ATTR_FULL_NAME, @@ -68,6 +58,15 @@ DOMAIN, NAME_PREFIX, ) +from .coordinator import GMDataUpdateCoordinator, GMIntegData +from .helpers import ( + ConfigID, + LocationData, + MiscData, + PersonData, + UniqueID, + old_cookies_file_path, +) _LOGGER = logging.getLogger(__name__) @@ -121,7 +120,7 @@ def __init__( self.success_init = True - except InvalidCookies: + except lsl_InvalidCookies: _LOGGER.error( "The cookie file provided does not provide a valid session. Please" " create another one and try again" @@ -268,13 +267,13 @@ def extra_state_attributes(self) -> Mapping[str, Any] | None: """Return entity specific state attributes.""" if self._misc is None: return None - attrs: dict[str, Any] = {ATTR_BATTERY_CHARGING: self._misc.battery_charging} + attrs: dict[str, Any] = {ATTR_NICKNAME: self._misc.nickname} + if (charging := self._misc.battery_charging) is not None: + attrs[ATTR_BATTERY_CHARGING] = charging if self._loc: attrs[ATTR_ADDRESS] = self._loc.address - attrs[ATTR_NICKNAME] = self._misc.nickname - if self._loc: attrs[ATTR_LAST_SEEN] = self._loc.last_seen - return attrs + return dict(sorted(attrs.items())) @property def device_info(self) -> DeviceInfo | None: @@ -342,29 +341,15 @@ async def async_added_to_hass(self) -> None: await super().async_added_to_hass() # Restore state if possible. 
- if last_state := await self.async_get_last_state(): - # extra_restore_state_data was not implemented in 1.0.0b2 or earlier, so if - # it's not available, try restoring from saved attributes like what was done - # then. - last_extra_data = None - if res_last_extra_data := await self.async_get_last_extra_data(): - last_extra_data = PersonData.from_dict(res_last_extra_data.as_dict()) - attrs = last_state.attributes - - # Always restore loc data as "previous location" first, then overwrite with - # new location below if available and "better." - if last_extra_data: - self._loc = last_extra_data.loc - else: - with suppress(FromAttributesError): - self._loc = LocationData.from_attributes(attrs) + if (last_extra_data := await self.async_get_last_extra_data()) and ( + last_person_data := PersonData.from_dict(last_extra_data.as_dict()) + ): + # Always restore loc data as "previous location" first, then overwrite + # with new location below if available and "better." + self._loc = last_person_data.loc # Only restore misc data if we didn't get any when initialized. if self._misc is None: - if last_extra_data: - self._misc = last_extra_data.misc - else: - with suppress(FromAttributesError): - self._misc = MiscData.from_attributes(attrs, self._full_name) + self._misc = last_person_data.misc # Now that previous state has been restored, update with new data if possible. 
if not (data := self.coordinator.data.get(cast(UniqueID, self.unique_id))): diff --git a/custom_components/google_maps/gm_loc_sharing.py b/custom_components/google_maps/gm_loc_sharing.py new file mode 100644 index 0000000..9dccd84 --- /dev/null +++ b/custom_components/google_maps/gm_loc_sharing.py @@ -0,0 +1,315 @@ +"""Google Maps Location Sharing.""" +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import dataclass +from datetime import datetime +from functools import cached_property +from http.cookiejar import MozillaCookieJar +import json +import logging +from typing import Any, Self, cast + +from requests import RequestException, Session +from requests.adapters import HTTPAdapter +from urllib3 import Retry +from urllib3.exceptions import MaxRetryError + +_LOGGER = logging.getLogger(__name__) +_PROTOCOL = "https://" +_URL = f"{_PROTOCOL}www.google.com/maps/rpc/locationsharing/read" +_PARAMS: dict[str, Any] = { + "authuser": 2, + "hl": "en", + "gl": "us", + # pd holds the information about the rendering of the map and + # it is irrelevant with the location sharing capabilities. + # the below info points to google's headquarters. + "pb": ( + "!1m7!8m6!1m3!1i14!2i8413!3i5385!2i6!3x4095" + "!2m3!1e0!2sm!3i407105169!3m7!2sen!5e1105!12m4" + "!1e68!2m2!1sset!2sRoadmap!4e1!5m4!1e4!8m2!1e0!" + "1e1!6m9!1e12!2i2!26m1!4b1!30m1!" 
+ "1f1.3953487873077393!39b1!44e1!50e0!23i4111425" + ), +} + +_HTTP_PAYLOAD_TOO_LARGE = 413 +_HTTP_TOO_MANY_REQUESTS = 429 +_HTTP_INTERNAL_SERVER_ERROR = 500 +_HTTP_BAD_GATEWAY = 502 +_HTTP_SERVER_UNAVAILABLE = 503 + +_RETRIES_TOTAL = 5 +_RETRIES_STATUSES = frozenset( + { + _HTTP_BAD_GATEWAY, + _HTTP_INTERNAL_SERVER_ERROR, + _HTTP_PAYLOAD_TOO_LARGE, + _HTTP_SERVER_UNAVAILABLE, + _HTTP_TOO_MANY_REQUESTS, + } +) +_RETRIES_BACKOFF = 0.25 + +_VALID_COOKIE_NAMES = {"__Secure-1PSID", "__Secure-3PSID"} + + +class GMError(Exception): + """Google Maps location sharing base exception.""" + + +class InvalidCookies(GMError): + """Invalid cookies.""" + + +class InvalidCookiesFile(GMError): + """Invalid cookies file.""" + + +class InvalidData(GMError): + """Invalid data from server.""" + + +class RequestFailed(GMError): + """Server request failed.""" + + +@dataclass +class GMPerson: + """Person's location data.""" + + id: str + + # Attributes associated with last_seen + address: str + country_code: str + gps_accuracy: int + last_seen: datetime + latitude: float + longitude: float + + battery_charging: bool | None + battery_level: int | None + full_name: str + nickname: str + picture_url: str | None + + def __post_init__(self) -> None: + """Post initialization.""" + self.last_seen = datetime.fromtimestamp( + int(self.last_seen) / 1000 # type: ignore[call-overload] + ).astimezone() + + @classmethod + def shared_from_data(cls, data: Sequence[Any]) -> Self | None: + """Initialize shared person from server data.""" + try: + battery_charging = bool(data[13][0]) + except (IndexError, TypeError): + battery_charging = None + try: + battery_level = data[13][1] + except (IndexError, TypeError): + battery_level = None + try: + return cls( + data[6][0], + data[1][4], + data[1][6], + data[1][3], + data[1][2], + data[1][1][2], + data[1][1][1], + battery_charging, + battery_level, + data[6][2], + data[6][3], + data[6][1], + ) + except (IndexError, TypeError): + return None + + @classmethod + 
def acct_from_data(cls, data: Sequence[Any], account_email: str) -> Self | None: + """Initialize account holder from server data.""" + try: + return cls( + account_email, + data[1][4], + data[1][6], + data[1][3], + data[1][2], + data[1][1][2], + data[1][1][1], + None, + None, + account_email, + account_email, + None, + ) + except (IndexError, TypeError): + return None + + +CookieData = dict[str, tuple[int | None, str | None]] + + +class GMLocSharing: + """Google Maps location sharing.""" + + def __init__(self, account_email: str) -> None: + """Initialize API.""" + self._account_email = account_email + self._session = Session() + self._session.mount( + _PROTOCOL, + HTTPAdapter( + max_retries=Retry( + total=_RETRIES_TOTAL, + status_forcelist=_RETRIES_STATUSES, + backoff_factor=_RETRIES_BACKOFF, + ) + ), + ) + self._session.cookies = MozillaCookieJar() # type: ignore[assignment] + self._cookies_file_data: CookieData = {} + self._data: Sequence[Any] = [] + + @cached_property + def _cookies(self) -> MozillaCookieJar: + """Return session's cookies.""" + return cast(MozillaCookieJar, self._session.cookies) + + @property + def _cookie_data(self) -> CookieData: + """Return pertinent data for current cookies.""" + return {cookie.name: (cookie.expires, cookie.value) for cookie in self._cookies} + + @property + def cookies_changed(self) -> bool: + """Return if cookies have changed since they were loaded or last saved.""" + return self._cookie_data != self._cookies_file_data + + @property + def cookies_expiration(self) -> datetime | None: + """Return expiration of 'important' cookies.""" + cookie_data = self._cookie_data + expirations: list[int] = [] + for name in _VALID_COOKIE_NAMES: + if (data := cookie_data.get(name)) and (expiration := data[0]): + expirations.append(expiration) + if not expirations: + return None + return datetime.fromtimestamp(min(expirations)).astimezone() + + def close(self) -> None: + """Close API.""" + self._session.close() + + def load_cookies(self,
cookies_file: str) -> None: + """Load cookies from file.""" + self._cookies.clear() + try: + self._cookies.load(cookies_file) + except OSError as err: + raise InvalidCookiesFile(f"{err.__class__.__name__}: {err}") from None + self._dump_cookies() + if not {cookie.name for cookie in self._cookies} & _VALID_COOKIE_NAMES: + raise InvalidCookies(f"Missing either of {_VALID_COOKIE_NAMES} cookies") + self._cookies_file_data = self._cookie_data + + def save_cookies(self, cookies_file: str) -> None: + """Save cookies to file.""" + self._dump_changed_cookies() + self._cookies.save(cookies_file, ignore_discard=True) + self._cookies_file_data = self._cookie_data + + def get_new_data(self) -> None: + """Get new data from Google server.""" + try: + resp = self._session.get(_URL, params=_PARAMS, verify=True) + resp.raise_for_status() + except (RequestException, MaxRetryError) as err: + raise RequestFailed(f"{err.__class__.__name__}: {err}") from err + raw_data = resp.text + try: + self._data = json.loads(raw_data[5:]) + except (IndexError, json.JSONDecodeError) as err: + raise InvalidData(f"Could not parse: {raw_data}") from err + if not isinstance(self._data, Sequence): + raise InvalidData(f"Expected a Sequence, got: {self._data}") + try: + if self._data[6] == "GgA=": + self._dump_cookies() + _LOGGER.debug("%s: Parsed data: %s", self._account_email, self._data) + raise InvalidCookies("Invalid session indicated") + except IndexError: + raise InvalidData(f"Unexpected parsed data: {self._data}") from None + + def get_people(self, include_acct_person: bool) -> list[GMPerson]: + """Get people from data.""" + people: list[GMPerson] = [] + bad_data: list[list[Any]] = [] + if len(self._data) < 1: + raise InvalidData("No shared location data") + for person_data in self._data[0] or []: + if person := GMPerson.shared_from_data(person_data): + people.append(person) + else: + bad_data.append(person_data) + if include_acct_person and len(self._data) >= 10: + if person := 
GMPerson.acct_from_data(self._data[9], self._account_email): + people.append(person) + else: + bad_data.append(self._data[9]) + for bad_person_data in bad_data: + _LOGGER.debug( + "%s: Missing location or other data for person: %s", + self._account_email, + bad_person_data, + ) + return people + + def _dump_cookies(self) -> None: + """Dump cookies & expiration dates to log.""" + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + data: list[tuple[str, datetime | None]] = [] + for cookie in self._cookies: + if cookie.expires: + expiration = datetime.fromtimestamp(cookie.expires) + else: + expiration = None + data.append((cookie.name, expiration)) + data.sort(key=lambda d: (datetime.max if d[1] is None else d[1], d[0])) + _LOGGER.debug( + "%s: Cookies: %s", + self._account_email, + ", ".join([f"{name}: {exp}" for name, exp in data]), + ) + + def _dump_changed_cookies(self) -> None: + """Dump cookie changes since last saved to log.""" + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + msg: list[str] = [] + cookie_data = self._cookie_data + cur_names = set(cookie_data) + saved_names = set(self._cookies_file_data) + if dropped := saved_names - cur_names: + msg.append(f"dropped: {', '.join(dropped)}") + diff = { + name + for name in cur_names & saved_names + if cookie_data[name] != self._cookies_file_data[name] + } + if diff: + msg.append(f"updated: {', '.join(diff)}") + if new := cur_names - saved_names: + msg.append(f"new: {', '.join(new)}") + _LOGGER.debug( + "%s: Changed cookies since last saved: %s", + self._account_email, + ", ".join(msg), + ) diff --git a/custom_components/google_maps/helpers.py b/custom_components/google_maps/helpers.py new file mode 100644 index 0000000..f01e216 --- /dev/null +++ b/custom_components/google_maps/helpers.py @@ -0,0 +1,232 @@ +"""Google Maps helper functions, etc.""" +from __future__ import annotations + +from dataclasses import asdict as dc_asdict, dataclass +from datetime import datetime +from pathlib import Path +from 
typing import Any, NewType, Self, cast + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.restore_state import ExtraStoredData +from homeassistant.helpers.storage import STORAGE_DIR +from homeassistant.util import dt as dt_util, slugify + +from .const import COOKIE_WARNING_PERIOD, CREDENTIALS_FILE, DOMAIN +from .gm_loc_sharing import GMPerson + + +def old_cookies_file_path(hass: HomeAssistant, username: str) -> Path: + """Return path to cookies file from legacy implementation.""" + return Path(hass.config.path()) / f"{CREDENTIALS_FILE}.{slugify(username)}" + + +def cookies_file_path(hass: HomeAssistant, cookies_file: str) -> Path: + """Return path to cookies file.""" + return Path(hass.config.path()) / STORAGE_DIR / DOMAIN / cookies_file + + +def exp_2_str(expiration: datetime | None) -> str: + """Convert expiration to a string.""" + return str(expiration) if expiration is not None else "unknown" + + +def expiring_soon(expiration: datetime | None) -> bool: + """Return if cookies are expiring soon.""" + return expiration is not None and expiration - dt_util.now() < COOKIE_WARNING_PERIOD + + +ConfigID = NewType("ConfigID", str) +UniqueID = NewType("UniqueID", str) + + +class FromAttributesError(Exception): + """Cannot create object from state attributes.""" + + +@dataclass(frozen=True) +class LocationData: + """Location data.""" + + address: str + gps_accuracy: int + last_seen: datetime + latitude: float + longitude: float + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the data.""" + return dc_asdict(self) + + @classmethod + def from_dict(cls, restored: dict[str, Any]) -> Self | None: + """Initialize location data from a dict.""" + last_seen: datetime | str | None + try: + last_seen = cast(datetime | str, restored["last_seen"]) + if not isinstance(last_seen, datetime): + last_seen = dt_util.parse_datetime(last_seen) + except (KeyError, TypeError): + return 
None + if last_seen is None: + return None + try: + return cls( + restored["address"], + restored["gps_accuracy"], + last_seen, + restored["latitude"], + restored["longitude"], + ) + except KeyError: + return None + + @classmethod + def from_person(cls, person: GMPerson) -> Self: + """Initialize location data from GMPerson object.""" + return cls( + person.address, + person.gps_accuracy, + person.last_seen, + person.latitude, + person.longitude, + ) + + +@dataclass(frozen=True) +class MiscData: + """Miscellaneous data.""" + + battery_charging: bool | None + battery_level: int | None + entity_picture: str | None + full_name: str + nickname: str + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the data.""" + return dc_asdict(self) + + @classmethod + def from_dict(cls, restored: dict[str, Any]) -> Self | None: + """Initialize miscellaneous data from a dict.""" + try: + return cls( + restored["battery_charging"], + restored["battery_level"], + restored["entity_picture"], + restored["full_name"], + restored["nickname"], + ) + except KeyError: + return None + + @classmethod + def from_person(cls, person: GMPerson) -> Self: + """Initialize miscellaneous data from GMPerson object.""" + return cls( + person.battery_charging, + person.battery_level, + person.picture_url, + person.full_name, + person.nickname, + ) + + +@dataclass(frozen=True) +class PersonData(ExtraStoredData): + """Shared person data.""" + + loc: LocationData | None + misc: MiscData | None + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the data.""" + return dc_asdict(self) + + @classmethod + def from_dict(cls, restored: dict[str, Any]) -> Self | None: + """Return PersonData created from a dict.""" + if (loc := restored.get("loc")) is not None: + loc = LocationData.from_dict(loc) + if (misc := restored.get("misc")) is not None: + misc = MiscData.from_dict(misc) + return cls(loc, misc) + + @classmethod + def from_person(cls, person: GMPerson) -> Self: 
+ """Initialize shared person data from GMPerson object.""" + return cls( + LocationData.from_person(person), + MiscData.from_person(person), + ) + + +class ConfigUniqueIDs: + """Unique ID config assignments. + + Since multiple Google accounts might be added, and it's possible for people to + have shared their location with more than one of those accounts, to avoid having the + same Entity being created by more than one account (i.e., ConfigEntry), keep a + record of which config each entity is, or will be, associated with. This will not + only avoid having to keep querying the Entity Registry, it will also avoid race + conditions where multiple configs might try to create an Entity for the same shared + person at the same time. + """ + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize assignments from Entity Registry.""" + self._all_uids: set[UniqueID] = set() + self._cfg_uids: dict[ConfigID, set[UniqueID]] = {} + + ent_reg = er.async_get(hass) + for cfg in hass.config_entries.async_entries(DOMAIN): + cid = cast(ConfigID, cfg.entry_id) + cfg_uids = { + cast(UniqueID, ent.unique_id) + for ent in er.async_entries_for_config_entry(ent_reg, cid) + } + self._all_uids.update(cfg_uids) + self._cfg_uids[cid] = cfg_uids + + @property + def empty(self) -> bool: + """Return if no unique IDs are assigned to any config.""" + if not self._all_uids: + assert not self._cfg_uids + return True + return False + + def own(self, cid: ConfigID, uid: UniqueID) -> bool: + """Return if config already owns unique ID.""" + return uid in self.owned(cid) + + def owned(self, cid: ConfigID) -> frozenset[UniqueID]: + """Return unique IDs owned by config.""" + return frozenset(self._cfg_uids.get(cid, set())) + + def owned_by_others(self, cid: ConfigID) -> set[UniqueID]: + """Return unique IDs that are owned by other configs.""" + return self._all_uids - self.owned(cid) + + def take(self, cid: ConfigID, uids: set[UniqueID]) -> set[UniqueID]: + """Take ownership of a set of
unique IDs. + + Returns set of unique IDs actually taken; + i.e., that did not already belong to other configs. + """ + uids = uids - self.owned_by_others(cid) + self._all_uids.update(uids) + self._cfg_uids.setdefault(cid, set()).update(uids) + return uids + + def release(self, cid: ConfigID, uid: UniqueID) -> None: + """Release ownership of a single unique ID if not owned by another config.""" + if uid in self.owned_by_others(cid): + return + self._all_uids.discard(uid) + self._cfg_uids[cid].discard(uid) + + def remove(self, cid: ConfigID) -> None: + """Remove config, releasing any unique IDs it owned.""" + self._all_uids.difference_update(self._cfg_uids.pop(cid, set())) diff --git a/custom_components/google_maps/manifest.json b/custom_components/google_maps/manifest.json index 639345e..2376ac6 100644 --- a/custom_components/google_maps/manifest.json +++ b/custom_components/google_maps/manifest.json @@ -4,10 +4,10 @@ "codeowners": ["@pnbruckner"], "config_flow": true, "dependencies": ["file_upload"], - "documentation": "https://github.com/pnbruckner/ha-google-maps/blob/1.1.1/README.md", + "documentation": "https://github.com/pnbruckner/ha-google-maps/blob/1.2.0b1/README.md", "iot_class": "cloud_polling", "issue_tracker": "https://github.com/pnbruckner/ha-google-maps/issues", - "loggers": ["locationsharinglib"], - "requirements": ["locationsharinglib==5.0.1"], - "version": "1.1.1" + "loggers": [], + "requirements": [], + "version": "1.2.0b1" }