chore: fix lint / coverage issues
tseaver committed Nov 17, 2021
1 parent da65cc7 commit a7273b9
Showing 11 changed files with 31 additions and 52 deletions.
5 changes: 1 addition & 4 deletions google/cloud/firestore_v1/__init__.py
@@ -18,15 +18,12 @@

"""Python idiomatic client for Google Cloud Firestore."""

from typing import Union

import pkg_resources

__version__: Union[str, None]
try:
__version__ = pkg_resources.get_distribution("google-cloud-firestore").version
except pkg_resources.DistributionNotFound:
__version__ = None
__version__ = None # type: ignore

from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1._helpers import GeoPoint
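The net effect in __init__.py is that the version lookup keeps its runtime fallback but drops the separate Union annotation, silencing mypy at the assignment instead. A minimal sketch of the same pattern, using a hypothetical distribution name:

import pkg_resources

try:
    __version__ = pkg_resources.get_distribution("example-package").version
except pkg_resources.DistributionNotFound:
    # Not installed (e.g. running from a raw source checkout), so no
    # version metadata is available.
    __version__ = None  # type: ignore

Because __version__ can end up as None, the base_client.py hunk below attaches its own # type: ignore where the value is passed to client_info.ClientInfo.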
2 changes: 1 addition & 1 deletion google/cloud/firestore_v1/async_transaction.py
@@ -192,7 +192,7 @@ async def get(
[ref_or_query], transaction=self, **kwargs
)
elif isinstance(ref_or_query, AsyncQuery):
return await ref_or_query.stream(transaction=self, **kwargs) # type: ignore
return await ref_or_query.stream(transaction=self, **kwargs) # type: ignore
else:
raise ValueError(
'Value for argument "ref_or_query" must be an AsyncDocumentReference or an AsyncQuery.'
4 changes: 3 additions & 1 deletion google/cloud/firestore_v1/base_client.py
@@ -63,7 +63,9 @@
)
_ACTIVE_TXN: str = "There is already an active transaction."
_INACTIVE_TXN: str = "There is no active transaction."
_CLIENT_INFO: Any = client_info.ClientInfo(client_library_version=__version__)
_CLIENT_INFO: Any = client_info.ClientInfo(
client_library_version=__version__ # type: ignore
)
_FIRESTORE_EMULATOR_HOST: str = "FIRESTORE_EMULATOR_HOST"


25 changes: 8 additions & 17 deletions google/cloud/firestore_v1/base_query.py
@@ -45,6 +45,7 @@
from google.cloud.firestore_v1.types import RunQueryResponse
from google.cloud.firestore_v1.order import Order


_BAD_DIR_STRING: str
_BAD_OP_NAN_NULL: str
_BAD_OP_STRING: str
@@ -107,7 +108,8 @@ class _NotPassed:
OptionalInt = Union[int, _NotPassed]
OptionalBool = Union[bool, _NotPassed]
CursorParamStripped = Tuple[Union[tuple, dict, list], bool]
CursorParam = Tuple[Union[tuple, dict, list, DocumentSnapshot], bool]
CursorArg = Union[DocumentSnapshot, dict, list, tuple, None]
CursorParam = Tuple[CursorArg, bool]
OptionalCursorParam = Union[CursorParam, _NotPassed]


@@ -474,10 +476,7 @@ def _check_snapshot(self, document_snapshot) -> None:
raise ValueError("Cannot use snapshot from another collection as a cursor.")

def _cursor_helper(
self,
document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple],
before: bool,
start: bool,
self, document_fields_or_snapshot: CursorArg, before: bool, start: bool,
) -> "BaseQuery":
"""Set values to be used for a ``start_at`` or ``end_at`` cursor.
@@ -530,9 +529,7 @@ def _cursor_helper(

return self._copy(**query_kwargs)

def start_at(
self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
) -> "BaseQuery":
def start_at(self, document_fields_or_snapshot: CursorArg) -> "BaseQuery":
"""Start query results at a particular document value.
The result set will **include** the document specified by
@@ -562,9 +559,7 @@ def start_at(
"""
return self._cursor_helper(document_fields_or_snapshot, before=True, start=True)

def start_after(
self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
) -> "BaseQuery":
def start_after(self, document_fields_or_snapshot: CursorArg) -> "BaseQuery":
"""Start query results after a particular document value.
The result set will **exclude** the document specified by
@@ -595,9 +590,7 @@ def start_after(
document_fields_or_snapshot, before=False, start=True
)

def end_before(
self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
) -> "BaseQuery":
def end_before(self, document_fields_or_snapshot: CursorArg) -> "BaseQuery":
"""End query results before a particular document value.
The result set will **exclude** the document specified by
@@ -628,9 +621,7 @@ def end_before(
document_fields_or_snapshot, before=True, start=False
)

def end_at(
self, document_fields_or_snapshot: Union[DocumentSnapshot, dict, list, tuple]
) -> "BaseQuery":
def end_at(self, document_fields_or_snapshot: CursorArg) -> "BaseQuery":
"""End query results at a particular document value.
The result set will **include** the document specified by
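The repeated Union[DocumentSnapshot, dict, list, tuple] in the cursor methods is collapsed into the CursorArg alias, which also admits None. A small self-contained sketch of the idea, using toy types rather than the library's own classes:

from typing import Tuple, Union


class DocumentSnapshot:
    """Stand-in for the real snapshot class."""


# One alias replaces the Union spelled out in every cursor signature.
CursorArg = Union[DocumentSnapshot, dict, list, tuple, None]
CursorParam = Tuple[CursorArg, bool]


def start_at(document_fields_or_snapshot: CursorArg) -> CursorParam:
    # before=True: the cursor document itself is included in the results.
    return (document_fields_or_snapshot, True)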
15 changes: 7 additions & 8 deletions google/cloud/firestore_v1/base_transaction.py
@@ -14,7 +14,7 @@

"""Helpers for applying Google Cloud Firestore changes in a transaction."""

from typing import Any, Coroutine, List, NoReturn, Optional, Union
from typing import List, Union

from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1.types import write
@@ -41,10 +41,10 @@ class BaseTransaction(object):
"""Accumulate read-and-write operations to be sent in a transaction.
Args:
max_attempts (Optional[int]): The maximum number of attempts for
max_attempts (int): The maximum number of attempts for
the transaction (i.e. allowing retries). Defaults to
:attr:`~google.cloud.firestore_v1.transaction.MAX_ATTEMPTS`.
read_only (Optional[bool]): Flag indicating if the transaction
read_only (bool): Flag indicating if the transaction
should be read-only or should allow writes. Defaults to
:data:`False`.
"""
@@ -57,7 +57,7 @@ def __init__(self, max_attempts=MAX_ATTEMPTS, read_only=False) -> None:

def _options_protobuf(
self, retry_id: Union[bytes, None]
) -> Optional[types.common.TransactionOptions]:
) -> Union[types.common.TransactionOptions, None]:
"""Convert the current object to protobuf.
The ``retry_id`` value is used when retrying a transaction that
@@ -69,7 +69,6 @@ def _options_protobuf(
to be retried.
Returns:
Optional[google.cloud.firestore_v1.types.TransactionOptions]:
The protobuf ``TransactionOptions`` if ``read_only==True`` or if
there is a transaction ID to be retried, else :data:`None`.
@@ -107,7 +106,7 @@ def id(self):
"""Get the current transaction ID.
Returns:
Optional[bytes]: The transaction ID (or :data:`None` if the
Union[bytes, None]: The transaction ID (or :data:`None` if the
current transaction is not in progress).
"""
return self._id
@@ -135,9 +134,9 @@ class _BaseTransactional(object):
def __init__(self, to_wrap) -> None:
self.to_wrap = to_wrap
self.current_id = None
"""Optional[bytes]: The current transaction ID."""
"""Union[bytes, None]: The current transaction ID."""
self.retry_id = None
"""Optional[bytes]: The ID of the first attempted transaction."""
"""Union[bytes, None]: The ID of the first attempted transaction."""

def _reset(self) -> None:
"""Unset the transaction IDs."""
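The typing cleanup here only swaps Optional[...] for the equivalent Union[..., None] spellings; the documented behavior of _options_protobuf is unchanged. A rough sketch of that documented branching, assuming the google-cloud-firestore package is installed (this is not the library's actual implementation, which also validates transaction state):

from typing import Union

from google.cloud.firestore_v1 import types


def options_protobuf_sketch(
    read_only: bool, retry_id: Union[bytes, None]
) -> Union[types.common.TransactionOptions, None]:
    if retry_id is not None:
        # Retrying a previously attempted transaction.
        return types.common.TransactionOptions(
            read_write=types.common.TransactionOptions.ReadWrite(
                retry_transaction=retry_id
            )
        )
    if read_only:
        return types.common.TransactionOptions(
            read_only=types.common.TransactionOptions.ReadOnly()
        )
    # Neither read-only nor a retry: no options are needed.
    return None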
12 changes: 7 additions & 5 deletions google/cloud/firestore_v1/bulk_writer.py
@@ -23,7 +23,7 @@
import functools
import logging
import time
from typing import Callable, Deque, Dict, List, Optional, Union, TYPE_CHECKING
from typing import Callable, Deque, Dict, List, Optional, Union

from google.rpc import status_pb2 # type: ignore

@@ -79,6 +79,7 @@ class AsyncBulkWriterMixin:
The entrypoint to the parallelizable code path is `_send_batch()`, which is
wrapped in a decorator which ensures that the `SendMode` is honored.
"""

_in_flight_documents: int = 0
_total_batches_sent: int = 0
_total_write_operations: int = 0
@@ -503,7 +504,7 @@ def _schedule_ready_retries(self):
def _request_send(self, batch_size: int) -> bool:
# Set up this boolean to avoid repeatedly taking tokens if we're only
# waiting on the `max_in_flight` limit.
have_received_tokens: bool = False
got_tokens: bool = False

while True:
# To avoid bottlenecks on the server, an additional limit is that no
@@ -515,10 +516,9 @@
)
# Ask for tokens each pass through this loop until they are granted,
# and then stop.
if not have_received_tokens:
have_received_tokens = bool(self._rate_limiter.take_tokens(batch_size))
got_tokens = got_tokens or bool(self._rate_limiter.take_tokens(batch_size))

if not under_threshold or not have_received_tokens:
if not under_threshold or not got_tokens:
# Try again until both checks are true.
# Note that this sleep is helpful to prevent the main BulkWriter
# thread from spinning through this loop as fast as possible and
@@ -727,6 +727,7 @@ class BulkWriterOperation:
that ferries it into its next retry without getting confused with other
similar writes to the same document.
"""

attempts: int

def add_to_batch(self, batch: BulkWriteBatch):
@@ -766,6 +767,7 @@ class BaseOperationRetry:
Methods on this class will be moved directly to `OperationRetry` when support for
Python 3.6 is dropped and `dataclasses` becomes universal.
"""

operation: BulkWriterOperation
run_at: datetime.datetime

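Renaming have_received_tokens to got_tokens and folding the token request into a single `or` expression keeps the loop's intent: ask the rate limiter until tokens are granted once, then keep waiting only on the in-flight ceiling. A compact stand-alone sketch of that wait loop, with the limiter and in-flight count passed in as callables rather than the BulkWriter's real attributes:

import time
from typing import Callable


def request_send(
    take_tokens: Callable[[int], int],
    current_in_flight: Callable[[], int],
    max_in_flight: int,
    batch_size: int,
) -> bool:
    got_tokens = False
    while True:
        under_threshold = current_in_flight() < max_in_flight
        # `or` short-circuits, so the limiter is only asked again while no
        # tokens have been granted yet.
        got_tokens = got_tokens or bool(take_tokens(batch_size))
        if under_threshold and got_tokens:
            return True
        # A short sleep keeps the main BulkWriter thread from spinning.
        time.sleep(0.01)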
5 changes: 1 addition & 4 deletions google/cloud/firestore_v1/query.py
@@ -279,10 +279,7 @@ def stream(
response = next(response_iterator, None)
except exceptions.GoogleAPICallError as exc:
if self._retry_query_after_exception(exc, retry, transaction):
if last_snapshot is not None:
new_query = self.start_after(last_snapshot)
else:
new_query = self
new_query = cast(Query, self.start_after(last_snapshot))
response_iterator, _ = new_query._get_stream_iterator(
transaction, retry, timeout,
)
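Collapsing the if/else type-checks because CursorArg now accepts None, so last_snapshot can be handed to start_after directly, and typing.cast narrows the BaseQuery return type back to Query for mypy without doing anything at runtime. A tiny illustration of that cast behavior with toy classes:

from typing import cast


class BaseQuery:
    def start_after(self, cursor) -> "BaseQuery":
        return self


class Query(BaseQuery):
    pass


query = Query()
# cast() only informs the type checker; at runtime it returns its argument
# unchanged, so no conversion or copy happens here.
resumed = cast(Query, query.start_after(None))
assert resumed is query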
6 changes: 1 addition & 5 deletions noxfile.py
@@ -86,11 +86,7 @@ def mypy(session):
"""Verify type hints are mypy compatible."""
session.install("-e", ".")
session.install(
"mypy",
"types-setuptools",
"types-protobuf",
"types-dataclasses",
"types-mock",
"mypy", "types-setuptools", "types-protobuf", "types-dataclasses", "types-mock",
)
# Note: generated tests (in 'tests/unit/gapic') are not yet
# mypy-safe
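The mypy session's dependency list is only reflowed onto a single line here; assuming nox is installed in the local environment, the session can still be run with `nox -s mypy`.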
2 changes: 1 addition & 1 deletion tests/unit/v1/test_base_document.py
@@ -16,7 +16,7 @@
import unittest

import mock
from proto.datetime_helpers import DatetimeWithNanoseconds # type: ignore
from proto.datetime_helpers import DatetimeWithNanoseconds # type: ignore


class TestBaseDocumentReference(unittest.TestCase):
6 changes: 1 addition & 5 deletions tests/unit/v1/test_bulk_writer.py
@@ -24,7 +24,6 @@
from google.cloud.firestore_v1.async_client import AsyncClient
from google.cloud.firestore_v1.base_document import BaseDocumentReference
from google.cloud.firestore_v1.client import Client
from google.cloud.firestore_v1.base_client import BaseClient
from google.cloud.firestore_v1.bulk_batch import BulkWriteBatch
from google.cloud.firestore_v1.bulk_writer import (
BulkRetry,
@@ -139,9 +138,7 @@ def test_ctor_explicit(self):

@staticmethod
def _get_document_reference(
client,
collection_name: str = "col",
id: str = None,
client, collection_name: str = "col", id: str = None,
) -> Type:
return client.collection(collection_name).document(id)

@@ -152,7 +149,6 @@ def _doc_iter(self, client, num: int, ids: Optional[List[str]] = None):

def _verify_bw_activity(self, bw: NoSendBulkWriter, counts: List[Tuple[int, int]]):
total_batches = sum([el[1] for el in counts])
batches_word = "batches" if total_batches != 1 else "batch"
assert len(bw._responses) == total_batches

expected_counts = dict(counts)
1 change: 0 additions & 1 deletion tests/unit/v1/test_rate_limiter.py
@@ -14,7 +14,6 @@

import datetime
import unittest
from typing import Optional

import mock
import google
