Skip to content

Commit

Permalink
chore: [autoapprove] Update black and isort to latest versions (#777)
Browse files Browse the repository at this point in the history

Source-Link: googleapis/synthtool@0c7b033
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
gcf-owl-bot[bot] and gcf-owl-bot[bot] authored Oct 10, 2023
1 parent 1547351 commit d07eebf
Show file tree
Hide file tree
Showing 23 changed files with 34 additions and 77 deletions.
4 changes: 2 additions & 2 deletions .github/.OwlBot.lock.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb
# created: 2023-10-02T21:31:03.517640371Z
digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547
# created: 2023-10-09T14:06:13.397766266Z
6 changes: 3 additions & 3 deletions .kokoro/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \
--hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
--hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
# via -r requirements.in
urllib3==1.26.12 \
--hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
--hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
urllib3==1.26.17 \
--hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \
--hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b
# via
# requests
# twine
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ repos:
- id: end-of-file-fixer
- id: check-yaml
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 23.7.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2053,7 +2053,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:

r"""Call the cancel operation method over HTTP.
Args:
Expand Down Expand Up @@ -2119,7 +2118,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:

r"""Call the delete operation method over HTTP.
Args:
Expand Down Expand Up @@ -2182,7 +2180,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:

r"""Call the get operation method over HTTP.
Args:
Expand Down Expand Up @@ -2249,7 +2246,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.ListOperationsResponse:

r"""Call the list operations method over HTTP.
Args:
Expand Down
6 changes: 0 additions & 6 deletions google/cloud/firestore_v1/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -440,7 +440,6 @@ def extract_fields(
yield prefix_path, _EmptyDict
else:
for key, value in sorted(document_data.items()):

if expand_dots:
sub_key = FieldPath.from_string(key)
else:
Expand Down Expand Up @@ -503,7 +502,6 @@ def __init__(self, document_data) -> None:
iterator = self._get_document_iterator(prefix_path)

for field_path, value in iterator:

if field_path == prefix_path and value is _EmptyDict:
self.empty_document = True

Expand Down Expand Up @@ -565,7 +563,6 @@ def _get_update_mask(self, allow_empty_mask=False) -> None:
def get_update_pb(
self, document_path, exists=None, allow_empty_mask=False
) -> types.write.Write:

if exists is not None:
current_document = common.Precondition(exists=exists)
else:
Expand Down Expand Up @@ -762,7 +759,6 @@ def _normalize_merge_paths(self, merge) -> list:
return merge_paths

def _apply_merge_paths(self, merge) -> None:

if self.empty_document:
raise ValueError("Cannot merge specific fields with empty document.")

Expand All @@ -773,7 +769,6 @@ def _apply_merge_paths(self, merge) -> None:
self.merge = merge_paths

for merge_path in merge_paths:

if merge_path in self.transform_paths:
self.transform_merge.append(merge_path)

Expand Down Expand Up @@ -1187,7 +1182,6 @@ def deserialize_bundle(
bundle: Optional[FirestoreBundle] = None
data: Dict
for data in _parse_bundle_elements_data(serialized):

# BundleElements are serialized as JSON containing one key outlining
# the type, with all further data nested under that key
keys: List[str] = list(data.keys())
Expand Down
6 changes: 3 additions & 3 deletions google/cloud/firestore_v1/async_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,9 +292,9 @@ async def stream(
yield snapshot

@staticmethod
def _get_collection_reference_class() -> Type[
"firestore_v1.async_collection.AsyncCollectionReference"
]:
def _get_collection_reference_class() -> (
Type["firestore_v1.async_collection.AsyncCollectionReference"]
):
from google.cloud.firestore_v1.async_collection import AsyncCollectionReference

return AsyncCollectionReference
Expand Down
7 changes: 4 additions & 3 deletions google/cloud/firestore_v1/base_aggregation.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,9 +196,10 @@ def stream(
retries.Retry, None, gapic_v1.method._MethodDefault
] = gapic_v1.method.DEFAULT,
timeout: float | None = None,
) -> Generator[List[AggregationResult], Any, None] | AsyncGenerator[
List[AggregationResult], None
]:
) -> (
Generator[List[AggregationResult], Any, None]
| AsyncGenerator[List[AggregationResult], None]
):
"""Runs the aggregation query.
This sends a``RunAggregationQuery`` RPC and returns an iterator in the stream of ``RunAggregationQueryResponse`` messages.
Expand Down
1 change: 0 additions & 1 deletion google/cloud/firestore_v1/base_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,6 @@ def where(
wrapped_names = []

for name in value:

if isinstance(name, str):
name = self.document(name)

Expand Down
2 changes: 0 additions & 2 deletions google/cloud/firestore_v1/base_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -808,7 +808,6 @@ def _filters_pb(self) -> Optional[StructuredQuery.Filter]:
else:
return _filter_pb(filter)
else:

composite_filter = query.StructuredQuery.CompositeFilter(
op=StructuredQuery.CompositeFilter.Operator.AND,
)
Expand All @@ -826,7 +825,6 @@ def _filters_pb(self) -> Optional[StructuredQuery.Filter]:
def _normalize_projection(projection) -> StructuredQuery.Projection:
"""Helper: convert field paths to message."""
if projection is not None:

fields = list(projection.fields)

if not fields:
Expand Down
3 changes: 0 additions & 3 deletions google/cloud/firestore_v1/bulk_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,6 @@ def _retry_operation(
self,
operation: "BulkWriterOperation",
) -> concurrent.futures.Future:

delay: int = 0
if self._options.retry == BulkRetry.exponential:
delay = operation.attempts**2 # pragma: NO COVER
Expand Down Expand Up @@ -365,7 +364,6 @@ def flush(self):
return

while True:

# Queue any waiting operations and try our luck again.
# This can happen if users add a number of records not divisible by
# 20 and then call flush (which should be ~19 out of 20 use cases).
Expand Down Expand Up @@ -469,7 +467,6 @@ def _send_until_queue_is_empty(self):
self._schedule_ready_retries()

while self._queued_batches:

# For FIFO order, add to the right of this deque (via `append`) and take
# from the left (via `popleft`).
operations: List[BulkWriterOperation] = self._queued_batches.popleft()
Expand Down
7 changes: 3 additions & 4 deletions google/cloud/firestore_v1/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,6 @@ def get(
def _chunkify(
self, chunk_size: int
) -> Generator[List[DocumentSnapshot], None, None]:

max_to_return: Optional[int] = self._limit
num_returned: int = 0
original: Query = self._copy()
Expand Down Expand Up @@ -354,9 +353,9 @@ def on_snapshot(docs, changes, read_time):
return Watch.for_query(self, callback, document.DocumentSnapshot)

@staticmethod
def _get_collection_reference_class() -> Type[
"firestore_v1.collection.CollectionReference"
]:
def _get_collection_reference_class() -> (
Type["firestore_v1.collection.CollectionReference"]
):
from google.cloud.firestore_v1.collection import CollectionReference

return CollectionReference
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2180,7 +2180,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:

r"""Call the cancel operation method over HTTP.
Args:
Expand Down Expand Up @@ -2246,7 +2245,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:

r"""Call the delete operation method over HTTP.
Args:
Expand Down Expand Up @@ -2309,7 +2307,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:

r"""Call the get operation method over HTTP.
Args:
Expand Down Expand Up @@ -2376,7 +2373,6 @@ def __call__(
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.ListOperationsResponse:

r"""Call the list operations method over HTTP.
Args:
Expand Down
2 changes: 0 additions & 2 deletions google/cloud/firestore_v1/watch.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,6 @@ def __init__(
self._init_stream()

def _init_stream(self):

rpc_request = self._get_rpc_request

self._rpc = ResumableBidiRpc(
Expand Down Expand Up @@ -445,7 +444,6 @@ def on_snapshot(self, proto):
which = pb.WhichOneof("response_type")

if which == "target_change":

target_change_type = pb.target_change.target_change_type
_LOGGER.debug(f"on_snapshot: target change: {target_change_type}")

Expand Down
35 changes: 18 additions & 17 deletions noxfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,53 +17,54 @@
# Generated by synthtool. DO NOT EDIT!

from __future__ import absolute_import

import os
import pathlib
import re
import shutil
from typing import Dict, List
import warnings

import nox

FLAKE8_VERSION = "flake8==6.1.0"
PYTYPE_VERSION = "pytype==2020.7.24"
BLACK_VERSION = "black==22.3.0"
ISORT_VERSION = "isort==5.10.1"
BLACK_VERSION = "black[jupyter]==23.7.0"
ISORT_VERSION = "isort==5.11.0"
LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]

DEFAULT_PYTHON_VERSION = "3.8"

UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"]
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
"asyncmock",
"pytest",
"pytest-cov",
"pytest-asyncio",
]
UNIT_TEST_EXTERNAL_DEPENDENCIES = [
UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [
"aiounittest",
"six",
]
UNIT_TEST_LOCAL_DEPENDENCIES = []
UNIT_TEST_DEPENDENCIES = []
UNIT_TEST_EXTRAS = []
UNIT_TEST_EXTRAS_BY_PYTHON = {}
UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = []
UNIT_TEST_DEPENDENCIES: List[str] = []
UNIT_TEST_EXTRAS: List[str] = []
UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}

SYSTEM_TEST_PYTHON_VERSIONS = ["3.7"]
SYSTEM_TEST_STANDARD_DEPENDENCIES = [
SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.7"]
SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [
"mock",
"pytest",
"google-cloud-testutils",
]
SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [
SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [
"pytest-asyncio",
"six",
]
SYSTEM_TEST_LOCAL_DEPENDENCIES = []
SYSTEM_TEST_DEPENDENCIES = []
SYSTEM_TEST_EXTRAS = []
SYSTEM_TEST_EXTRAS_BY_PYTHON = {}
SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = []
SYSTEM_TEST_DEPENDENCIES: List[str] = []
SYSTEM_TEST_EXTRAS: List[str] = []
SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

Expand All @@ -78,6 +79,7 @@
"lint_setup_py",
"blacken",
"docs",
"format",
]

# Error if a python version is missing
Expand Down Expand Up @@ -214,7 +216,6 @@ def unit(session):


def install_systemtest_dependencies(session, *constraints):

# Use pre-release gRPC for system tests.
# Exclude version 1.52.0rc1 which has a known issue.
# See https://github.com/grpc/grpc/issues/32163
Expand Down
2 changes: 0 additions & 2 deletions tests/system/test_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -1411,7 +1411,6 @@ def _persist_documents(


def _do_recursive_delete(client, bulk_writer, empty_philosophers=False):

if empty_philosophers:
doc_paths = philosophers = []
else:
Expand Down Expand Up @@ -1823,7 +1822,6 @@ def test_count_query_stream_default_alias(query, database):

@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
def test_count_query_stream_with_alias(query, database):

count_query = query.count(alias="total")
for result in count_query.stream():
for aggregation_result in result:
Expand Down
Loading

0 comments on commit d07eebf

Please sign in to comment.