Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add client debug logging support for unary-stream gRPC/REST calls #794

Open
wants to merge 9 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 21 additions & 21 deletions .kokoro/docker/docs/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ argcomplete==3.5.3 \
--hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \
--hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392
# via nox
cachetools==5.5.1 \
--hash=sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95 \
--hash=sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb
cachetools==5.5.0 \
--hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \
--hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a
# via google-auth
certifi==2024.12.14 \
--hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \
Expand Down Expand Up @@ -124,23 +124,23 @@ distlib==0.3.9 \
--hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
--hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
# via virtualenv
filelock==3.17.0 \
--hash=sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338 \
--hash=sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e
filelock==3.16.1 \
--hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
--hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
# via virtualenv
gcp-docuploader==0.6.5 \
--hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
--hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
# via -r requirements.in
google-api-core==2.24.1 \
--hash=sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1 \
--hash=sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a
google-api-core==2.24.0 \
--hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \
--hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf
# via
# google-cloud-core
# google-cloud-storage
google-auth==2.38.0 \
--hash=sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4 \
--hash=sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a
google-auth==2.37.0 \
--hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \
--hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0
# via
# google-api-core
# google-cloud-core
Expand All @@ -149,9 +149,9 @@ google-cloud-core==2.4.1 \
--hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
--hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
# via google-cloud-storage
google-cloud-storage==3.0.0 \
--hash=sha256:2accb3e828e584888beff1165e5f3ac61aa9088965eb0165794a82d8c7f95297 \
--hash=sha256:f85fd059650d2dbb0ac158a9a6b304b66143b35ed2419afec2905ca522eb2c6a
google-cloud-storage==2.19.0 \
--hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \
--hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2
# via gcp-docuploader
google-crc32c==1.6.0 \
--hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \
Expand Down Expand Up @@ -208,9 +208,9 @@ platformdirs==4.3.6 \
--hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
--hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
# via virtualenv
proto-plus==1.26.0 \
--hash=sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22 \
--hash=sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7
proto-plus==1.25.0 \
--hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \
--hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91
# via google-api-core
protobuf==5.29.3 \
--hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \
Expand Down Expand Up @@ -291,7 +291,7 @@ urllib3==2.3.0 \
--hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
--hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
# via requests
virtualenv==20.29.1 \
--hash=sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779 \
--hash=sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35
virtualenv==20.28.1 \
--hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \
--hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329
# via nox
26 changes: 24 additions & 2 deletions google/api_core/_rest_streaming_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"""Helpers for server-side streaming in REST."""

from collections import deque
import logging
import string
from typing import Deque, Union
import types
Expand All @@ -23,6 +24,8 @@
import google.protobuf.message
from google.protobuf.json_format import Parse

_LOGGER = logging.getLogger(__name__)


class BaseResponseIterator:
"""Base Iterator over REST API responses. This class should not be used directly.
Expand Down Expand Up @@ -97,19 +100,38 @@ def _process_chunk(self, chunk: str):
self._obj += char
self._escape_next = not self._escape_next if char == "\\" else False

def _log_response_payload(self, response_payload: str):  # pragma: NO COVER
    """Emit one streamed REST response payload as a structured DEBUG record.

    Args:
        response_payload: The raw JSON text of a single streamed response
            object, already assembled by the chunk parser.
    """
    # The payload travels in ``extra`` so structured-logging handlers can
    # pick it up without parsing the message text.
    _LOGGER.debug(
        "Received response via REST stream",
        extra={"response": {"payload": response_payload, "status": "OK"}},
    )

def _create_grab(self):
logging_enabled = _LOGGER.isEnabledFor(logging.DEBUG)
if issubclass(self._response_message_cls, proto.Message):

def grab(this):
response_payload = this._ready_objs.popleft()
if logging_enabled: # pragma: NO COVER
self._log_response_payload(response_payload)
return this._response_message_cls.from_json(
this._ready_objs.popleft(), ignore_unknown_fields=True
response_payload, ignore_unknown_fields=True
)

return grab
elif issubclass(self._response_message_cls, google.protobuf.message.Message):

def grab(this):
return Parse(this._ready_objs.popleft(), this._response_message_cls())
response_payload = this._ready_objs.popleft()
if logging_enabled: # pragma: NO COVER
self._log_response_payload(response_payload)
return Parse(response_payload, this._response_message_cls())

return grab
else:
Expand Down
30 changes: 28 additions & 2 deletions google/api_core/grpc_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,13 @@

import collections
import functools
import logging
import pickle
import warnings

import google.protobuf.json_format
import grpc
import proto

from google.api_core import exceptions
import google.auth
Expand Down Expand Up @@ -48,6 +52,7 @@
else:
HAS_GRPC_GCP = False

_LOGGER = logging.getLogger(__name__)

# The list of gRPC Callable interfaces that return iterators.
_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
Expand Down Expand Up @@ -112,8 +117,29 @@ def __next__(self) -> P:
if hasattr(self, "_stored_first_result"):
result = self._stored_first_result
del self._stored_first_result
return result
return next(self._wrapped)
else:
result = next(self._wrapped)
logging_enabled = _LOGGER.isEnabledFor(logging.DEBUG)
if logging_enabled: # pragma: NO COVER
if isinstance(result, proto.Message):
response_payload = type(result).to_json(result)
elif isinstance(result, google.protobuf.message.Message):
response_payload = google.protobuf.json_format.MessageToJson(result)
else:
response_payload = (
f"{type(result).__name__}: {str(pickle.dumps(result))}"
)
grpc_response = {
"payload": response_payload,
"status": "OK",
}
_LOGGER.debug(
f"Received response of type {type(result)} via gRPC stream",
extra={
"response": grpc_response,
},
)
return result
except grpc.RpcError as exc:
# If the stream has already returned data, we cannot recover here.
raise exceptions.from_grpc_error(exc) from exc
Expand Down
31 changes: 29 additions & 2 deletions google/api_core/grpc_helpers_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,17 +20,23 @@

import asyncio
import functools
import logging
import pickle

from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar

import google.protobuf.json_format
import google.protobuf.message
import grpc
from grpc import aio
import proto

from google.api_core import exceptions, grpc_helpers

# denotes the proto response type for grpc calls
P = TypeVar("P")

_LOGGER = logging.getLogger(__name__)

# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
# automatic patching for us. But that means the overhead of creating an
# extra Python function spreads to every single send and receive.
Expand Down Expand Up @@ -94,7 +100,28 @@ def __init__(self):

async def read(self) -> P:
    """Read a single response message from the wrapped stream.

    When DEBUG logging is enabled for this module, the received message is
    serialized for the log record: proto-plus messages via ``to_json``,
    raw protobuf messages via ``MessageToJson``, and anything else via a
    pickle repr fallback. The logging path is wrapped so that a
    serialization failure (e.g. an unpicklable result) can never break the
    RPC itself — enabling DEBUG must not change call outcomes.

    Returns:
        The next response message from the underlying call.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: mapped from any
            ``grpc.RpcError`` raised by the underlying ``read()``.
    """
    # Keep the try body minimal: only the gRPC read should be translated
    # through from_grpc_error.
    try:
        result = await self._call.read()
    except grpc.RpcError as rpc_error:
        raise exceptions.from_grpc_error(rpc_error) from rpc_error
    if _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
        try:
            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = google.protobuf.json_format.MessageToJson(result)
            else:
                # Fallback for non-protobuf results; pickle.dumps may raise
                # for exotic types, hence the enclosing try/except.
                response_payload = (
                    f"{type(result).__name__}: {str(pickle.dumps(result))}"
                )
            # Lazy %-style args: formatting only happens if a handler
            # actually emits the record.
            _LOGGER.debug(
                "Received response of type %s via gRPC stream",
                type(result),
                extra={"response": {"payload": response_payload, "status": "OK"}},
            )
        except Exception:
            # Never let debug logging break the stream.
            _LOGGER.debug(
                "Received response via gRPC stream (payload could not be serialized)"
            )
    return result

Expand Down Expand Up @@ -219,7 +246,7 @@ def create_channel(
default_host=None,
compression=None,
attempt_direct_path: Optional[bool] = False,
**kwargs
**kwargs,
):
"""Create an AsyncIO secure channel with credentials.

Expand Down
4 changes: 2 additions & 2 deletions google/api_core/retry/retry_streaming.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ def retry_target_stream(
[List[Exception], RetryFailureReason, Optional[float]],
Tuple[Exception, Optional[Exception]],
] = build_retry_error,
init_args: _P.args = (),
init_kwargs: _P.kwargs = {},
init_args: tuple = (),
init_kwargs: dict = {},
**kwargs,
) -> Generator[_Y, Any, None]:
"""Create a generator wrapper that retries the wrapped stream if it fails.
Expand Down
4 changes: 2 additions & 2 deletions google/api_core/retry/retry_streaming_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ async def retry_target_stream(
[list[Exception], RetryFailureReason, float | None],
tuple[Exception, Exception | None],
] = build_retry_error,
init_args: _P.args = (),
init_kwargs: _P.kwargs = {},
init_args: tuple = (),
init_kwargs: dict = {},
**kwargs,
) -> AsyncGenerator[_Y, None]:
"""Create a generator wrapper that retries the wrapped stream if it fails.
Expand Down
19 changes: 18 additions & 1 deletion tests/asyncio/test_grpc_helpers_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ async def test_wrap_stream_errors_raised():


@pytest.mark.asyncio
async def test_wrap_stream_errors_read():
async def test_wrap_stream_errors_read_with_grpc_error():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)

mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
Expand All @@ -206,6 +206,23 @@ async def test_wrap_stream_errors_read():
assert exc_info.value.response == grpc_error


@pytest.mark.asyncio
async def test_wrap_stream_errors_read_without_grpc_error():
    """read() on a wrapped stream-stream call delegates to the underlying
    call and surfaces its result unchanged when no gRPC error occurs."""
    mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)

    mock_call.read = mock.AsyncMock()
    multicallable = mock.Mock(return_value=mock_call)

    wrapped_callable = grpc_helpers_async._wrap_stream_errors(
        multicallable, grpc_helpers_async._WrappedStreamStreamCall
    )

    wrapped_call = await wrapped_callable(1, 2, three="four")
    multicallable.assert_called_once_with(1, 2, three="four")
    assert mock_call.wait_for_connection.call_count == 1
    result = await wrapped_call.read()
    # The wrapper must delegate exactly once and return the underlying
    # call's result unchanged (previously this test asserted nothing
    # about the read).
    mock_call.read.assert_called_once()
    assert result is mock_call.read.return_value


@pytest.mark.asyncio
async def test_wrap_stream_errors_aiter():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
Expand Down