Skip to content

Commit

Permalink
Feat: Added a few async tests and hybrid protobuf/JSON serialization
Browse files Browse the repository at this point in the history
  • Loading branch information
felixnext committed May 25, 2024
1 parent 85d7114 commit 6a39141
Show file tree
Hide file tree
Showing 14 changed files with 331 additions and 39 deletions.
43 changes: 21 additions & 22 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,36 +5,35 @@ name: Python package

on:
push:
branches: [ main ]
branches: [main]
pull_request:
branches: [ main ]
branches: [main]

jobs:
build:

runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]

steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest pytest-mock pytest-xdist pytest-forked
for file in requirements*.txt; do pip install -r $file; done
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=13 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest --forked
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest pytest-mock pytest-xdist pytest-forked pytest-asyncio
for file in requirements*.txt; do pip install -r $file; done
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=13 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest --forked
2 changes: 1 addition & 1 deletion functown/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "2.2.0"
__version__ = "2.3.0"

import logging

Expand Down
17 changes: 8 additions & 9 deletions functown/insights/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,11 @@
Copyright (c) 2023, Felix Geilert
"""


from dataclasses import dataclass
from datetime import datetime
from enum import Enum
import logging
from typing import Dict, Union, Type, List, Any, Callable
from typing import Dict, Union, Type, List, Any, Callable, Optional

from opencensus.ext.azure import metrics_exporter
from opencensus.stats.measurement_map import MeasurementMap
Expand Down Expand Up @@ -79,7 +78,7 @@ class MetricSpec:
columns: List[str]
mtype: MetricType
dtype: Type[Union[int, float]] = int
namespace: str = None
namespace: Optional[str] = None
start_value: Union[int, float, None] = None

@property
Expand Down Expand Up @@ -131,7 +130,7 @@ def __init__(
vm: stats_module.ViewManager,
map: MeasurementMap,
add_name_column: bool = True,
handler_columns: Dict[str, Any] = None,
handler_columns: Optional[Dict[str, Any]] = None,
):
# store the spec
self.spec = spec
Expand Down Expand Up @@ -190,8 +189,8 @@ def __build_tag_map(self, columns: Dict[str, Any]) -> tag_map_module.TagMap:
for key, value in cols.items():
if key not in self.spec.columns:
logging.warning(
f"Key {key} is not a valid column for metric {self.sepc.name}. "
"Ignoring."
f"Key {key} is not a valid column for metric "
f"{self.spec.name}. Ignoring."
)
continue
tag.insert(key, value)
Expand Down Expand Up @@ -379,7 +378,7 @@ def create_metrics(
self,
specs: List[MetricSpec],
mode: MetricUseMode = MetricUseMode.HARD_FAIL,
global_columns: Dict[str, Any] = None,
global_columns: Optional[Dict[str, Any]] = None,
) -> bool:
"""Creates a list of metrics based on the specifications.
Expand Down Expand Up @@ -433,7 +432,7 @@ def create_metrics(

return True

def record(self, values: Dict[str, Any], columns: Dict[str, Any] = None):
def record(self, values: Dict[str, Any], columns: Optional[Dict[str, Any]] = None):
"""Records the given values to the metrics.
In this case the values for all metrics are recorded under the same tag-map
Expand Down Expand Up @@ -474,7 +473,7 @@ def connect_insights(
self,
instrumentation_key: str,
callback: Callable[[metrics_exporter.Envelope], bool],
enable_standard_metrics: bool = None,
enable_standard_metrics: Optional[bool] = None,
flush_sec: float = 15,
):
"""Connects the metrics to Azure Insights.
Expand Down
1 change: 1 addition & 0 deletions functown/serialization/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

try:
from .protobuf import ProtobufResponse, ProtobufRequest
from .hybrid import HybridProtoResponse
except ImportError:
logging.warning("Unable to load protobuf, please install `functown[protobuf]`")

Expand Down
4 changes: 2 additions & 2 deletions functown/serialization/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"""

from abc import abstractmethod
from typing import Any, Dict, Tuple, Union
from typing import Any, Dict, Tuple, Union, Optional

from azure.functions import HttpResponse, HttpRequest

Expand All @@ -25,7 +25,7 @@ class SerializationDecorator(BaseDecorator):
def __init__(
self,
func=None,
headers: Dict[str, str] = None,
headers: Optional[Dict[str, str]] = None,
status_code: int = 200,
**kwargs
):
Expand Down
139 changes: 139 additions & 0 deletions functown/serialization/hybrid.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
"""Converts input as either json or protobuf
Note: This does not include deserialize as Protobuf can handle that.
Copyright (c) 2023, Felix Geilert
"""

import typing as tp
import logging
import json

from azure.functions import HttpRequest

from functown.args import ContentTypes, RequestArgHandler, HeaderEnum
from google.protobuf import json_format

from .base import SerializationDecorator


class HybridProtoResponse(SerializationDecorator):
    """Provides a protobuf or JSON serialized response for an Azure Function.

    The response format is chosen from the request's ``Content-Type`` header:
    ``application/json`` yields a JSON body, anything else falls back to
    binary protobuf.

    Args:
        func (Callable): The function to decorate.
        pb_class (Any): The protobuf class to use for serialization. If
            provided, will perform a hard type check on the response.
            Defaults to `None`.
        headers (Dict[str, str]): The headers to add to the response.
        status_code (int): The status code of the response.
        allow_json (bool): Whether to allow dict or list objects as input. In
            this case the object will automatically be converted into the
            regarding protobuf class. (Note that this strictly requires
            pb_class to be provided). Defaults to `None` (True if pb_class
            provided).
        json_all_fields (bool): Whether to include all fields in the JSON
            response. Defaults to `True`.

    Example:
        >>> @HybridProtoResponse
        ... def main(req: HttpRequest) -> Dict[str, str]:
        ...     pb = pb2.Example()
        ...     return pb
        >>> @HybridProtoResponse(headers={"X-My-Header": "My-Value"})
        ... def main(req: HttpRequest) -> Dict[str, str]:
        ...     pb = pb2.Example()
        ...     return pb
    """

    def __init__(
        self,
        func=None,
        pb_class: tp.Any = None,
        # NOTE: use typing generics (tp.Dict, not dict[...]) — builtin
        # generics raise TypeError at import time on Python 3.7/3.8,
        # which the CI matrix still targets
        headers: tp.Optional[tp.Dict[str, str]] = None,
        status_code: int = 200,
        allow_json: tp.Optional[bool] = None,
        json_all_fields: bool = True,
        **kwargs,
    ):
        super().__init__(func, headers, status_code, **kwargs)
        self._pb_class = pb_class
        self._json_all = json_all_fields
        # default: only auto-convert dict/list inputs when a pb class exists
        self._allow_json = (
            allow_json if allow_json is not None else pb_class is not None
        )

    def serialize(
        self, req: HttpRequest, res: tp.Any, *args, **kwargs
    ) -> tp.Tuple[tp.Union[bytes, str], str]:
        """Serializes `res` as protobuf bytes or a JSON string.

        Args:
            req (HttpRequest): Incoming request; its Content-Type header
                selects the output format.
            res (Any): Function result — a protobuf message, pre-serialized
                bytes, or (when allowed) a dict/list.

        Returns:
            Tuple of (payload, content-type marker).

        Raises:
            ValueError: If `res` fails the pb_class type check, cannot be
                serialized, or bytes need JSON conversion without a pb_class.
        """
        # FEAT: integrate async support [LIN:MED-568]
        # check for request header (e.g. "application/json; charset=utf-8")
        mime_raw = RequestArgHandler(req).get_header(
            HeaderEnum.CONTENT_TYPE, required=False
        )
        # strip any charset suffix and normalize case before comparing
        mime = (
            mime_raw.split(";")[0].lower()
            if mime_raw is not None and isinstance(mime_raw, str)
            else None
        )

        # check for response type
        use_json = False
        if mime == ContentTypes.JSON.value.lower():
            use_json = True
        elif mime != ContentTypes.BINARY.value.lower():
            logging.warning(f"Unknown mime type '{mime}', defaulting to binary/json")

        # check if already serialized
        if isinstance(res, bytes):
            if use_json is False:
                return res, ContentTypes.BINARY
            # convert from serialized data to protobuf so it can be
            # re-emitted as JSON below
            if self._pb_class is None:
                # FIX: previously this fell through to an opaque
                # AttributeError (None.FromString) — fail with a clear error
                raise ValueError(
                    "pb_class is required to convert a bytes response to JSON"
                )
            res = self._pb_class.FromString(res)

        # perform type check (if requested)
        if self._pb_class is not None:
            # check if json or list and convert
            if self._allow_json is True and isinstance(res, (list, dict)):
                res = json_format.ParseDict(res, self._pb_class())
            elif not isinstance(res, self._pb_class):
                raise ValueError(f"Response is not of type {self._pb_class.__name__}")
        elif isinstance(res, (dict, list)) and (
            use_json is True or mime != ContentTypes.BINARY.value.lower()
        ):
            # NOTE: return json if pb_class is not provided and not explicit
            # request as cast
            logging.warning("Auto-Casting to JSON response")
            return json.dumps(res), ContentTypes.JSON

        # check for SerializeToString method
        if not hasattr(res, "SerializeToString") and use_json is False:
            raise ValueError(
                f"Response ({type(res)}) does not have a " "SerializeToString method"
            )

        # execute correct response
        if use_json is True:
            # DEBT: sub-objects are skipped instead of null values [LIN:MED-391]
            return (
                json_format.MessageToJson(
                    res,
                    # NOTE: this is helpful for gql parsing
                    # (avoid resolver errors)
                    including_default_value_fields=self._json_all,
                    # NOTE: ensure consistency between proto and gql schema
                    preserving_proto_field_name=True,
                ),
                ContentTypes.JSON,
            )
        return res.SerializeToString(), ContentTypes.BINARY
3 changes: 2 additions & 1 deletion functown/utils/base_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
Copyright (c) 2023, Felix Geilert
"""


import logging
from inspect import Parameter, Signature, signature
import threading
Expand All @@ -15,6 +14,8 @@
# defines type used for ids
IDTYPE = str

# FIXME: allow to be async


class BaseDecorator(object):
"""Base Decorator class.
Expand Down
3 changes: 2 additions & 1 deletion requirements-test.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@ pytest
pytest-cov
pytest-mock
pytest-xdist
pytest-forked
pytest-forked
pytest-asyncio
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@


__status__ = "Package"
__copyright__ = "Copyright 2023"
__copyright__ = "Copyright 2024"
__license__ = "MIT License"
__author__ = "Felix Geilert"

Expand Down
19 changes: 19 additions & 0 deletions tests/auth/test_auth_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,22 @@ def test_func(request: HttpRequest, token: Token, **kwargs):

with pytest.raises(TokenError):
test_func(token_request)


@pytest.mark.asyncio
async def test_auth_decorator_async(token_request: HttpRequest):
    """Verifies AuthHandler on async functions: pass and fail cases."""
    # success case: scope "test.bar" is present in the fixture token
    @AuthHandler(["test.bar"])
    async def test_func(request: HttpRequest, token: Token, **kwargs):
        # assert — use isinstance instead of type comparison (E721)
        assert isinstance(token, Token)
        assert token.user_id == "test"

    await test_func(token_request)

    # test failure case — distinct name so it does not shadow the
    # success-case function above (avoids F811-style redefinition)
    @AuthHandler(["test.baz"])
    async def test_func_fail(request: HttpRequest, token: Token, **kwargs):
        pass

    with pytest.raises(TokenError):
        await test_func_fail(token_request)
3 changes: 1 addition & 2 deletions tests/insights/test_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
Copyright (c) 2023, Felix Geilert
"""


import pytest
import time

Expand Down Expand Up @@ -105,7 +104,7 @@ def test_metric_counter(name, cols, mtype, dtype, start, values, expected_data):
)

# create a metric
handler = MetricHandler()
handler = MetricHandler(add_name_column=True)
handler.create_metrics([spec])
metric = handler[name]

Expand Down
Loading

0 comments on commit 6a39141

Please sign in to comment.