Showing 10 changed files with 467 additions and 14 deletions.
@@ -0,0 +1,53 @@
import asyncio
from textwrap import dedent

from wanga.models.messages import parse_messages
from wanga.models.model import ToolParams
from wanga.models.openai import OpenaAIModel
from wanga.schema import default_schema_extractor


def test_reply():
    # Prompts use the [|role|] chat markup that parse_messages understands.
    model = OpenaAIModel("gpt-3.5-turbo")
    prompt = r"""
    [|system|]
    You are a helpful assistant.
    [|user|]
    2 + 2 = ?
    """
    prompt = dedent(prompt.removeprefix("\n"))

    messages = parse_messages(prompt)
    response = model.reply(messages)

    response_text = response.response_options[0].message.content
    assert isinstance(response_text, str)
    assert "4" in response_text

    # The async path should produce an equivalent answer.
    async_response_text = asyncio.run(model.reply_async(messages), debug=True).response_options[0].message.content
    assert isinstance(async_response_text, str)
    assert "4" in async_response_text


def test_context_size():
    assert OpenaAIModel("gpt-4-turbo").context_length == 128000
    assert OpenaAIModel("gpt-4").context_length == 8192


def test_num_tokens():
    model = OpenaAIModel("gpt-3.5-turbo")
    prompt = r"""
    [|system|]
    You are a helpful assistant.
    [|user|]
    2 + 2 = ?
    """

    def tool(x: int, y: str):
        pass

    tool_schema = default_schema_extractor.extract_schema(tool)
    prompt = dedent(prompt.removeprefix("\n"))
    messages = parse_messages(prompt)
    tools = ToolParams(tools=[tool_schema])
    # The cheap token estimate should stay within one token of the exact count.
    assert abs(model.estimate_num_tokens(messages, tools) - model.calculate_num_tokens(messages, tools)) < 2
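
For context, a minimal usage sketch of the API these tests exercise. It is not part of the commit: it assumes the wanga package is installed with its OpenAI extra and that an OpenAI API key is configured in the environment, and it only uses the names already imported in the test file above.

# Hypothetical usage sketch (not from this commit); assumes `wanga` is installed
# and an OpenAI API key is available in the environment.
from textwrap import dedent

from wanga.models.messages import parse_messages
from wanga.models.openai import OpenaAIModel

prompt = dedent(
    """\
    [|system|]
    You are a helpful assistant.
    [|user|]
    2 + 2 = ?
    """
)

model = OpenaAIModel("gpt-3.5-turbo")
messages = parse_messages(prompt)  # parse the [|role|] markup into message objects
response = model.reply(messages)   # synchronous chat-completion request
print(response.response_options[0].message.content)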
@@ -1,3 +1,38 @@
from importlib import import_module
from types import ModuleType
from typing import TYPE_CHECKING, TypeAlias

__all__ = [
    "JSON",
    "openai",
    "anthropic",
]

JSON: TypeAlias = str | float | int | bool | None | dict[str, "JSON"] | list["JSON"]


class LazyModule(ModuleType):
    """Placeholder module that defers importing an optional dependency until first attribute access."""

    def __init__(self, name: str, extra: str, self_name: str):
        super().__init__(name)
        self.extra = extra
        self.self_name = self_name

    def __getattr__(self, name: str):
        try:
            module = import_module(self.__name__)
        except ImportError as err:
            # Report the missing optional dependency instead of trying to import the message text.
            raise ImportError(
                f"Module {self.__name__} not installed, install the {self.extra} "
                f"capability using `pip install {self.self_name}[{self.extra}]`"
            ) from err
        return getattr(module, name)


if TYPE_CHECKING:
    import anthropic
    import openai
    import openai_function_tokens
else:
    openai: ModuleType = LazyModule("openai", "openai", "wanga")
    openai_function_tokens = LazyModule("openai_function_tokens", "openai", "wanga")
    anthropic: ModuleType = LazyModule("anthropic", "anthropic", "wanga")
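
As a usage note on the lazy-import shim above (a hypothetical session, not part of the commit): importing the placeholder always succeeds, and the real package is only imported on first attribute access, so a missing optional dependency surfaces at that point with the pip install hint.

# Hypothetical usage of the lazy placeholders defined above.
from wanga import openai  # always succeeds: this only binds the LazyModule placeholder

print(openai.__version__)  # first attribute access triggers import_module("openai");
                           # if the real package is missing, the ImportError with the
                           # `pip install wanga[openai]` hint is raised here instead.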