Remove redundant test cases
ZixinYang committed Oct 26, 2023
commit: 2c1926a · 1 parent: c2f6ce4
Showing 3 changed files with 2 additions and 21 deletions.
4 changes: 2 additions & 2 deletions libs/langchain/langchain/llms/fireworks.py
@@ -31,9 +31,9 @@ class Fireworks(BaseLLM):
     model: str = "accounts/fireworks/models/llama-v2-7b-chat"
     model_kwargs: dict = Field(
         default_factory=lambda: {
-            "temperature": 0.1,
+            "temperature": 0.7,
             "max_tokens": 512,
-            "top_p": 0.9,
+            "top_p": 1,
         }.copy()
     )
     fireworks_api_key: Optional[str] = None
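
For callers who relied on the previous sampling defaults, a minimal sketch of passing the old values back explicitly through the model_kwargs field shown above (assuming the API key is available, e.g. via the fireworks_api_key field or the FIREWORKS_API_KEY environment variable):

from langchain.llms.fireworks import Fireworks

# Override the new defaults (temperature=0.7, top_p=1) with the previous values.
llm = Fireworks(
    model_kwargs={
        "temperature": 0.1,
        "max_tokens": 512,
        "top_p": 0.9,
    }
)
output = llm.generate(["How is the weather in New York today?"])
print(output.generations[0][0].text)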
@@ -68,16 +68,6 @@ def test_chat_fireworks_llm_output_contains_model_id() -> None:
     assert llm_result.llm_output["model"] == chat.model
 
 
-def test_chat_fireworks_llm_output_stop_words() -> None:
-    """Test llm_output contains model_id."""
-    chat = ChatFireworks()
-    message = HumanMessage(content="Hello")
-    llm_result = chat.generate([[message]], stop=[","])
-    assert llm_result.llm_output is not None
-    assert llm_result.llm_output["model"] == chat.model
-    assert llm_result.generations[0][0].text[-1] == ","
-
-
 def test_fireworks_invoke() -> None:
     """Tests chat completion with invoke"""
     chat = ChatFireworks()
9 changes: 0 additions & 9 deletions libs/langchain/tests/integration_tests/llms/test_fireworks.py
@@ -104,15 +104,6 @@ def test_fireworks_multiple_prompts() -> None:
     assert len(output.generations) == 2
 
 
-def test_fireworks_stop_words() -> None:
-    """Test completion with stop words."""
-    llm = Fireworks()
-    output = llm.generate(["How is the weather in New York today?"], stop=[","])
-    assert isinstance(output, LLMResult)
-    assert isinstance(output.generations, list)
-    assert output.generations[0][0].text[-1] == ","
-
-
 def test_fireworks_streaming() -> None:
     """Test stream completion."""
     llm = Fireworks()
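
The deleted cases only exercised stop sequences; stop words can still be passed directly to generate, as those tests did. A minimal sketch mirroring them for both interfaces, assuming a configured Fireworks API key:

from langchain.chat_models.fireworks import ChatFireworks
from langchain.llms.fireworks import Fireworks
from langchain.schema.messages import HumanMessage

# Completion interface: generate takes a list of prompt strings.
llm = Fireworks()
llm_result = llm.generate(["How is the weather in New York today?"], stop=[","])

# Chat interface: generate takes a list of message lists.
chat = ChatFireworks()
chat_result = chat.generate([[HumanMessage(content="Hello")]], stop=[","])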
