[Bugfix] Bump transformers to 4.43.2 (vllm-project#6752)
mgoin authored and phil committed Aug 6, 2024
1 parent 0f8250d commit 3144a11
Showing 1 changed file with 28 additions and 27 deletions.
55 changes: 28 additions & 27 deletions tests/test_config.py
@@ -64,8 +64,9 @@ def test_get_sliding_window():
 
 
 def test_rope_customization():
-    TEST_ROPE_SCALING = {"rope_type": "dynamic", "factor": 2.0}
+    TEST_ROPE_SCALING = {"type": "dynamic", "factor": 2.0}
     TEST_ROPE_THETA = 16_000_000.0
+    LONGCHAT_ROPE_SCALING = {"type": "linear", "factor": 8.0}
 
     llama_model_config = ModelConfig(
         "meta-llama/Meta-Llama-3-8B-Instruct",
@@ -95,29 +96,29 @@ def test_rope_customization():
                    None) == TEST_ROPE_THETA
     assert llama_model_config.max_model_len == 16384
 
-    # TODO: add these back when the rope configs are fixed
-    # LONGCHAT_ROPE_SCALING = {"rope_type": "linear", "factor": 8.0}
-    # longchat_model_config = ModelConfig(
-    #     "lmsys/longchat-13b-16k",
-    #     "lmsys/longchat-13b-16k",
-    #     tokenizer_mode="auto",
-    #     trust_remote_code=False,
-    #     dtype="float16",
-    #     seed=0,
-    # )
-    # assert getattr(longchat_model_config.hf_config, "rope_scaling",
-    #                None) == LONGCHAT_ROPE_SCALING
-    # assert longchat_model_config.max_model_len == 16384
-
-    # longchat_model_config = ModelConfig(
-    #     "lmsys/longchat-13b-16k",
-    #     "lmsys/longchat-13b-16k",
-    #     tokenizer_mode="auto",
-    #     trust_remote_code=False,
-    #     dtype="float16",
-    #     seed=0,
-    #     rope_scaling=TEST_ROPE_SCALING,
-    # )
-    # assert getattr(longchat_model_config.hf_config, "rope_scaling",
-    #                None) == TEST_ROPE_SCALING
-    # assert longchat_model_config.max_model_len == 4096
+    longchat_model_config = ModelConfig(
+        "lmsys/longchat-13b-16k",
+        "lmsys/longchat-13b-16k",
+        tokenizer_mode="auto",
+        trust_remote_code=False,
+        dtype="float16",
+        seed=0,
+    )
+    # Check if LONGCHAT_ROPE_SCALING entries are in longchat_model_config
+    assert all(
+        longchat_model_config.hf_config.rope_scaling.get(key) == value
+        for key, value in LONGCHAT_ROPE_SCALING.items())
+    assert longchat_model_config.max_model_len == 16384
+
+    longchat_model_config = ModelConfig(
+        "lmsys/longchat-13b-16k",
+        "lmsys/longchat-13b-16k",
+        tokenizer_mode="auto",
+        trust_remote_code=False,
+        dtype="float16",
+        seed=0,
+        rope_scaling=TEST_ROPE_SCALING,
+    )
+    assert getattr(longchat_model_config.hf_config, "rope_scaling",
+                   None) == TEST_ROPE_SCALING
+    assert longchat_model_config.max_model_len == 4096
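
Why the restored longchat assertion checks keys one by one instead of comparing whole dicts: transformers 4.43 normalizes legacy rope_scaling configs at load time, so the resulting hf_config.rope_scaling can carry extra keys (notably a "rope_type" entry mirroring the legacy "type" key), and exact equality against the original two-key dict would fail. A minimal standalone sketch of that per-key check, where `normalized` is a hypothetical stand-in for a real hf_config.rope_scaling:

    # Sketch only: `normalized` imitates what hf_config.rope_scaling may
    # look like after transformers 4.43 adds a "rope_type" key alongside
    # the legacy "type" key.
    LONGCHAT_ROPE_SCALING = {"type": "linear", "factor": 8.0}
    normalized = {"type": "linear", "factor": 8.0, "rope_type": "linear"}

    # Exact equality fails once the loaded config carries extra keys...
    assert normalized != LONGCHAT_ROPE_SCALING

    # ...while the per-key check only requires the expected entries to match.
    assert all(normalized.get(key) == value
               for key, value in LONGCHAT_ROPE_SCALING.items())

To exercise just this test locally, an invocation along the lines of pytest tests/test_config.py -k test_rope_customization should work.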
