[CI/Build] Check transformers v4.47 (vllm-project#10991)
Signed-off-by: DarkLight1337 <[email protected]>
DarkLight1337 authored and weilong.yu committed Dec 13, 2024
1 parent 86ef9e6 commit e6e054d
Showing 3 changed files with 3 additions and 12 deletions.
4 changes: 2 additions & 2 deletions requirements-test.txt
@@ -506,7 +506,7 @@ tiktoken==0.7.0
# mistral-common
timm==1.0.11
# via -r requirements-test.in
-tokenizers==0.20.3
+tokenizers==0.21.0
# via transformers
torch==2.5.1
# via
@@ -534,7 +534,7 @@ tqdm==4.66.6
# transformers
tqdm-multiprocess==0.0.11
# via lm-eval
-transformers==4.46.3
+transformers==4.47.0
# via
# lm-eval
# peft
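The lock-file bump above pins the test environment to transformers 4.47.0 together with the matching tokenizers 0.21.0. As a minimal sketch (not part of this commit; it assumes the packaging library is installed, as it is in most pip-based environments), one could confirm that an environment actually satisfies the new pins like this:

from importlib.metadata import version

from packaging.version import Version

# requirements-test.txt pins exact versions, so check for equality.
assert Version(version("transformers")) == Version("4.47.0")
assert Version(version("tokenizers")) == Version("0.21.0")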
9 changes: 0 additions & 9 deletions — Idefics3 test file (path not shown in this capture)
@@ -3,7 +3,6 @@

import pytest
import torch
-import transformers
from transformers import AutoImageProcessor, AutoTokenizer

from vllm.inputs import InputContext, token_inputs
@@ -36,8 +35,6 @@ def get_max_idefics3_image_tokens():
return get_max_idefics3_image_tokens


-@pytest.mark.skipif(transformers.__version__ < "4.46.0",
-                    reason="Model introduced in HF >= 4.46.0")
@pytest.mark.parametrize("model", models)
@pytest.mark.parametrize("longest_edge", [None, 168, 336, 400, 2 * 336])
def test_input_mapper_override(model: str, image_assets: _ImageAssets,
@@ -77,8 +74,6 @@ def test_input_mapper_override(model: str, image_assets: _ImageAssets,
assert torch.all(hf_result["pixel_values"] == vllm_result["pixel_values"])


-@pytest.mark.skipif(transformers.__version__ < "4.46.0",
-                    reason="Model introduced in HF >= 4.46.0")
@pytest.mark.parametrize("model", models)
@pytest.mark.parametrize("longest_edge, expected_max_tokens", [
(None, 2873),
@@ -107,8 +102,6 @@ def test_max_tokens_override(get_max_idefics3_image_tokens, model: str,
assert expected_max_tokens == actual_max_tokens


-@pytest.mark.skipif(transformers.__version__ < "4.46.0",
-                    reason="Model introduced in HF >= 4.46.0")
@pytest.mark.parametrize("model", models)
@pytest.mark.parametrize("longest_edge, toks_per_img, num_imgs", [
(168, 169, 1),
@@ -143,8 +136,6 @@ def test_dummy_data_override(dummy_data_for_idefics3, model: str,
assert img_tok_count == toks_per_img * num_imgs


-@pytest.mark.skipif(transformers.__version__ < "4.46.0",
-                    reason="Model introduced in HF >= 4.46.0")
@pytest.mark.parametrize("model", models)
@pytest.mark.parametrize("longest_edge,expected_toks_per_img,num_imgs", [
(336, 169 * (1**2 + 1), 1),
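With transformers pinned at 4.47.0 for the test suite, the "< 4.46.0" guards removed above can never trigger in CI, which is presumably why they are dropped rather than updated. It is also worth noting that the removed guards compared raw version strings, which is lexicographic rather than numeric: for example, "4.9.0" < "4.46.0" evaluates to False even though 4.9.0 is the older release. If such a guard were ever reintroduced, a version-parsing variant along these lines would be safer (a sketch only, not part of this commit; requires_hf_4_46 and test_example are illustrative names, and the packaging library is assumed to be available):

import pytest
import transformers
from packaging.version import Version

# Parse the version string so that e.g. 4.9.x is correctly ordered before
# 4.46.0, which plain string comparison would get wrong.
requires_hf_4_46 = pytest.mark.skipif(
    Version(transformers.__version__) < Version("4.46.0"),
    reason="Model introduced in HF >= 4.46.0",
)


@requires_hf_4_46
def test_example():
    ...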
2 changes: 1 addition & 1 deletion tests/models/embedding/vision_language/test_llava_next.py
@@ -86,7 +86,7 @@ def _run_test(
)


-@pytest.mark.skipif(transformers.__version__.startswith("4.46"),
+@pytest.mark.skipif(transformers.__version__ >= "4.46",
                     reason="Model broken with changes in transformers 4.46")
@pytest.mark.core_model
@pytest.mark.parametrize("model", MODELS)
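The revised condition above still compares version strings directly, but for the versions relevant here it does what is intended: the skip now covers transformers 4.46.x as well as 4.47.x, rather than only releases whose version string starts with "4.46". A quick check (not part of the commit):

# Lexicographic comparison happens to order these particular versions correctly.
for v in ("4.45.2", "4.46.3", "4.47.0"):
    print(v, v >= "4.46")
# -> 4.45.2 False
#    4.46.3 True
#    4.47.0 True
# Caveat: the comparison is purely string-based, so a hypothetical
# "4.100.0" >= "4.46" would evaluate to False.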
