
Commit 40b0225

Fix test
vshampor committed Jan 10, 2025
1 parent 0553946 commit 40b0225
Showing 2 changed files with 8 additions and 24 deletions.
2 changes: 2 additions & 0 deletions src/cpp/src/cache_eviction.cpp
@@ -313,6 +313,8 @@ namespace ov::genai {
         size_t num_logical_blocks_before_eviction,
         bool deltas_only) {
 
+    OPENVINO_ASSERT(num_logical_blocks_before_eviction <= m_rope_cos_lut.size(), "more blocks before eviction than possible by max_context_length");
+
     std::vector<BlockRotationData> retval;
     if (evicted_block_logical_indices.empty()) {
         return retval;
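
For context, the new OPENVINO_ASSERT encodes the precondition that the per-block RoPE cos/sin lookup tables, which are sized from max_context_length, must cover every logical block the sequence held before eviction; otherwise the rotation computation would index past the LUT. Below is a minimal standalone sketch of the same bounds-check pattern. The class and function names are illustrative stand-ins, not the actual cache_eviction.cpp internals; only m_rope_cos_lut and the error message are taken from the diff.

#include <cstddef>
#include <stdexcept>
#include <vector>

// Illustrative stand-in: one LUT row per block position up to max_context_length.
class RotationLutSketch {
public:
    explicit RotationLutSketch(size_t max_blocks) : m_rope_cos_lut(max_blocks) {}

    // Same guard the commit adds via OPENVINO_ASSERT before any LUT access.
    void check_block_count(size_t num_logical_blocks_before_eviction) const {
        if (num_logical_blocks_before_eviction > m_rope_cos_lut.size()) {
            throw std::out_of_range(
                "more blocks before eviction than possible by max_context_length");
        }
    }

private:
    std::vector<std::vector<float>> m_rope_cos_lut;
};

int main() {
    RotationLutSketch lut(/*max_blocks=*/8);
    lut.check_block_count(8);      // fine: exactly fills the LUT
    try {
        lut.check_block_count(9);  // rejected: would read past the LUT
    } catch (const std::out_of_range&) {
        // expected
    }
    return 0;
}
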
30 changes: 6 additions & 24 deletions tests/python_tests/test_whisper_pipeline.py
@@ -320,44 +320,26 @@ def test_max_new_tokens(model_descr, sample_from_dataset):
 
 
 @pytest.mark.parametrize("model_descr", get_whisper_models_list(tiny_only=True))
-@pytest.mark.parametrize("sample_from_dataset", [*get_fixture_params_for_n_whisper_dataset_samples(n=1, language="fr"),
-                                                 *get_fixture_params_for_n_whisper_dataset_samples(n=1, language="de")], indirect=True)
+@pytest.mark.parametrize("language", ["fr", "de"])
 @pytest.mark.precommit
-def test_language_mode(model_descr, sample_from_dataset):
-    assert genai_result.texts[0] == expected
-
-    genai_result = pipe.generate(sample_from_dataset)
-
-    assert genai_result.texts[0] != expected
-
-    config = pipe.get_generation_config()
-    config.max_new_tokens = 10
-    genai_result = pipe.generate(sample_from_dataset, config)
-    assert genai_result.texts[0] == expected
-
-
-@pytest.mark.parametrize("model_descr", get_whisper_models_list(tiny_only=True))
-@pytest.mark.parametrize("sample_from_dataset", get_fixture_params_for_n_whisper_dataset_samples(n=3, language="fr"), indirect=True)
-@pytest.mark.precommit
-def test_language_mode_fr(model_descr, sample_from_dataset):
-    model_id, path = model_descr
+def test_language_mode(model_descr, language):
     model_id, path, opt_pipe, pipe = read_whisper_model(model_descr)
-    samples, language = sample_from_dataset
+    sample = get_whisper_dataset(language, long_form=False)[0]
 
     expected = opt_pipe(
-        samples[0], max_new_tokens=30, generate_kwargs={"language": language}
+        sample, max_new_tokens=30, generate_kwargs={"language": language}
     )
 
     genai_result = pipe.generate(
-        samples[0], max_new_tokens=30, language=f"<|{language}|>"
+        sample, max_new_tokens=30, language=f"<|{language}|>"
     )
 
     compare_results(expected, genai_result)
 
     config = pipe.get_generation_config()
     config.max_new_tokens = 30
     config.language = f"<|{language}|>"
-    genai_result = pipe.generate(sample_from_dataset, config)
+    genai_result = pipe.generate(sample, config)
 
     compare_results(expected, genai_result)
 
