From 2b6b8ac258cb75369ba590fd37a6c7a2c054f88c Mon Sep 17 00:00:00 2001
From: Helen Ngo
Date: Thu, 5 Dec 2024 14:14:23 -0800
Subject: [PATCH] ADLR/megatron-lm!2425 - Fix test after new inference default
 added

---
 tests/unit_tests/test_inference.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/unit_tests/test_inference.py b/tests/unit_tests/test_inference.py
index bf70bf298f..140b30125c 100644
--- a/tests/unit_tests/test_inference.py
+++ b/tests/unit_tests/test_inference.py
@@ -53,8 +53,6 @@ def client(app):
 @unittest.mock.patch('megatron.inference.text_generation.communication.mpu')
 @unittest.mock.patch('megatron.inference.text_generation.generation.ForwardStep')
 @unittest.mock.patch('megatron.inference.text_generation.tokenization.get_tokenizer')
-@pytest.mark.flaky
-@pytest.mark.flaky_in_dev
 def test_completions(
     mock_get_tokenizer1,
     mock_forward_step,
@@ -70,7 +68,9 @@ def test_completions(
     Utils.initialize_distributed()
 
     # set up the mocks
-    args = argparse.Namespace(max_position_embeddings=1024, max_tokens_to_oom=1_000_000)
+    args = argparse.Namespace(
+        max_position_embeddings=1024, max_tokens_to_oom=1_000_000, inference_max_seq_length=1024
+    )
     mock_get_args_1.return_value = args
     mock_get_tokenizer1.return_value = gpt2_tiktoken_tokenizer
     mock_get_tokenizer2.return_value = gpt2_tiktoken_tokenizer