From 5619626000c45315b847e043b064ca038ccf641c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Petr=20Van=C4=9Bk?=
Date: Sat, 13 Jul 2024 20:34:36 +0200
Subject: [PATCH] Improve huge_patch test (#60)

* Measure just the performance of the parser

We don't want to measure the time of test data preparation, because it
could be slow in some Python interpreters, depending on how string
concatenation is implemented.

* Improve memory efficiency for test data preparation

Data preparation for the huge_patch test could be very slow because
strings are immutable: each concatenation creates a new string and
discards the old one.

The new approach is to build the large string from smaller parts with
the ''.join() method. This reduces memory usage and improves
performance because it minimizes the number of new string objects
created.
---
 tests/test_patch.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/tests/test_patch.py b/tests/test_patch.py
index cbdaaec..577ee25 100644
--- a/tests/test_patch.py
+++ b/tests/test_patch.py
@@ -1426,8 +1426,7 @@ def test_svn_mixed_line_ends(self):
         self.assertEqual(results[0].header, expected_header)
 
     def test_huge_patch(self):
-        start_time = time.time()
-        text = """diff --git a/huge.file b/huge.file
+        text_parts = ["""diff --git a/huge.file b/huge.file
 index 0000000..1111111 100644
 --- a/huge.file
 +++ a/huge.file
@@ -1439,9 +1438,10 @@ def test_huge_patch(self):
 -44444444
 +55555555
 +66666666
-"""
-        for n in range(0, 1000000):
-            text += "+" + hex(n) + "\n"
+"""]
+        text_parts.extend("+" + hex(n) + "\n" for n in range(0, 1000000))
+        text = ''.join(text_parts)
+        start_time = time.time()
         result = list(wtp.patch.parse_patch(text))
         self.assertEqual(1, len(result))
         self.assertEqual(1000007, len(result[0].changes))
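
As a minimal illustrative sketch of the technique this patch applies
(not part of the patch itself; the helper names below are hypothetical),
the two data-preparation strategies can be compared in isolation:

    import timeit

    def build_concat(n):
        # Repeated += may copy the growing string on each iteration;
        # on interpreters without an in-place append optimization this
        # degrades toward quadratic time in the total string length.
        text = ""
        for i in range(n):
            text += "+" + hex(i) + "\n"
        return text

    def build_join(n):
        # Collect the small parts first, then join once: each part is
        # created exactly once and the final string is allocated in a
        # single pass, as in the patched test.
        return "".join("+" + hex(i) + "\n" for i in range(n))

    # Both strategies produce identical output; only the cost differs.
    assert build_concat(1000) == build_join(1000)
    print("concat:", timeit.timeit(lambda: build_concat(10000), number=10))
    print("join:  ", timeit.timeit(lambda: build_join(10000), number=10))

Moving time.time() after the data preparation, as the patch does, then
ensures the benchmark covers only the parser, regardless of which
strategy the interpreter handles well.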