From 247a510c24339352f2cf6a5876f7195bd2b6bf8c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Petr=20Van=C4=9Bk?=
Date: Thu, 20 Jun 2024 15:02:29 +0200
Subject: [PATCH] Improve memory efficiency for test data preparation

Data preparation for the huge_patch test could be very slow: because
strings are immutable, each concatenation creates a new string and
discards the old one.

The new approach builds the large string from smaller parts with the
''.join() method. This reduces memory usage and improves performance
because it minimizes the number of new string objects created.
---
 tests/test_patch.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/test_patch.py b/tests/test_patch.py
index a9fb619..577ee25 100644
--- a/tests/test_patch.py
+++ b/tests/test_patch.py
@@ -1426,7 +1426,7 @@ def test_svn_mixed_line_ends(self):
         self.assertEqual(results[0].header, expected_header)
 
     def test_huge_patch(self):
-        text = """diff --git a/huge.file b/huge.file
+        text_parts = ["""diff --git a/huge.file b/huge.file
 index 0000000..1111111 100644
 --- a/huge.file
 +++ a/huge.file
@@ -1438,9 +1438,9 @@ def test_huge_patch(self):
 -44444444
 +55555555
 +66666666
-"""
-        for n in range(0, 1000000):
-            text += "+" + hex(n) + "\n"
+"""]
+        text_parts.extend("+" + hex(n) + "\n" for n in range(0, 1000000))
+        text = ''.join(text_parts)
         start_time = time.time()
         result = list(wtp.patch.parse_patch(text))
         self.assertEqual(1, len(result))
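
For illustration only (not part of the patch), a minimal sketch of the two
string-building approaches the commit message contrasts; the chunk count here
is hypothetical and chosen only to keep the example small:

    # Repeated concatenation: each += creates a brand-new string and copies
    # the old contents into it, so the total work can grow roughly
    # quadratically with the number of chunks.
    text = ""
    for n in range(1000):
        text += "+" + hex(n) + "\n"

    # ''.join() over a list of parts: the parts are collected first and
    # copied once into the final string, so the work stays roughly linear.
    parts = ["+" + hex(n) + "\n" for n in range(1000)]
    text = "".join(parts)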