diff --git a/test/test_api.py b/test/test_api.py
index 0619f2e9..34aa0ae4 100755
--- a/test/test_api.py
+++ b/test/test_api.py
@@ -5,6 +5,8 @@
 """
 
 import os
+from contextlib import redirect_stderr
+from io import StringIO
 from unittest import main
 
 import click
@@ -23,13 +25,14 @@ def test_call_align(self):
         # API accepts them too.
         langs = ("fra",)  # make sure language can be an iterable, not just a list.
         with SoundSwallowerStub("t0b0d0p0s0w0:920:1520", "t0b0d0p0s1w0:1620:1690"):
-            (status, exception, log) = api.align(
-                self.data_dir / "ej-fra.txt",
-                self.data_dir / "ej-fra.m4a",
-                self.tempdir / "output",
-                langs,
-                output_formats=["html", "TextGrid", "srt"],
-            )
+            with redirect_stderr(StringIO()):
+                (status, exception, log) = api.align(
+                    self.data_dir / "ej-fra.txt",
+                    self.data_dir / "ej-fra.m4a",
+                    self.tempdir / "output",
+                    langs,
+                    output_formats=["html", "TextGrid", "srt"],
+                )
         self.assertEqual(status, 0)
         self.assertTrue(exception is None)
         self.assertIn("Words () not present; tokenizing", log)
@@ -53,16 +56,18 @@ def test_call_align(self):
             "Make sure the API call doesn't not modify my variables",
         )
 
-        (status, exception, log) = api.align("", "", self.tempdir / "errors")
+        with redirect_stderr(StringIO()):
+            (status, exception, log) = api.align("", "", self.tempdir / "errors")
         self.assertNotEqual(status, 0)
         self.assertFalse(exception is None)
 
     def test_call_make_xml(self):
-        (status, exception, log) = api.make_xml(
-            self.data_dir / "ej-fra.txt",
-            self.tempdir / "prepared.readalong",
-            ("fra", "eng"),
-        )
+        with redirect_stderr(StringIO()):
+            (status, exception, log) = api.make_xml(
+                self.data_dir / "ej-fra.txt",
+                self.tempdir / "prepared.readalong",
+                ("fra", "eng"),
+            )
         self.assertEqual(status, 0)
         self.assertTrue(exception is None)
         self.assertIn("Wrote ", log)
diff --git a/test/test_audio.py b/test/test_audio.py
index 5814c413..5ab69359 100755
--- a/test/test_audio.py
+++ b/test/test_audio.py
@@ -41,9 +41,6 @@ def align(self, input_text_path, input_audio_path, output_path, flags):
             input_audio_path,
             output_path,
         ] + flags
-        LOGGER.info(
-            f"Aligning {input_text_path} and {input_audio_path}, outputting to {output_path}"
-        )
         return run(args, capture_output=True, check=False, encoding="utf-8")
 
     def test_mute_section(self):
diff --git a/test/test_dna_text.py b/test/test_dna_text.py
index 44fabfa5..e01bc37d 100755
--- a/test/test_dna_text.py
+++ b/test/test_dna_text.py
@@ -2,6 +2,8 @@
 
 """Test handling of DNA text in tokenization"""
 
+from contextlib import redirect_stderr
+from io import StringIO
 from unittest import main
 
 from basic_test_case import BasicTestCase
@@ -23,7 +25,8 @@ def test_tok_all_words(self):
 Voici une deuxième phrase.
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         as_txt = etree.tounicode(tokenized)
         # print(etree.tounicode(tokenized))
 
@@ -54,7 +57,8 @@ def test_tok_some_words(self):
 Un mot ou deux à exclure.
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         as_txt = etree.tounicode(tokenized)
         # print('as_txt="' + as_txt +'"')
 
@@ -96,7 +100,8 @@ def test_tok_div_p_s(self):
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         as_txt = etree.tounicode(tokenized)
         # print('as_txt="' + as_txt +'"')
 
@@ -143,7 +148,8 @@ def test_dna_word(self):
         txt = """Une exclude phrase."""
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         self.assertRaises(RuntimeError, add_ids, tokenized)
 
     def test_dna_word_nested(self):
@@ -151,7 +157,8 @@ def test_dna_word_nested(self):
         txt = """Une exclude phrase."""
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         self.assertRaises(RuntimeError, add_ids, tokenized)
diff --git a/test/test_g2p_cli.py b/test/test_g2p_cli.py
index 830ce549..554a2be3 100755
--- a/test/test_g2p_cli.py
+++ b/test/test_g2p_cli.py
@@ -4,6 +4,8 @@
 
 import os
 import re
+from contextlib import redirect_stderr
+from io import StringIO
 from unittest import main
 
 from basic_test_case import BasicTestCase
@@ -303,8 +305,9 @@ def test_align_with_invalid_preg2p(self):
             self.assertIn('error', results.output)
 
         audio_file = os.path.join(self.data_dir, "ej-fra.m4a")
-        with self.assertRaises(RuntimeError) as e:
-            results = align_audio(input_file, audio_file)
+        with redirect_stderr(StringIO()):
+            with self.assertRaises(RuntimeError) as e:
+                results = align_audio(input_file, audio_file)
         self.assertIn("could not be g2p'd", str(e.exception))
 
     def test_align_with_preg2p(self):
@@ -330,9 +333,10 @@ def test_align_with_preg2p(self):
             "t0b0d0p0s3w2:15:16",
             "t0b0d0p0s3w3:16:17",
         ):
-            _ = align_audio(
-                text_file, audio_file, save_temps=os.path.join(self.tempdir, "foo")
-            )
+            with redirect_stderr(StringIO()):
+                _ = align_audio(
+                    text_file, audio_file, save_temps=os.path.join(self.tempdir, "foo")
+                )
         with open(os.path.join(self.tempdir, "foo.dict"), "r", encoding="utf8") as f:
             dict_file = f.read()
         self.assertIn("S AH S IY", dict_file)  # "ceci" in fra
@@ -452,7 +456,8 @@ def test_convert_xml_invalid(self):
         self.assertTrue(valid, "convert_xml with valid pre-g2p'd text")
 
         xml = parse_xml('invalid')
-        c_xml, valid = convert_xml(xml)
+        with redirect_stderr(StringIO()):
+            c_xml, valid = convert_xml(xml)
         self.assertEqual(
             etree.tounicode(c_xml), 'invalid'
         )
diff --git a/test/test_misc.py b/test/test_misc.py
index 9ce219ec..85df66d6 100755
--- a/test/test_misc.py
+++ b/test/test_misc.py
@@ -293,9 +293,9 @@ def test_capture_logs_some_more(self):
         with capture_logs() as captured_logs:
             LOGGER.info("this will be captured")
         self.assertIn("this will be captured", captured_logs.getvalue())
-        with self.assertLogs():
+        with self.assertLogs(LOGGER):
             LOGGER.info("blah")
-        with self.assertLogs() as cm:
+        with self.assertLogs(LOGGER) as cm:
             with capture_logs() as captured_logs:
                 LOGGER.info("This text does not propagate to root")
             LOGGER.info("This text is included in root")
diff --git a/test/test_tokenize_xml.py b/test/test_tokenize_xml.py
index 675eee01..744ce499 100755
--- a/test/test_tokenize_xml.py
+++ b/test/test_tokenize_xml.py
@@ -2,6 +2,8 @@
 
 """Unit test suite for our XML tokenizer module"""
 
+from contextlib import redirect_stderr
+from io import StringIO
 from unittest import TestCase, main
 
 from lxml import etree
@@ -23,7 +25,8 @@ def test_simple(self):
 Kwei! Tan e ici matisihin?
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         # print(etree.tounicode(tokenized))
         self.assertEqual(etree.tounicode(tokenized), ref)
 
@@ -39,7 +42,8 @@ def test_mixed_lang(self):
 Bonjour! Comment ça va?
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         # print(etree.tounicode(tokenized))
         self.assertEqual(etree.tounicode(tokenized), ref)
 
@@ -55,7 +59,8 @@ def test_mixed_words(self):
 Tan e ici matisihin?
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         # print(etree.tounicode(tokenized))
         self.assertEqual(etree.tounicode(tokenized), ref)
 
@@ -73,7 +78,8 @@ def test_comments(self):
 Tan e ici matisihin?
 """
         xml = parse_xml(txt)
-        tokenized = tokenize_xml.tokenize_xml(xml)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml.tokenize_xml(xml)
         # print(etree.tounicode(tokenized))
         self.assertEqual(etree.tounicode(tokenized), ref)
 
diff --git a/test/test_web_api.py b/test/test_web_api.py
index 9d92e3f1..bcb39720 100755
--- a/test/test_web_api.py
+++ b/test/test_web_api.py
@@ -2,6 +2,8 @@
 
 import os
 import re
+from contextlib import redirect_stderr
+from io import StringIO
 from textwrap import dedent
 from unittest import main
 
@@ -46,17 +48,20 @@ def test_assemble_from_plain_text(self):
             "type": "text/plain",
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 200)
 
     def test_bad_path(self):
         # Test a request to a path that doesn't exist
-        response = self.API_CLIENT.get("/pathdoesntexist")
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.get("/pathdoesntexist")
         self.assertEqual(response.status_code, 404)
 
     def test_bad_method(self):
         # Test a request to a valid path with a bad method
-        response = self.API_CLIENT.get("/api/v1/assemble")
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.get("/api/v1/assemble")
         self.assertEqual(response.status_code, 405)
 
     def test_assemble_from_xml(self):
@@ -67,7 +72,8 @@ def test_assemble_from_xml(self):
             "type": "application/readalong+xml",
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 200)
 
@@ -77,7 +83,8 @@ def test_illformed_xml(self):
             "type": "application/readalong+xml",
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 422)
 
@@ -87,13 +94,15 @@ def test_invalid_ras(self):
             "type": "application/readalong+xml",
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 422)
 
     def test_create_grammar(self):
         # Test the create grammar function
         parsed = parse_xml(self.slurp_data_file("ej-fra.readalong"))
-        tokenized = tokenize_xml(parsed)
+        with redirect_stderr(StringIO()):
+            tokenized = tokenize_xml(parsed)
         ids_added = add_ids(tokenized)
         g2ped, valid = convert_xml(ids_added)
         from readalongs.web_api import create_grammar
@@ -110,7 +119,8 @@ def test_bad_g2p(self):
             "type": "text/plain",
             "text_languages": ["test"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertIn("No language called", response.json()["detail"])
         self.assertEqual(response.status_code, 422)
 
@@ -121,7 +131,8 @@ def test_g2p_faiture(self):
             "type": "text/plain",
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 422)
         content = response.json()
         self.assertIn("No valid g2p conversion", content["detail"])
@@ -133,7 +144,8 @@ def test_no_words(self):
             "type": "text/plain",
             "text_languages": ["eng"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 422)
         content = response.json()
         self.assertIn("Could not find any words", content["detail"])
@@ -146,7 +158,8 @@ def test_empty_g2p(self):
             "type": "text/plain",
             "text_languages": ["eng", "und"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         self.assertEqual(response.status_code, 422)
         content_log = response.json()["detail"]
         for message_part in ["The output of the g2p process", "24", "23", "is empty"]:
@@ -154,7 +167,8 @@ def test_langs(self):
         # Test the langs endpoint
-        response = self.API_CLIENT.get("/api/v1/langs")
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.get("/api/v1/langs")
         codes = [x["code"] for x in response.json()]
         self.assertEqual(set(codes), set(get_langs()[0]))
         self.assertEqual(codes, list(sorted(codes)))
@@ -170,7 +184,8 @@ def test_logs(self):
             "debug": True,
             "text_languages": ["fra", "und"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         content = response.json()
         # print("Content", content)
         self.assertIn('Could not g2p "ña" as French', content["log"])
@@ -183,7 +198,8 @@ def test_debug(self):
             "debug": True,
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         content = response.json()
         self.assertEqual(content["input"], request)
         self.assertGreater(len(content["tokenized"]), 10)
@@ -196,7 +212,8 @@ def test_debug(self):
             "type": "text/plain",
             "text_languages": ["fra"],
         }
-        response = self.API_CLIENT.post("/api/v1/assemble", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post("/api/v1/assemble", json=request)
         content = response.json()
         self.assertIsNone(content["input"])
         self.assertIsNone(content["tokenized"])
@@ -229,9 +246,10 @@ def test_convert_to_TextGrid(self):
             "dur": 83.1,
             "ras": self.hej_verden_xml,
         }
-        response = self.API_CLIENT.post(
-            "/api/v1/convert_alignment/textgrid", json=request
-        )
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post(
+                "/api/v1/convert_alignment/textgrid", json=request
+            )
         self.assertEqual(response.status_code, 200)
         self.assertIn("aligned.TextGrid", response.headers["content-disposition"])
         self.assertEqual(
@@ -293,9 +311,10 @@ class = "IntervalTier"
         request = {
             "ras": self.hej_verden_xml,
         }
-        response = self.API_CLIENT.post(
-            "/api/v1/convert_alignment/textgrid", json=request
-        )
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post(
+                "/api/v1/convert_alignment/textgrid", json=request
+            )
         self.assertEqual(response.status_code, 200)
         self.assertIn("aligned.TextGrid", response.headers["content-disposition"])
         self.assertNotIn("xmax = 83.100000", response.text)
@@ -305,7 +324,10 @@ def test_convert_to_eaf(self):
             "dur": 83.1,
             "ras": self.hej_verden_xml,
         }
-        response = self.API_CLIENT.post("/api/v1/convert_alignment/eaf", json=request)
+        with redirect_stderr(StringIO()):
+            response = self.API_CLIENT.post(
+                "/api/v1/convert_alignment/eaf", json=request
+            )
         self.assertEqual(response.status_code, 200)
         self.assertIn("