From 3303a5c4ad89e92fcf672c4592cdff1db3352c62 Mon Sep 17 00:00:00 2001
From: Claudia Comito <39374113+ClaudiaComito@users.noreply.github.com>
Date: Tue, 21 Jan 2025 05:39:49 +0100
Subject: [PATCH] Bug fix: printing non-distributed data (#1756)

* make 1-proc print great again

* fix tabs size

* skip formatter on non-distr data

* remove time import

(cherry picked from commit 3082dd95af6345c38de9fe675e87d51a61266ea1)
---
 heat/core/printing.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/heat/core/printing.py b/heat/core/printing.py
index e06db65c50..5f9f95218d 100644
--- a/heat/core/printing.py
+++ b/heat/core/printing.py
@@ -303,6 +303,9 @@ def _tensor_str(dndarray, indent: int) -> str:
     # to do so, we slice up the torch data and forward it to torch internal printing mechanism
     summarize = elements > get_printoptions()["threshold"]
     torch_data = _torch_data(dndarray, summarize)
+    if not dndarray.is_distributed():
+        # let torch handle formatting on non-distributed data
+        # formatter gets too slow for even moderately large tensors
+        return torch._tensor_str._tensor_str(torch_data, indent)
     formatter = torch._tensor_str._Formatter(torch_data)
-
     return torch._tensor_str._tensor_str_with_formatter(torch_data, indent, summarize, formatter)
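
---

A minimal usage sketch (not part of the patch) of which arrays take the new
fast path. It assumes heat's public `ht.zeros` API with the `split` keyword;
note that `DNDarray.is_distributed()` is False on a single process even for
split arrays, so the formatter-based path below is only exercised when the
script runs under MPI with more than one process.

    import heat as ht

    # split=None: data is replicated on every process, is_distributed() is
    # False, so printing returns via torch._tensor_str._tensor_str directly
    x = ht.zeros((1000, 1000), split=None)
    print(x)

    # split=0: rows are chunked across processes; with more than one MPI
    # process this still uses the _Formatter / _tensor_str_with_formatter path
    y = ht.zeros((1000, 1000), split=0)
    print(y)

Run e.g. as `mpirun -np 2 python example.py` to hit both branches.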