Fix (learned_round): use of named tensor
Giuseppe5 committed Oct 3, 2023
1 parent a2d85ec commit 6d386d6
Showing 2 changed files with 15 additions and 4 deletions.
src/brevitas/nn/mixin/base.py (4 changes: 2 additions & 2 deletions)
```diff
@@ -172,8 +172,8 @@ def unpack_input(self, inp: Union[Tensor, QuantTensor]):
                 cached_inp = _CachedIO(inp.detach(), self.cache_quant_io_metadata_only)
                 self._cached_inp = cached_inp
         # Remove any naming metadata to avoid downstream errors
-        if not torch._C._get_tracing_state():
-            inp.value.rename_(None)
+        # if not torch._C._get_tracing_state():
+        #     inp = QuantTensor(inp.value.rename(None), inp.scale, inp.zero_point, inp.bit_width, inp.signed, inp.training)
         return inp

     def pack_output(self, quant_output: QuantTensor):
```
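For context, PyTorch named tensors carry per-dimension names as metadata, and many downstream ops refuse tensors that still carry names, which is why this code strips them before returning. The deleted lines removed the names in place with `rename_(None)`, mutating the caller's tensor; the commented-out replacement would instead rebuild the QuantTensor around an out-of-place `rename(None)` copy. A minimal standalone sketch of the difference (plain PyTorch, not Brevitas code):

```python
import torch

# A named tensor: dimension names ride along as metadata.
t = torch.randn(4, 3, names=('N', 'C'))
print(t.names)           # ('N', 'C')

# Out-of-place: returns an unnamed view; `t` keeps its names.
u = t.rename(None)
print(u.names, t.names)  # (None, None) ('N', 'C')

# In-place: strips the names from `t` itself, so any caller still
# holding a reference to `t` sees the change too.
t.rename_(None)
print(t.names)           # (None, None)
```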
Second changed file (13 additions & 2 deletions):

```diff
@@ -53,6 +53,17 @@ def __init__(self, store_output: False):
         self.output_store = None

     def __call__(self, module, input_batch, output_batch):
+        input_batch = input_batch[0]
+
+        if hasattr(input_batch, 'names') and 'N' in input_batch.names:
+            batch_dim = input_batch.names.index('N')
+
+            input_batch.rename_(None)
+            input_batch = input_batch.transpose(0, batch_dim)
+            if self.store_output:
+                output_batch.rename_(None)
+                output_batch = output_batch.transpose(0, batch_dim)
+
         if self.store_output:
             self.output_store = output_batch
         self.input_store = input_batch
```
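The new hook body looks for a dimension tagged with the name 'N' (the batch dimension), records its position, strips the naming metadata, and transposes that dimension to the front before the tensors are stored. A standalone sketch of the same pattern, with `move_batch_first` as a hypothetical helper name:

```python
import torch

def move_batch_first(x: torch.Tensor) -> torch.Tensor:
    # If some dimension is named 'N', drop the names and move it to dim 0.
    if hasattr(x, 'names') and 'N' in x.names:
        batch_dim = x.names.index('N')  # remember the position first
        x.rename_(None)                 # strip naming metadata in place
        x = x.transpose(0, batch_dim)   # batch dimension now leads
    return x

x = torch.randn(3, 8, names=('C', 'N'))  # batch sits in position 1
y = move_batch_first(x)
print(y.shape)  # torch.Size([8, 3])
```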
```diff
@@ -183,9 +194,9 @@ def save_inp_out_data(
             pass
     if store_inp:
         if keep_gpu:
-            cached[0].append(data_saver.input_store[0].detach())
+            cached[0].append(data_saver.input_store.detach())
         else:
-            cached[0].append(data_saver.input_store[0].detach().cpu())
+            cached[0].append(data_saver.input_store.detach().cpu())
     if store_out:
         if keep_gpu:
             cached[1].append(data_saver.output_store.detach())
```
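The two changed lines follow from the hook above: `__call__` now unpacks `input_batch[0]` itself, so `input_store` already holds a tensor and the extra `[0]` indexing in `save_inp_out_data` is dropped. For background, PyTorch forward hooks always receive the module's positional inputs as a tuple, which is what the unpacking accounts for; a minimal sketch (the `saved` dict and `hook` are illustrative names):

```python
import torch
import torch.nn as nn

saved = {}

def hook(module, inputs, output):
    # `inputs` is a tuple of the module's positional arguments,
    # hence the inputs[0] unpacking mirrored by DataSaverHook.
    saved['inp'] = inputs[0].detach()
    saved['out'] = output.detach()

m = nn.Linear(4, 2)
handle = m.register_forward_hook(hook)
m(torch.randn(8, 4))
handle.remove()
print(saved['inp'].shape, saved['out'].shape)  # (8, 4) and (8, 2)
```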
