@@ -128,8 +128,8 @@ def forward(self, x: Tensor, **kwargs: Any) -> Tensor:
 
         blocks: list[Tensor] = []
 
-        shape = self.raw_output_shape  # target output shape
-        ndim = len(shape)  # number of feature dimensions incl. batch
+        shape = self.raw_output_shape  # target output shape
+        ndim = len(shape)  # number of feature dimensions incl. batch
 
         for feature_map in self:
             # 1. Evaluate (dense) features for the current sub-map.
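As a standalone illustration of step 1 (not part of the diff): each sub-map is evaluated on the input batch and its dense feature block is collected. The two lambda sub-maps below are hypothetical stand-ins for the container's entries.

import torch

# Hypothetical sub-maps standing in for the container's entries: each maps a
# batch of inputs with shape (n, d) to a dense feature block.
feature_maps = [
    lambda x: torch.cos(x @ torch.randn(x.shape[-1], 4)),  # block with 4 features
    lambda x: torch.sin(x @ torch.randn(x.shape[-1], 2)),  # block with 2 features
]

x = torch.randn(8, 3)
blocks = [fm(x) for fm in feature_maps]  # step 1 of the loop above
print([b.shape for b in blocks])         # [torch.Size([8, 4]), torch.Size([8, 2])]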
@@ -152,7 +152,7 @@ def forward(self, x: Tensor, **kwargs: Any) -> Tensor:
             # only up to such a scaling).
             num_copies = prod(tile_shape)
             if num_copies > 1:
-                block = block * (num_copies ** -0.5)
+                block = block * (num_copies ** -0.5)
 
             # ``multi_index`` inserts ``None`` (i.e. `None` in slice syntax)
             # so that broadcasting expands the tensor along the new axes
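A minimal standalone sketch of the two ideas above, assuming plain PyTorch tensors and a hypothetical `tile_shape` describing the axes along which the block is repeated: indexing with ``None`` inserts singleton axes so broadcasting can tile the block, and the ``num_copies ** -0.5`` factor keeps the implied inner product unchanged.

import torch

block = torch.randn(5)        # dense features from one sub-map
tile_shape = (3, 2)           # axes along which the block gets repeated
num_copies = 3 * 2

# ``None`` in the index inserts singleton axes (the role played by
# ``multi_index`` in the diff); broadcasting then tiles the block.
tiled = block[None, None, :].expand(*tile_shape, -1) * (num_copies ** -0.5)

# The inner product <phi(x), phi(x)> is preserved despite the 6 copies.
print(torch.dot(block, block))   # original value
print((tiled * tiled).sum())     # same value after tiling + rescaling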
@@ -174,8 +174,8 @@ def forward(self, x: Tensor, **kwargs: Any) -> Tensor:
 
     @property
     def raw_output_shape(self) -> Size:
-        # If the container is empty (e.g. DirectSumFeatureMap([])), treat the
-        # output as 0-D until feature maps are added.
+        # If the container is empty (e.g. DirectSumFeatureMap([])), treat the
+        # output as 0-D until feature maps are added.
         if not self:
             return Size([])
 
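For the empty-container branch, a quick standalone check of what ``Size([])`` means (hypothetical usage, following the comment's own ``DirectSumFeatureMap([])`` example).

import torch

# torch.Size([]) is the shape of a 0-D (scalar) tensor, so an empty container
# reports a 0-D output until feature maps are added.
empty_shape = torch.Size([])
print(len(empty_shape))                 # 0 -> no feature/batch dimensions
print(torch.empty(empty_shape).dim())   # 0 -> a 0-D tensor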