Use ascii char for tolerance; remove redundant forward calls in memory_encoder; fix test_with_tolerance function
parthchadha committed Nov 27, 2024
1 parent 4c7b3e3 commit f1eb3e3
Showing 4 changed files with 8 additions and 32 deletions.
6 changes: 3 additions & 3 deletions tripy/examples/segment-anything-model-v2/README.md
@@ -25,9 +25,9 @@ This is an implementation of SAM2 model ([original repository](https://github.co
<!-- Tripy: TEST: EXPECTED_STDOUT Start -->
<!--
```
-Generating image embedding took {137.81±10%} ms per run (averaged over 100 runs, with 5 warmup runs)
-Predicting masks took {37.78±10%} ms per run (averaged over 100 runs, with 5 warmup runs)
-Scores for each prediction: {0.78759766±5%} {0.640625±5%} {0.05099487±5%}
+Generating image embedding took {137.81~10%} ms per run (averaged over 100 runs, with 5 warmup runs)
+Predicting masks took {37.78~10%} ms per run (averaged over 100 runs, with 5 warmup runs)
+Scores for each prediction: {0.78759766~5%} {0.640625~5%} {0.05099487~5%}
```
-->
<!-- Tripy: TEST: EXPECTED_STDOUT End -->
@@ -74,12 +74,8 @@ def __init__(
        self.encoder.append(tp.Conv(mask_out_chans, embed_dim, kernel_dims=(1, 1), dtype=self.dtype))

    def __call__(self, x):
-        return self.forward(x)
-
-    def forward(self, x):
        for l in self.encoder:
            x = l(x)

        return x


@@ -117,9 +113,6 @@ def __init__(
        self.drop_path = Dummy()

    def __call__(self, x):
-        return self.forward(x)
-
-    def forward(self, x):
        input = x
        x = self.dwconv(x)
        x = self.norm(x)
@@ -146,9 +139,6 @@ def __init__(self, layer, num_layers, dim=None, input_projection=False, dtype="f
        self.proj = tp.Conv(dim, dim, kernel_dims=(1, 1), dtype=self.dtype)

    def __call__(self, x):
-        return self.forward(x)
-
-    def forward(self, x):
        x = self.proj(x)
        for layer in self.layers:
            x = layer(x)
@@ -175,14 +165,6 @@ def __call__(
pix_feat: tp.Tensor,
masks: tp.Tensor,
skip_mask_sigmoid: bool = False,
-    ):
-        return self.forward(pix_feat, masks, skip_mask_sigmoid)
-
-    def forward(
-        self,
-        pix_feat: tp.Tensor,
-        masks: tp.Tensor,
-        skip_mask_sigmoid: bool = False,
) -> Tuple[tp.Tensor, tp.Tensor]:
if not skip_mask_sigmoid:
masks = tp.sigmoid(masks)
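All four deletions in this file follow the same pattern: `__call__` no longer delegates to a separate `forward` method, and the body moves directly into `__call__`. A minimal before/after sketch of that refactor, assuming Tripy modules like the ones in the diff (the class below is illustrative, not one of the file's actual modules):

```python
import tripy as tp


class EncoderBefore(tp.Module):
    """Pre-commit shape: __call__ only hops to forward(), which does the real work."""

    def __init__(self, dim: int, num_layers: int):
        super().__init__()
        # 1x1 convolutions, mirroring the tp.Conv layers seen in the diff above.
        self.layers = [tp.Conv(dim, dim, kernel_dims=(1, 1)) for _ in range(num_layers)]

    def __call__(self, x):
        return self.forward(x)  # the redundant hop this commit removes

    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
        return x


class EncoderAfter(tp.Module):
    """Post-commit shape: the loop lives directly in __call__."""

    def __init__(self, dim: int, num_layers: int):
        super().__init__()
        self.layers = [tp.Conv(dim, dim, kernel_dims=(1, 1)) for _ in range(num_layers)]

    def __call__(self, x):
        for layer in self.layers:
            x = layer(x)
        return x
```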
@@ -14,7 +14,7 @@
# limitations under the License.

import copy
-from typing import Optional
+from typing import Optional, Callable

import torch
import torch.nn as nn
@@ -150,7 +150,7 @@ def __init__(
        hidden_dim: int,
        output_dim: int,
        num_layers: int,
-        activation: tp.Module = tp.relu,
+        activation: Callable[[tp.Tensor], tp.Tensor] = tp.relu,
        sigmoid_output: bool = False,
        dtype=tp.float32,
    ) -> None:
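The annotation change above types `activation` as a plain tensor-to-tensor callable rather than a `tp.Module`, matching the `tp.relu` default. A minimal sketch of what the `Callable` annotation admits (the helper below is illustrative, not the example's actual class):

```python
from typing import Callable

import tripy as tp

# Any function from tensor to tensor satisfies the annotation, not just modules.
ActivationFn = Callable[[tp.Tensor], tp.Tensor]


def apply_twice(x: tp.Tensor, activation: ActivationFn = tp.relu) -> tp.Tensor:
    # tp.relu (the default in the diff) is a free function, so it type-checks here;
    # a lambda or any comparable activation would as well.
    return activation(activation(x))


x = tp.ones((2, 3), dtype=tp.float32)
y = apply_twice(x)
```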
12 changes: 3 additions & 9 deletions tripy/tests/test_examples.py
@@ -87,18 +87,12 @@ def __str__(self):
@pytest.mark.parametrize("example", EXAMPLES, ids=lambda case: str(case))
def test_examples(example, sandboxed_install_run):

-    def test_with_tolerance(number, value, tolerance):
-        tolerance = float(tolerance) / 100
-        lower = float(number) * (1 - tolerance)
-        upper = float(number) * (1 + tolerance)
-        try:
-            return lower <= float(value) <= upper
-        except ValueError:
-            return False
+    def test_with_tolerance(expected, actual, tolerance):
+        return (abs(float(actual) - float(expected)) / float(expected)) * 100 <= tolerance

    def process_tolerances(expected_output):
        specs = []
-        placeholder_regex = r"{(\d+\.?\d*)±(\d+)%}"
+        placeholder_regex = r"{(\d+\.?\d*)~(\d+)%}"
        pattern = expected_output

        # Replace tolerance patterns with more flexible capture group
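For reference, `process_tolerances` turns each `{expected~tolerance%}` placeholder into a numeric capture group, and the rewritten `test_with_tolerance` accepts a captured value when its relative difference from the expected one stays within the given percentage. A standalone sketch of that flow (the helper names and sample lines are illustrative, not the test suite's actual code):

```python
import re

# Placeholder syntax used in the expected output: {expected~tolerance%}
PLACEHOLDER = re.compile(r"{(\d+\.?\d*)~(\d+)%}")


def within_tolerance(expected: float, actual: float, tolerance_pct: float) -> bool:
    # Relative difference, expressed as a percentage of the expected value.
    return abs(actual - expected) / expected * 100 <= tolerance_pct


expected_line = "Predicting masks took {37.78~10%} ms per run"
actual_line = "Predicting masks took 39.12 ms per run"

spec = PLACEHOLDER.search(expected_line)
expected_value, tolerance = float(spec.group(1)), float(spec.group(2))

# Swap the placeholder for a capture group, then pull the number out of the real output.
line_regex = re.escape(expected_line).replace(re.escape(spec.group(0)), r"(\d+\.?\d*)")
actual_value = float(re.match(line_regex, actual_line).group(1))

print(within_tolerance(expected_value, actual_value, tolerance))  # True: ~3.5% off, within 10%
```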
