Add efficientnet family including efficient, efficientlite and efficientv2 (#4)

* Fix tf pretrained model
* Code cleanup
* Add comments
* Add draft efficientnet
* Add unit test for efficientnet
* Add efficientnet lite
* Improve unit tests
* Merge efficientnet lite into efficientnet
* Add efficientnet v2
Commit f55d5ac (1 parent: 1186201), showing 16 changed files with 1,880 additions and 101 deletions. Three of the changed test files are shown below.
Updated test for `kimm.layers.attention.Attention` (file path not preserved in this view):

```diff
@@ -1,15 +1,18 @@
 from absl.testing import parameterized
-from keras import random
 from keras.src import testing
 
 from kimm.layers.attention import Attention
 
 
 class AttentionTest(testing.TestCase, parameterized.TestCase):
-    def test_attention(self):
-        x = random.uniform([1, 197, 768])
-        layer = Attention(768)
-
-        y = layer(x)
-
-        self.assertEqual(y.shape, [1, 197, 768])
+    def test_attention_basic(self):
+        self.run_layer_test(
+            Attention,
+            init_kwargs={"hidden_dim": 20, "num_heads": 2},
+            input_shape=(1, 10, 20),
+            expected_output_shape=(1, 10, 20),
+            expected_num_trainable_weights=3,
+            expected_num_non_trainable_weights=0,
+            expected_num_losses=0,
+            supports_masking=False,
+        )
```
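For context, a minimal sketch of what the migrated test exercises, assuming only the constructor arguments visible in `init_kwargs` above: the `Attention` layer maps a `(batch, tokens, hidden_dim)` tensor to a tensor of the same shape.

```python
# Hypothetical usage sketch; shapes and kwargs mirror the test above.
from keras import random

from kimm.layers.attention import Attention

x = random.uniform([1, 10, 20])  # (batch, tokens, hidden_dim)
layer = Attention(hidden_dim=20, num_heads=2)
y = layer(x)
print(y.shape)  # (1, 10, 20): attention preserves the input shape
```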
Updated test for `kimm.layers.layer_scale.LayerScale`:

```diff
@@ -1,15 +1,18 @@
 from absl.testing import parameterized
-from keras import random
 from keras.src import testing
 
 from kimm.layers.layer_scale import LayerScale
 
 
 class LayerScaleTest(testing.TestCase, parameterized.TestCase):
-    def test_layer_scale(self):
-        x = random.uniform([1, 123])
-        layer = LayerScale(123)
-
-        y = layer(x)
-
-        self.assertEqual(y.shape, [1, 123])
+    def test_layer_scale_basic(self):
+        self.run_layer_test(
+            LayerScale,
+            init_kwargs={"hidden_size": 10},
+            input_shape=(1, 10),
+            expected_output_shape=(1, 10),
+            expected_num_trainable_weights=1,
+            expected_num_non_trainable_weights=0,
+            expected_num_losses=0,
+            supports_masking=False,
+        )
```
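Similarly, a hedged sketch of the behaviour this test pins down: judging by `expected_num_trainable_weights=1` and the matching input and output shapes, `LayerScale` scales its input by a single learnable per-feature gain without changing the shape.

```python
# Hypothetical usage sketch; hidden_size mirrors the test's init_kwargs.
from keras import random

from kimm.layers.layer_scale import LayerScale

x = random.uniform([1, 10])
layer = LayerScale(hidden_size=10)
y = layer(x)
print(y.shape)  # (1, 10): same shape, values scaled by a learned vector
```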
Updated test for `kimm.layers.position_embedding.PositionEmbedding`:

```diff
@@ -1,15 +1,26 @@
 from absl.testing import parameterized
-from keras import random
+from keras import layers
 from keras.src import testing
 
 from kimm.layers.position_embedding import PositionEmbedding
 
 
 class PositionEmbeddingTest(testing.TestCase, parameterized.TestCase):
-    def test_position_embedding(self):
-        x = random.uniform([1, 123, 768])
-        layer = PositionEmbedding()
-
-        y = layer(x)
-
-        self.assertEqual(y.shape, [1, 124, 768])
+    def test_position_embedding_basic(self):
+        self.run_layer_test(
+            PositionEmbedding,
+            init_kwargs={},
+            input_shape=(1, 10, 10),
+            expected_output_shape=(1, 11, 10),
+            expected_num_trainable_weights=2,
+            expected_num_non_trainable_weights=0,
+            expected_num_losses=0,
+            supports_masking=False,
+        )
+
+    def test_position_embedding_invalid_input_shape(self):
+        inputs = layers.Input([3])
+        with self.assertRaisesRegex(
+            ValueError, "PositionEmbedding only accepts 3-dimensional input."
+        ):
+            PositionEmbedding()(inputs)
```
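A sketch covering both new test cases, assuming the ViT-style reading suggested by the shapes (two trainable weights and a sequence axis that grows by one, i.e. a learnable token prepended plus position embeddings):

```python
# Hypothetical usage sketch; shapes mirror the tests above.
from keras import layers, random

from kimm.layers.position_embedding import PositionEmbedding

x = random.uniform([1, 10, 10])  # (batch, sequence, features)
y = PositionEmbedding()(x)
print(y.shape)  # (1, 11, 10): sequence axis grows by one token

# Non-3D input is rejected, as the second test asserts.
try:
    PositionEmbedding()(layers.Input([3]))  # only 2D once the batch dim is added
except ValueError as err:
    print(err)  # PositionEmbedding only accepts 3-dimensional input.
```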