Skip to content

Commit

Permalink
hb
Browse files Browse the repository at this point in the history
  • Loading branch information
MLRichter committed May 2, 2022
1 parent 044effb commit 45471a7
Showing 1 changed file with 179 additions and 0 deletions.
179 changes: 179 additions & 0 deletions models/efficientnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -637,6 +637,7 @@ def efficientnet_b5_perf3(*args, **kwargs):
kwargs.pop("pretrained")
model = _efficientnet("efficientnet_b5", width_mult=1.6, depth_mult=2.2, dropout=0.2, pretrained=False,
progress=True, inverted_residual_setting=inverted_residual_setting,
norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
**kwargs)
model.name = "EfficentNetB5_Performance3"
return model
Expand All @@ -658,6 +659,8 @@ def efficientnet_b6_perf3(*args, **kwargs):
kwargs.pop("pretrained")
model = _efficientnet("efficientnet_b6", width_mult=1.8, depth_mult=2.6, dropout=0.2, pretrained=False,
progress=True, inverted_residual_setting=inverted_residual_setting,
norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),

**kwargs)
model.name = "EfficentNetB6_Performance3"
return model
Expand All @@ -679,6 +682,7 @@ def efficientnet_b7_perf3(*args, **kwargs):
kwargs.pop("pretrained")
model = _efficientnet("efficientnet_b7", width_mult=2.0, depth_mult=3.1, dropout=0.2, pretrained=False,
progress=True, inverted_residual_setting=inverted_residual_setting,
norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
**kwargs)
model.name = "EfficentNetB7_Performance3"
return model
Expand Down Expand Up @@ -853,6 +857,93 @@ def efficientnet_b3_perf5(*args, **kwargs):
return model


@register_model
def efficientnet_b4_perf5(*args, **kwargs):
    """Build the EfficientNet-B4 "Performance 5" variant.

    Uses B4 compound scaling (width 1.4, depth 1.8) over a custom stage
    layout. Pretrained weights are not available, so any ``pretrained``
    request from the caller is dropped.
    """
    # Stage layout: (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    stage_params = (
        (1, 3, 1, 32, 16, 1),
        (6, 3, 2, 16, 24, 2),
        (6, 5, 2, 24, 40, 2),
        (6, 3, 1, 40, 80, 3),
        (6, 5, 2, 80, 112, 3),
        (6, 5, 1, 112, 192, 4),
        (6, 3, 1, 192, 320, 1),
    )
    make_cfg = partial(MBConvConfig, width_mult=1.4, depth_mult=1.8)
    inverted_residual_setting = [make_cfg(*params) for params in stage_params]

    # No checkpoint exists for this custom layout; force pretrained=False.
    kwargs.pop("pretrained", None)
    model = _efficientnet(
        "efficientnet_b4",
        width_mult=1.4,
        depth_mult=1.8,
        dropout=0.4,
        pretrained=False,
        progress=True,
        inverted_residual_setting=inverted_residual_setting,
        **kwargs,
    )
    model.name = "EfficentNetB4_Performance5"
    return model


@register_model
def efficientnet_b5_perf5(*args, **kwargs):
    """Build the EfficientNet-B5 "Performance 5" variant.

    Uses B5 compound scaling (width 1.6, depth 2.2) over a custom stage
    layout, with the B5-style BatchNorm settings (eps=0.001, momentum=0.01).
    Pretrained weights are not available, so any ``pretrained`` request from
    the caller is dropped.
    """
    # Stage layout: (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    stage_params = (
        (1, 3, 1, 32, 16, 1),
        (6, 3, 2, 16, 24, 2),
        (6, 5, 2, 24, 40, 2),
        (6, 3, 1, 40, 80, 3),
        (6, 5, 2, 80, 112, 3),
        (6, 5, 1, 112, 192, 4),
        (6, 3, 1, 192, 320, 1),
    )
    make_cfg = partial(MBConvConfig, width_mult=1.6, depth_mult=2.2)
    inverted_residual_setting = [make_cfg(*params) for params in stage_params]

    # No checkpoint exists for this custom layout; force pretrained=False.
    kwargs.pop("pretrained", None)
    model = _efficientnet(
        "efficientnet_b5",
        width_mult=1.6,
        depth_mult=2.2,
        dropout=0.4,
        pretrained=False,
        progress=True,
        inverted_residual_setting=inverted_residual_setting,
        norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
        **kwargs,
    )
    model.name = "EfficentNetB5_Performance5"
    return model


@register_model
def efficientnet_b6_perf5(*args, **kwargs):
    """Build the EfficientNet-B6 "Performance 5" variant.

    Uses B6 compound scaling (width 1.8, depth 2.6) over a custom stage
    layout, with B6-style BatchNorm settings (eps=0.001, momentum=0.01).
    Pretrained weights are not available, so any ``pretrained`` request from
    the caller is dropped.
    """
    bneck_conf = partial(MBConvConfig, width_mult=1.8, depth_mult=2.6)
    # (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    inverted_residual_setting = [
        bneck_conf(1, 3, 1, 32, 16, 1),
        bneck_conf(6, 3, 2, 16, 24, 2),
        bneck_conf(6, 5, 2, 24, 40, 2),
        bneck_conf(6, 3, 1, 40, 80, 3),
        bneck_conf(6, 5, 2, 80, 112, 3),
        bneck_conf(6, 5, 1, 112, 192, 4),
        bneck_conf(6, 3, 1, 192, 320, 1),
    ]
    if "pretrained" in kwargs:
        kwargs.pop("pretrained")
    # BUGFIX: arch tag and display name previously said "b5"/"B5" — this is
    # the B6 variant (copy-paste leftover from efficientnet_b5_perf5).
    model = _efficientnet("efficientnet_b6", width_mult=1.8, depth_mult=2.6, dropout=0.5, pretrained=False,
                          progress=True, inverted_residual_setting=inverted_residual_setting,
                          norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
                          **kwargs)
    model.name = "EfficentNetB6_Performance5"
    return model


@register_model
def efficientnet_b7_perf5(*args, **kwargs):
    """Build the EfficientNet-B7 "Performance 5" variant.

    Uses B7 compound scaling (width 2.0, depth 3.1) over a custom stage
    layout, with B7-style BatchNorm settings (eps=0.001, momentum=0.01).
    Pretrained weights are not available, so any ``pretrained`` request from
    the caller is dropped.
    """
    # Stage layout: (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    stage_params = (
        (1, 3, 1, 32, 16, 1),
        (6, 3, 2, 16, 24, 2),
        (6, 5, 2, 24, 40, 2),
        (6, 3, 1, 40, 80, 3),
        (6, 5, 2, 80, 112, 3),
        (6, 5, 1, 112, 192, 4),
        (6, 3, 1, 192, 320, 1),
    )
    make_cfg = partial(MBConvConfig, width_mult=2.0, depth_mult=3.1)
    inverted_residual_setting = [make_cfg(*params) for params in stage_params]

    # No checkpoint exists for this custom layout; force pretrained=False.
    kwargs.pop("pretrained", None)
    model = _efficientnet(
        "efficientnet_b7",
        width_mult=2.0,
        depth_mult=3.1,
        dropout=0.5,
        pretrained=False,
        progress=True,
        inverted_residual_setting=inverted_residual_setting,
        norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
        **kwargs,
    )
    model.name = "EfficentNetB7_Performance5"
    return model


@register_model
def efficientnet_b0_perf42(*args, **kwargs):
Expand Down Expand Up @@ -938,6 +1029,94 @@ def efficientnet_b3_perf42(*args, **kwargs):
return model


@register_model
def efficientnet_b4_perf42(*args, **kwargs):
    """Build the EfficientNet-B4 "Performance 42" variant.

    Uses B4 compound scaling (width 1.4, depth 1.8) over a shortened custom
    stage layout (six stages instead of seven). Pretrained weights are not
    available, so any ``pretrained`` request from the caller is dropped.
    """
    bneck_conf = partial(MBConvConfig, width_mult=1.4, depth_mult=1.8)
    # (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    inverted_residual_setting = [
        bneck_conf(1, 3, 1, 32, 16, 1),
        bneck_conf(6, 3, 2, 16, 24, 2),
        bneck_conf(6, 5, 2, 24, 40, 2),
        bneck_conf(6, 3, 2, 40, 80, 3),
        bneck_conf(6, 5, 1, 80, 165, 7),
        bneck_conf(6, 5, 2, 165, 320, 1),
    ]
    if "pretrained" in kwargs:
        kwargs.pop("pretrained")
    # BUGFIX: arch tag was "efficientnet_b3" and the display name was
    # "EfficentNetB2_Performance42" — copy-paste leftovers; this is the B4
    # variant (width 1.4 / depth 1.8). pretrained=False, so the tag is only
    # an identifier, but it must match the function it names.
    model = _efficientnet("efficientnet_b4", width_mult=1.4, depth_mult=1.8, dropout=0.4, pretrained=False,
                          progress=True, inverted_residual_setting=inverted_residual_setting,
                          **kwargs)
    model.name = "EfficentNetB4_Performance42"
    return model


@register_model
def efficientnet_b5_perf42(*args, **kwargs):
    """Build the EfficientNet-B5 "Performance 42" variant.

    Uses B5 compound scaling (width 1.6, depth 2.2) over a shortened custom
    stage layout (six stages instead of seven), with B5-style BatchNorm
    settings (eps=0.001, momentum=0.01). Pretrained weights are not
    available, so any ``pretrained`` request from the caller is dropped.
    """
    bneck_conf = partial(MBConvConfig, width_mult=1.6, depth_mult=2.2)
    # (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    inverted_residual_setting = [
        bneck_conf(1, 3, 1, 32, 16, 1),
        bneck_conf(6, 3, 2, 16, 24, 2),
        bneck_conf(6, 5, 2, 24, 40, 2),
        bneck_conf(6, 3, 2, 40, 80, 3),
        bneck_conf(6, 5, 1, 80, 165, 7),
        bneck_conf(6, 5, 2, 165, 320, 1),
    ]
    if "pretrained" in kwargs:
        kwargs.pop("pretrained")
    # BUGFIX: arch tag was "efficientnet_b3" and the display name was
    # "EfficentNetB2_Performance42" — copy-paste leftovers; this is the B5
    # variant (width 1.6 / depth 2.2).
    model = _efficientnet("efficientnet_b5", width_mult=1.6, depth_mult=2.2, dropout=0.4, pretrained=False,
                          norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
                          progress=True, inverted_residual_setting=inverted_residual_setting,
                          **kwargs)
    model.name = "EfficentNetB5_Performance42"
    return model


@register_model
def efficientnet_b6_perf42(*args, **kwargs):
    """Build the EfficientNet-B6 "Performance 42" variant.

    Uses B6 compound scaling (width 1.8, depth 2.6) over a shortened custom
    stage layout (six stages instead of seven), with B6-style BatchNorm
    settings (eps=0.001, momentum=0.01). Pretrained weights are not
    available, so any ``pretrained`` request from the caller is dropped.
    """
    bneck_conf = partial(MBConvConfig, width_mult=1.8, depth_mult=2.6)
    # (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    inverted_residual_setting = [
        bneck_conf(1, 3, 1, 32, 16, 1),
        bneck_conf(6, 3, 2, 16, 24, 2),
        bneck_conf(6, 5, 2, 24, 40, 2),
        bneck_conf(6, 3, 2, 40, 80, 3),
        bneck_conf(6, 5, 1, 80, 165, 7),
        bneck_conf(6, 5, 2, 165, 320, 1),
    ]
    if "pretrained" in kwargs:
        kwargs.pop("pretrained")
    # BUGFIX: arch tag was "efficientnet_b3" and the display name was
    # "EfficentNetB2_Performance42" — copy-paste leftovers; this is the B6
    # variant (width 1.8 / depth 2.6).
    model = _efficientnet("efficientnet_b6", width_mult=1.8, depth_mult=2.6, dropout=0.5, pretrained=False,
                          progress=True, inverted_residual_setting=inverted_residual_setting,
                          norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
                          **kwargs)
    model.name = "EfficentNetB6_Performance42"
    return model


@register_model
def efficientnet_b7_perf42(*args, **kwargs):
    """Build the EfficientNet-B7 "Performance 42" variant.

    Uses B7 compound scaling (width 2.0, depth 3.1) over a shortened custom
    stage layout (six stages instead of seven), with B7-style BatchNorm
    settings (eps=0.001, momentum=0.01). Pretrained weights are not
    available, so any ``pretrained`` request from the caller is dropped.
    """
    bneck_conf = partial(MBConvConfig, width_mult=2.0, depth_mult=3.1)
    # (expand_ratio, kernel, stride, in_channels, out_channels, num_layers)
    inverted_residual_setting = [
        bneck_conf(1, 3, 1, 32, 16, 1),
        bneck_conf(6, 3, 2, 16, 24, 2),
        bneck_conf(6, 5, 2, 24, 40, 2),
        bneck_conf(6, 3, 2, 40, 80, 3),
        bneck_conf(6, 5, 1, 80, 165, 7),
        bneck_conf(6, 5, 2, 165, 320, 1),
    ]
    if "pretrained" in kwargs:
        kwargs.pop("pretrained")
    # BUGFIX: arch tag was "efficientnet_b3" and the display name was
    # "EfficentNetB2_Performance42" — copy-paste leftovers; this is the B7
    # variant (width 2.0 / depth 3.1). Also removed a stray blank line that
    # sat inside the argument list.
    model = _efficientnet("efficientnet_b7", width_mult=2.0, depth_mult=3.1, dropout=0.5, pretrained=False,
                          progress=True, inverted_residual_setting=inverted_residual_setting,
                          norm_layer=partial(nn.BatchNorm2d, eps=0.001, momentum=0.01),
                          **kwargs)
    model.name = "EfficentNetB7_Performance42"
    return model


@register_model
def efficientnet_b0_perf6(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> EfficientNet:
"""
Expand Down

0 comments on commit 45471a7

Please sign in to comment.