From c4d9725480890854c818e85d9437f30b1564cfa3 Mon Sep 17 00:00:00 2001
From: Mannat Singh
Date: Mon, 22 Mar 2021 15:44:52 -0700
Subject: [PATCH] Fix SiLU breakage in RegNets for PT < 1.7 (#725)

Summary:
Pull Request resolved: https://github.com/facebookresearch/ClassyVision/pull/725

SiLU is only available from PT >= 1.7

Now that our tests work fine, we can finally detect and fix issues like these

Reviewed By: kazhang

Differential Revision: D27236649

fbshipit-source-id: ed5bc81a40d1a21def13d81b83b181c55e86f6f5
---
 classy_vision/models/regnet.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/classy_vision/models/regnet.py b/classy_vision/models/regnet.py
index a939d5998..ad4309a06 100644
--- a/classy_vision/models/regnet.py
+++ b/classy_vision/models/regnet.py
@@ -447,17 +447,15 @@ class RegNet(ClassyModel):
     def __init__(self, params: RegNetParams):
         super().__init__()
 
-        if params.activation_type == ActivationType.SILU and get_torch_version() < [
-            1,
-            7,
-        ]:
-            raise RuntimeError("SiLU activation is only supported since PyTorch 1.7")
-
+        silu = None if get_torch_version() < [1, 7] else nn.SiLU()
         activation = {
             ActivationType.RELU: nn.ReLU(params.relu_in_place),
-            ActivationType.SILU: nn.SiLU(),
+            ActivationType.SILU: silu,
         }[params.activation_type]
 
+        if activation is None:
+            raise RuntimeError("SiLU activation is only supported since PyTorch 1.7")
+
         # Ad hoc stem
         self.stem = {
             StemType.RES_STEM_CIFAR: ResStemCifar,