From 51aebed7efa752b1d31b8253cc76a1d2d2b126d6 Mon Sep 17 00:00:00 2001
From: Peter Vieting
Date: Thu, 8 Sep 2022 19:07:53 +0200
Subject: [PATCH] hybrid setup remove adapt_returnn_config_for_recog

---
 common/setups/rasr/hybrid_system.py | 48 +----------------------------
 1 file changed, 1 insertion(+), 47 deletions(-)

diff --git a/common/setups/rasr/hybrid_system.py b/common/setups/rasr/hybrid_system.py
index 8d3a5b254..5ee217106 100644
--- a/common/setups/rasr/hybrid_system.py
+++ b/common/setups/rasr/hybrid_system.py
@@ -107,52 +107,6 @@ def __init__(
         self.nn_checkpoints = {}
 
     # -------------------- Helpers --------------------
-    @staticmethod
-    def adapt_returnn_config_for_recog(returnn_config: returnn.ReturnnConfig):
-        """
-        Adapt a RETURNN config for recognition, e.g., remove loss and use log softmax activation in last layer
-
-        :param ReturnnConfig returnn_config:
-        :rtype ReturnnConfig:
-        """
-        assert isinstance(returnn_config, returnn.ReturnnConfig)
-        config = copy.deepcopy(returnn_config)
-        forward_output_layer = config.config.get("forward_output_layer", "output")
-        network = config.config.get("network")
-        for layer_name, layer in network.items():
-            if layer.get("unit", None) in {"lstmp"}:
-                layer["unit"] = "nativelstm2"
-            if layer.get("target", None):
-                layer.pop("target")
-            layer.pop("loss", None)
-            layer.pop("loss_scale", None)
-            layer.pop("loss_opts", None)
-        if network[forward_output_layer]["class"] == "softmax":
-            network[forward_output_layer]["class"] = "linear"
-            network[forward_output_layer]["activation"] = "log_softmax"
-        elif network[forward_output_layer]["class"] == "linear":
-            if network[forward_output_layer]["activation"] == "softmax":
-                network[forward_output_layer]["activation"] = "log_softmax"
-            elif network[forward_output_layer]["activation"] == "sigmoid":
-                network[forward_output_layer]["activation"] = "log_sigmoid"
-            elif network[forward_output_layer]["activation"] == "exp":
-                network[forward_output_layer]["activation"] = None
-            elif network[forward_output_layer]["activation"] is None:
-                network[forward_output_layer]["activation"] = "log"
-        # target = 'classes'
-        if "cropped" in network:
-            if network["output"]["from"] == ["cropped"]:
-                network["output"]["from"] = "upsample"
-            network.pop("cropped")
-            if "lstm_bwd_1" in network:
-                network["lstm_bwd_1"]["from"] = "upsample"
-                network["lstm_fwd_1"]["from"] = "upsample"
-            if "lstm_fwd_1_no_init" in network:
-                network["lstm_bwd_1_no_init"]["from"] = "upsample"
-                network["lstm_fwd_1_no_init"]["from"] = "upsample"
-
-        return config
-
     @staticmethod
     def get_tf_flow(
         checkpoint_path: Union[Path, returnn.Checkpoint],
@@ -492,7 +446,7 @@ def nn_recognition(
         native_lstm_job.add_alias("%s/compile_native_op" % name)
 
         graph_compile_job = returnn.CompileTFGraphJob(
-            self.adapt_returnn_config_for_recog(returnn_config),
+            returnn_config,
            returnn_root=self.returnn_root,
            returnn_python_exe=self.returnn_python_exe,
        )
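
Note (reviewer addition, not part of the patch): after this change, nn_recognition compiles
the graph from the returnn_config exactly as passed in, so callers must hand over a config
that is already recognition-ready (no losses, output scores in log space). For setups that
still relied on the removed helper, a minimal standalone sketch of the same transformation
follows. It is hedged: the function name make_recog_config is illustrative, it assumes a
ReturnnConfig-like object whose .config["network"] is a plain dict of layer dicts, and it
omits the setup-specific "cropped"/"upsample" rewiring from the removed method.

    import copy

    def make_recog_config(returnn_config):
        """Sketch: strip training-only options from a RETURNN config and switch
        the output layer to log-space scores, mirroring the generic part of the
        removed adapt_returnn_config_for_recog.
        """
        config = copy.deepcopy(returnn_config)
        network = config.config["network"]
        for layer in network.values():
            if layer.get("unit") == "lstmp":
                layer["unit"] = "nativelstm2"  # recognition graph uses the native LSTM op
            # drop training-only attributes
            layer.pop("target", None)
            layer.pop("loss", None)
            layer.pop("loss_scale", None)
            layer.pop("loss_opts", None)
        out = network[config.config.get("forward_output_layer", "output")]
        if out["class"] == "softmax":
            out["class"] = "linear"
            out["activation"] = "log_softmax"
        elif out["class"] == "linear":
            if out["activation"] == "softmax":
                out["activation"] = "log_softmax"
            elif out["activation"] == "sigmoid":
                out["activation"] = "log_sigmoid"
            elif out["activation"] == "exp":
                out["activation"] = None  # log(exp(x)) = x
            elif out["activation"] is None:
                out["activation"] = "log"
        return config

Usage would be a one-liner at the call site, e.g. passing make_recog_config(returnn_config)
to nn_recognition instead of the raw training config.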