From b66f3679a6c0e26c7efd347e9db8b0bd56f6b24e Mon Sep 17 00:00:00 2001
From: Vincent Tjeng
Date: Sat, 25 Aug 2018 22:39:05 -0700
Subject: [PATCH] Use interval arithmetic always for first ReLU layer in
 example neural networks.

---
 src/utils/import_example_nets.jl | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/utils/import_example_nets.jl b/src/utils/import_example_nets.jl
index fbf84fed..34d58a1c 100644
--- a/src/utils/import_example_nets.jl
+++ b/src/utils/import_example_nets.jl
@@ -32,7 +32,7 @@ function get_example_network_params(name::String)::NeuralNet
 
         nn = Sequential([
             Flatten(4),
-            fc1, ReLU(),
+            fc1, ReLU(interval_arithmetic),
             fc2, ReLU(),
             logits], name)
         return nn
@@ -44,7 +44,7 @@ function get_example_network_params(name::String)::NeuralNet
         logits = get_matrix_params(param_dict, "logits", (100, 10))
 
         nn = Sequential([
-            conv1, ReLU(),
+            conv1, ReLU(interval_arithmetic),
             conv2, ReLU(),
             Flatten([1, 3, 2, 4]),
             fc1, ReLU(),
@@ -57,7 +57,7 @@ function get_example_network_params(name::String)::NeuralNet
 
         nn = Sequential([
             Flatten([1, 3, 2, 4]),
-            fc1, ReLU(),
+            fc1, ReLU(interval_arithmetic),
             logits], name)
         return nn
     else
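
Note (not part of the patch): a minimal Julia sketch contrasting the two ReLU constructors that this diff distinguishes. `ReLU` and `interval_arithmetic` are names taken directly from the changed lines; treating this file as part of MIPVerify.jl, and the rationale given in the comments, are my assumptions rather than statements from the commit.

using MIPVerify: ReLU, interval_arithmetic  # assumption: this file belongs to MIPVerify.jl

# First ReLU layer of each example network: bounds are now always computed
# with interval arithmetic (assumed rationale: the inputs to the first layer
# are plain box constraints, so interval arithmetic is already tight there
# and avoids the cost of solver-based tightening).
first_relu = ReLU(interval_arithmetic)

# Subsequent ReLU layers keep the default tightening behaviour, as in the diff.
later_relu = ReLU()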