Merge pull request #1336 from goodfeli/thoughtididthis
added arg_of_sigmoid
Ian Goodfellow committed Jan 8, 2015
2 parents 7b8bac8 + 43e43ca commit 8c34a73
Showing 2 changed files with 71 additions and 9 deletions.
pylearn2/expr/nnet.py: 41 additions & 1 deletion
@@ -114,6 +114,46 @@ def arg_of_softmax(Y_hat):
    return z


def arg_of_sigmoid(Y_hat):
    """
    Given the output of a call to theano.tensor.nnet.sigmoid,
    returns the argument to the sigmoid (by tracing the Theano
    graph).

    Parameters
    ----------
    Y_hat : Variable
        T.nnet.sigmoid(Z)

    Returns
    -------
    Z : Variable
        The variable that was passed to T.nnet.sigmoid to create `Y_hat`.
        Raises an error if `Y_hat` is not actually the output of a theano
        sigmoid.
    """
    assert hasattr(Y_hat, 'owner')
    owner = Y_hat.owner
    assert owner is not None
    op = owner.op
    # If Y_hat was routed through a Print op, unwrap it to reach the sigmoid.
    if isinstance(op, Print):
        assert len(owner.inputs) == 1
        Y_hat, = owner.inputs
        owner = Y_hat.owner
        op = owner.op
    success = False
    if isinstance(op, T.Elemwise):
        if isinstance(op.scalar_op, T.nnet.sigm.ScalarSigmoid):
            success = True
    if not success:
        raise TypeError("Expected Y_hat to be the output of a sigmoid, "
                        "but it appears to be the output of " + str(op) +
                        " of type " + str(type(op)))
    z, = owner.inputs
    assert z.ndim == 2
    return z
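For context, a minimal usage sketch (not part of this diff; it assumes Theano is installed and uses the same import path as the tests below):

    # Sketch: recover the presigmoid activation Z from a sigmoid output.
    from theano import tensor as T
    from pylearn2.expr.nnet import arg_of_sigmoid

    X = T.matrix()
    Y_hat = T.nnet.sigmoid(X)
    Z = arg_of_sigmoid(Y_hat)  # walks the graph back through the sigmoid
    assert Z is X  # the original argument is returned, not a copy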


def kl(Y, Y_hat, batch_axis):
"""
Warning: This function expects a sigmoid nonlinearity in the
Expand Down Expand Up @@ -323,4 +363,4 @@ def compute_f1(precision, recall):
"""
f1 = (2. * precision * recall /
T.maximum(1, precision + recall))
return f1
return f1
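The T.maximum(1, ...) term clamps the denominator so the expression stays defined (and evaluates to 0) when precision and recall are both zero; note the clamp also lowers the computed value relative to the textbook 2*p*r/(p+r) whenever precision + recall < 1. An illustrative check, not part of this diff, assuming compute_f1 is imported from pylearn2.expr.nnet:

    # Sketch: compile compute_f1 and evaluate it on sample values.
    import theano
    from theano import tensor as T
    from pylearn2.expr.nnet import compute_f1

    precision = T.dscalar()
    recall = T.dscalar()
    f1 = theano.function([precision, recall],
                         compute_f1(precision, recall))
    print(f1(0.5, 0.5))  # 2 * 0.25 / max(1, 1.0) = 0.5
    print(f1(0.0, 0.0))  # denominator clamped to 1, so the result is 0.0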
pylearn2/expr/tests/test_nnet.py: 30 additions & 8 deletions
@@ -14,12 +14,13 @@
from theano import tensor as T

from pylearn2.models.mlp import MLP, Sigmoid
from pylearn2.expr.nnet import arg_of_sigmoid
from pylearn2.expr.nnet import pseudoinverse_softmax_numpy
from pylearn2.expr.nnet import softmax_numpy
from pylearn2.expr.nnet import softmax_ratio
from pylearn2.expr.nnet import compute_recall
from pylearn2.expr.nnet import kl
from pylearn2.expr.nnet import elemwise_kl
from pylearn2.utils import sharedX


@@ -83,7 +84,7 @@ def test_kl():
"""
init_mode = theano.config.compute_test_value
theano.config.compute_test_value = 'raise'

try:
mlp = MLP(layers=[Sigmoid(dim=10, layer_name='Y', irange=0.1)],
nvis=10)
Expand All @@ -101,7 +102,7 @@ def test_kl():
np.testing.assert_raises(ValueError, kl, Y, Y_hat, 1)
Y.tag.test_value[2][3] = -0.1
np.testing.assert_raises(ValueError, kl, Y, Y_hat, 1)

finally:
theano.config.compute_test_value = init_mode

@@ -112,10 +113,10 @@ def test_elemwise_kl():
    input.
    """
    init_mode = theano.config.compute_test_value
    theano.config.compute_test_value = 'raise'

    try:
        mlp = MLP(layers=[Sigmoid(dim=10, layer_name='Y', irange=0.1)],
                  nvis=10)
        X = mlp.get_input_space().make_theano_batch()
        Y = mlp.get_output_space().make_theano_batch()

@@ -131,8 +132,29 @@ def test_elemwise_kl():

        np.testing.assert_raises(ValueError, elemwise_kl, Y, Y_hat)
        Y.tag.test_value[2][3] = -0.1
        np.testing.assert_raises(ValueError, elemwise_kl, Y, Y_hat)

    finally:
        theano.config.compute_test_value = init_mode


def test_arg_of_sigmoid_good():
    """
    Tests that arg_of_sigmoid works when given a good input.
    """

    X = T.matrix()
    Y = T.nnet.sigmoid(X)
    Z = arg_of_sigmoid(Y)
    assert X is Z


def test_arg_of_sigmoid_bad():
    """
    Tests that arg_of_sigmoid raises an error when given a bad input.
    """

    X = T.matrix()
    Y = T.nnet.softmax(X)
    try:
        Z = arg_of_sigmoid(Y)
    except TypeError:
        return
    assert False  # Should have failed
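An equivalent, more compact form of this negative test could reuse the assert_raises helper already used elsewhere in this file (a sketch, assuming numpy is imported as np as in the earlier tests):

    # Sketch: compact form of the negative test above.
    np.testing.assert_raises(TypeError, arg_of_sigmoid, Y)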
