-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathhelper.py
39 lines (31 loc) · 1.26 KB
/
helper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import tensorflow as tf
def reduce_logsumexp(input_tensor, reduction_indices=1, keep_dims=False):
  """Numerically stable log-sum-exp reduction, mirroring tf.reduce_sum's API.

  Subtracts the per-slice maximum before exponentiating so large logits do
  not overflow, then adds it back after taking the log.

  Args:
    input_tensor: The tensor to reduce. Should have numeric type.
    reduction_indices: The dimensions to reduce.
    keep_dims: If true, retains reduced dimensions with length 1.
  Returns:
    The reduced tensor.
  """
  slice_max = tf.reduce_max(
      input_tensor, reduction_indices, keep_dims=keep_dims)
  # The max must broadcast against input_tensor; when the reduced axis was
  # squeezed away, re-insert it so the subtraction below lines up.
  if keep_dims:
    broadcast_max = slice_max
  else:
    broadcast_max = tf.expand_dims(slice_max, reduction_indices)
  shifted_sum = tf.reduce_sum(
      tf.exp(input_tensor - broadcast_max),
      reduction_indices,
      keep_dims=keep_dims)
  return tf.log(shifted_sum) + slice_max
def logsoftmax(input_tensor):
  """Computes the softmax nonlinearity in log domain.

  It can be used to normalize log probability. The normalization is
  computed along dimension 0 (the first dimension) of the input tensor,
  since reduce_logsumexp is called with reduction_indices=0.
  (The previous docstring incorrectly said "the second dimension".)

  Args:
    input_tensor: Unnormalized log probability.
  Returns:
    Normalized log probability (log-softmax of the input).
  """
  return input_tensor - reduce_logsumexp(input_tensor, reduction_indices=0, keep_dims=True)