ops.py
import tensorflow as tf


def weight_variable(name, shape):
    """Create a weight variable with truncated-normal initialization (stddev=0.01)."""
    initer = tf.truncated_normal_initializer(stddev=0.01)
    return tf.get_variable('W_' + name, dtype=tf.float32,
                           shape=shape, initializer=initer)


def bias_variable(name, shape):
    """Create a bias variable initialized to zero."""
    initial = tf.constant(0., shape=shape, dtype=tf.float32)
    return tf.get_variable('b_' + name, dtype=tf.float32,
                           initializer=initial)


def fc_layer(bottom, out_dim, name,
             add_reg=False,
             nonlinearity=None,
             batch_normalize=False,
             is_train=True):
    """Create a fully connected layer."""
    in_dim = bottom.get_shape()[1]
    with tf.variable_scope(name):
        weights = weight_variable(name, shape=[in_dim, out_dim])
        tf.summary.histogram('histogram', weights)
        biases = bias_variable(name, [out_dim])
        layer = tf.matmul(bottom, weights)
        layer += biases
        # Optional batch normalization applied before the nonlinearity
        if batch_normalize:
            layer = batch_norm(layer, is_train)
        if nonlinearity == 'relu':
            layer = tf.nn.relu(layer)
        elif nonlinearity == 'sigmoid':
            layer = tf.nn.sigmoid(layer)
        # Collect the weights so a regularization term can be built elsewhere
        if add_reg:
            tf.add_to_collection('weights', weights)
    return layer
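
# Example (illustrative sketch, not from the original file): stacking fc_layer,
# assuming `x` is a [batch, features] float tensor defined elsewhere:
#   h1 = fc_layer(x, 128, 'fc1', nonlinearity='relu', batch_normalize=True)
#   logits = fc_layer(h1, 10, 'fc2', add_reg=True)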


def flatten_layer(layer):
    """Flatten a 4-D [batch, height, width, channels] tensor to [batch, features]."""
    with tf.variable_scope('Flatten_layer'):
        layer_shape = layer.get_shape()
        # Total number of features = height * width * channels
        num_features = layer_shape[1:4].num_elements()
        layer_flat = tf.reshape(layer, [-1, num_features])
    return layer_flat


def lrn(x, radius, alpha, beta, name, bias=1.0):
    """Create a local response normalization layer."""
    return tf.nn.local_response_normalization(x, depth_radius=radius,
                                              alpha=alpha, beta=beta,
                                              bias=bias, name=name)


def batch_norm(inputs, is_training, decay=0.999, epsilon=1e-3):
    """Perform batch normalization on a convolutional or fully connected layer."""
    scale = tf.Variable(tf.ones([inputs.get_shape()[-1]]))
    beta = tf.Variable(tf.zeros([inputs.get_shape()[-1]]))
    pop_mean = tf.Variable(tf.zeros([inputs.get_shape()[-1]]), trainable=False)
    pop_var = tf.Variable(tf.ones([inputs.get_shape()[-1]]), trainable=False)

    if is_training:
        if len(inputs.get_shape().as_list()) == 4:  # convolutional layer: [N, H, W, C]
            batch_mean, batch_var = tf.nn.moments(inputs, [0, 1, 2])
        else:  # fully connected layer: [N, C]
            batch_mean, batch_var = tf.nn.moments(inputs, [0])
        # Update the population statistics with an exponential moving average
        train_mean = tf.assign(pop_mean, pop_mean * decay + batch_mean * (1 - decay))
        train_var = tf.assign(pop_var, pop_var * decay + batch_var * (1 - decay))
        with tf.control_dependencies([train_mean, train_var]):
            return tf.nn.batch_normalization(inputs, batch_mean, batch_var, beta, scale, epsilon)
    else:
        return tf.nn.batch_normalization(inputs, pop_mean, pop_var, beta, scale, epsilon)
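
# Note (added comment): `is_training` is an ordinary Python bool evaluated when
# the graph is built, so training vs. inference behaviour is fixed at
# construction time and cannot be toggled with a placeholder at run time.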


def dropout(x, keep_prob):
    """Create a dropout layer."""
    return tf.nn.dropout(x, keep_prob)
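

if __name__ == '__main__':
    # Minimal smoke test (illustrative sketch, not part of the original file):
    # wires the ops above into a tiny TF 1.x graph. The input shape, layer
    # sizes and placeholder names are arbitrary assumptions for this example.
    import numpy as np

    x = tf.placeholder(tf.float32, shape=[None, 8, 8, 3], name='x')
    keep_prob = tf.placeholder(tf.float32, name='keep_prob')

    flat = flatten_layer(x)                      # [None, 8 * 8 * 3]
    h1 = fc_layer(flat, 32, 'fc1', nonlinearity='relu',
                  batch_normalize=True, is_train=True)
    h1 = dropout(h1, keep_prob)
    logits = fc_layer(h1, 10, 'fc2')

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out = sess.run(logits, feed_dict={x: np.zeros((4, 8, 8, 3), np.float32),
                                          keep_prob: 1.0})
        print(out.shape)  # expected: (4, 10)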