
Commit 03efd4a

bug fix
1 parent ad217db

4 files changed (+10 -7 lines)

.gitignore (+4 -1)

@@ -11,4 +11,7 @@
 
 java/.idea/*
 java/java.iml
-java/out/*
+java/out/*
+
+data/*
+!data/.gitkeep
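
The data/* and !data/.gitkeep pair is the usual idiom for keeping an otherwise-ignored directory in the repository: Git cannot track an empty directory, so an empty placeholder file (data/.gitkeep, committed below) is kept under version control while the negation pattern re-includes only that placeholder. The java/out/* line being removed and re-added unchanged most likely reflects a missing newline at the end of the old file. Assuming a scratch file data/foo.csv exists in the working tree (it is not part of this commit), the new rules can be sanity-checked with git check-ignore:

$ git check-ignore -v data/foo.csv
.gitignore:16:data/*	data/foo.csv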

data/.gitkeep

Whitespace-only changes.

java/src/DeepLearning/Dropout.java (+5 -5)

@@ -103,14 +103,14 @@ public void train(int epochs, double[][] train_X, int[][] train_Y, boolean dropout
                 prev_W = hiddenLayers[i+1].W;
             }
 
-            dy = new double[hidden_layer_sizes[i]];
-            hiddenLayers[i].backward(layer_inputs.get(i), dy, layer_inputs.get(i+1), prev_dy, prev_W, lr);
-
             if(dropout) {
-                for(int j=0; j<dy.length; j++) {
-                    dy[j] *= dropout_masks.get(i)[j];
+                for(int j=0; j<prev_dy.length; j++) {
+                    prev_dy[j] *= dropout_masks.get(i)[j];
                 }
             }
+
+            dy = new double[hidden_layer_sizes[i]];
+            hiddenLayers[i].backward(layer_inputs.get(i), dy, layer_inputs.get(i+1), prev_dy, prev_W, lr);
         }
 
     }
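
The Dropout.java change is more than a reshuffle. Before, the mask for layer i was applied to dy only after hiddenLayers[i].backward(...) had already consumed the unmasked prev_dy, so units dropped in the forward pass still contributed gradient to the weight update. The fix zeroes the incoming gradient prev_dy first, then backpropagates, matching the forward pass. A minimal numpy sketch of this ordering (a standalone illustration, not the repository's Java code; the function names are hypothetical):

import numpy

def dropout_forward(x, p_drop, rng):
    # Sample a binary keep-mask: 1 keeps a unit, 0 drops it.
    mask = rng.binomial(n=1, p=1.0 - p_drop, size=x.shape)
    return x * mask, mask

def dropout_backward(upstream_grad, mask):
    # Zero the gradient of dropped units *before* it is backpropagated
    # through the layer below -- the reordering this commit makes.
    return upstream_grad * mask

rng = numpy.random.RandomState(123)
x = rng.rand(4, 3)                               # one layer's activations
y, mask = dropout_forward(x, 0.5, rng)           # forward pass with dropout
g = dropout_backward(numpy.ones_like(y), mask)   # mask, then keep backpropagating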

python/RBM.py (+1 -1)

@@ -61,7 +61,7 @@ def contrastive_divergence(self, lr=0.1, k=1, input=None):
         self.W += lr * (numpy.dot(self.input.T, ph_mean)
                         - numpy.dot(nv_samples.T, nh_means))
         self.vbias += lr * numpy.mean(self.input - nv_samples, axis=0)
-        self.hbias += lr * numpy.mean(ph_sample - nh_means, axis=0)
+        self.hbias += lr * numpy.mean(ph_mean - nh_means, axis=0)
 
         # cost = self.get_reconstruction_cross_entropy()
         # return cost
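
The RBM.py change is one word but matters for training: the contrastive-divergence hidden-bias gradient is the difference of hidden-unit probabilities under the data and under the reconstruction, mean(p(h|v0) - p(h|v1)). Using the binary sample ph_sample on the positive side injects needless sampling noise and is asymmetric with the negative side, which already uses the probabilities nh_means. A minimal numpy sketch of one CD-1 step in the same convention (a standalone illustration; cd1_step and its argument names are hypothetical, not the repository's API):

import numpy

def sigmoid(x):
    return 1.0 / (1.0 + numpy.exp(-x))

def cd1_step(v0, W, vbias, hbias, lr, rng):
    # Positive phase: hidden probabilities given the data, plus a sample.
    ph_mean = sigmoid(numpy.dot(v0, W) + hbias)
    ph_sample = rng.binomial(n=1, p=ph_mean)
    # Negative phase: one Gibbs step back to the visibles, then to the hiddens.
    nv_mean = sigmoid(numpy.dot(ph_sample, W.T) + vbias)
    nv_sample = rng.binomial(n=1, p=nv_mean)
    nh_mean = sigmoid(numpy.dot(nv_sample, W) + hbias)
    # Updates use probabilities (means) on both sides of each difference --
    # the symmetry this commit restores for the hidden bias.
    W += lr * (numpy.dot(v0.T, ph_mean) - numpy.dot(nv_sample.T, nh_mean))
    vbias += lr * numpy.mean(v0 - nv_sample, axis=0)
    hbias += lr * numpy.mean(ph_mean - nh_mean, axis=0)

rng = numpy.random.RandomState(0)
v0 = rng.binomial(1, 0.5, size=(10, 6)).astype(float)  # 10 samples, 6 visibles
W = 0.1 * rng.randn(6, 4)                              # 6 visibles, 4 hiddens
vbias = numpy.zeros(6)
hbias = numpy.zeros(4)
cd1_step(v0, W, vbias, hbias, lr=0.1, rng=rng)         # updates W, vbias, hbias in place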
