Make tf.random.get_seed() depend on the number of calls to it, not on the op count.

It was difficult to change internal op counts because many tests depend on the specific seed, which is currently derived from the op count. With this change, get_seed() depends not on the internal op count but on the number of calls to get_seed().

PiperOrigin-RevId: 294523232
Change-Id: I4c0b7ea69adef8aceafcfa4754c30e9ea7a8d1e9
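
For illustration, a minimal sketch of the behavioral difference this commit describes. This is not TensorFlow's actual implementation of tf.random.get_seed(); the module-level counters, helper, and simplified signature below are hypothetical.

# Hypothetical sketch, not TensorFlow's real random_seed module.
_graph_op_count = 0   # stand-in for "ops already added to the graph"
_get_seed_calls = 0   # incremented once per get_seed() call

def add_internal_op():
    """Simulates TensorFlow adding an op to the graph internally."""
    global _graph_op_count
    _graph_op_count += 1

def get_seed(op_seed=None, graph_seed=0):
    """Returns (graph_seed, op_seed) for a random op.

    Before: the fallback op seed tracked the graph's op count, so adding
    unrelated internal ops shifted every downstream seed.
    After: it tracks how many times get_seed() has been called, which is
    stable under changes to internal op counts.
    """
    global _get_seed_calls
    if op_seed is None:
        op_seed = _get_seed_calls      # new behavior: call count
        # op_seed = _graph_op_count    # old behavior: op count
    _get_seed_calls += 1
    return graph_seed, op_seed

# With the new behavior, adding internal ops does not change the seeds:
add_internal_op()
print(get_seed())  # (0, 0)
add_internal_op()
print(get_seed())  # (0, 1)

Because the tests below seed their graphs and then rely on the exact random draws, this change shifts the expected values they assert against.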
TF-Slim Team authored and copybara-github committed Feb 11, 2020
1 parent 3a73bea commit 6e71f24
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions tf_slim/layers/optimizers_test.py
@@ -191,7 +191,7 @@ def testGradientNoise(self):
       session.run(train, feed_dict={x: 5})
       var_value, global_step_value = session.run([var, global_step])
       # Due to randomness the following number may change if graph is different.
-      self.assertAlmostEqual(var_value, 9.86912, 4)
+      self.assertAlmostEqual(var_value, 9.801016, 4)
       self.assertEqual(global_step_value, 1)
 
   @disable_resource_variables
@@ -209,7 +209,7 @@ def testGradientNoiseWithClipping(self):
       variables.global_variables_initializer().run()
       session.run(train, feed_dict={x: 5})
       var_value, global_step_value = session.run([var, global_step])
-      self.assertAlmostEqual(var_value, 9.86912, 4)
+      self.assertAlmostEqual(var_value, 9.801016, 4)
       self.assertEqual(global_step_value, 1)
 
   def testGradientClip(self):
2 changes: 1 addition & 1 deletion tf_slim/learning_test.py
@@ -885,7 +885,7 @@ def testTrainingSubsetsOfVariablesOnlyUpdatesThoseVariables(self):
 
       # Update only biases.
       loss = sess.run(train_biases)
-      self.assertGreater(loss, .5)
+      self.assertGreater(loss, .45)
       new_weights, new_biases = sess.run([weights, biases])
 
       # Check that the biases have been updated, but weights have not.
