evaluate.py (forked from calmiLovesAI/TensorFlow2.0_ResNet)
import tensorflow as tf
import config
from prepare_data import generate_datasets
from train import get_model
if __name__ == '__main__':
    # GPU settings: enable memory growth so TensorFlow does not allocate all GPU memory up front
    gpus = tf.config.experimental.list_physical_devices('GPU')
    if gpus:
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)

    # get the original dataset splits and their sizes
    train_dataset, valid_dataset, test_dataset, train_count, valid_count, test_count = generate_datasets()

    # load the model architecture and restore the trained weights
    model = get_model()
    model.load_weights(filepath=config.save_model_dir)

    # get the accuracy on the test set
    loss_object = tf.keras.losses.SparseCategoricalCrossentropy()
    test_loss = tf.keras.metrics.Mean()
    test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy()

    @tf.function
    def test_step(images, labels):
        predictions = model(images, training=False)
        t_loss = loss_object(labels, predictions)
        test_loss(t_loss)
        test_accuracy(labels, predictions)

    for test_images, test_labels in test_dataset:
        test_step(test_images, test_labels)
        print("loss: {:.5f}, test accuracy: {:.5f}".format(test_loss.result(),
                                                           test_accuracy.result()))

    print("The accuracy on test set is: {:.3f}%".format(test_accuracy.result() * 100))