evaluate.py
import argparse

import tensorflow as tf

from configuration import save_model_dir
from prepare_data import generate_datasets
from train import process_features
from models import get_model


parser = argparse.ArgumentParser()
parser.add_argument("--idx", default=0, type=int)
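# --idx selects which network architecture models.get_model() builds; the
# valid index range depends on the model list defined in models.py.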

if __name__ == '__main__':
    gpus = tf.config.list_physical_devices('GPU')
    if gpus:
        try:
            for gpu in gpus:
                tf.config.experimental.set_memory_growth(gpu, True)
            logical_gpus = tf.config.list_logical_devices('GPU')
            print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
        except RuntimeError as e:
            print(e)

    args = parser.parse_args()

    # get the original_dataset
    train_dataset, valid_dataset, test_dataset, train_count, valid_count, test_count = generate_datasets()

    # load the model
    model = get_model(args.idx)
    model.load_weights(filepath=save_model_dir)
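    # NOTE: load_weights() expects checkpoint weights previously written by
    # model.save_weights() to save_model_dir (presumably by train.py); the
    # commented-out line below is the alternative for a full SavedModel export.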
    # model = tf.saved_model.load(save_model_dir)

    # Get the accuracy on the test set
    # Use a loss function rather than a stateful metric so each call returns the
    # per-batch loss; assumes the model outputs probabilities (from_logits=False).
    loss_object = tf.keras.losses.SparseCategoricalCrossentropy()
    test_loss = tf.keras.metrics.Mean()
    test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy()

    # @tf.function
    def test_step(images, labels):
        predictions = model(images, training=False)
        t_loss = loss_object(labels, predictions)
        test_loss(t_loss)
        test_accuracy(labels, predictions)
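
    # test_loss and test_accuracy accumulate state across calls, so the values
    # printed inside the loop below are running averages over all batches seen so far.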
    for features in test_dataset:
        test_images, test_labels = process_features(features, data_augmentation=False)
        test_step(test_images, test_labels)
        print("loss: {:.5f}, test accuracy: {:.5f}".format(test_loss.result(),
                                                           test_accuracy.result()))

    print("The accuracy on test set is: {:.3f}%".format(test_accuracy.result() * 100))