"""
Some simple logging functionality, inspired by rllab's logging.
Assumes that each diagnostic gets logged each iteration.

Call logz.configure_output_dir() to start logging to a
tab-separated-values file (some_folder_name/log.txt).

To load the learning curves, you can do, for example

A = np.genfromtxt('/tmp/expt_1468984536/log.txt', delimiter='\t', dtype=None, names=True)
A['EpRewMean']

6 Mar 21
- new functionality: logging on TensorBoard
"""
import atexit
import json
import os
import os.path as osp
import pickle
import shutil
import subprocess
import time

import numpy as np
from torch.utils.tensorboard import SummaryWriter  # object for writing information to TensorBoard
# ANSI color codes for terminal output
color2num = dict(
    gray=30,
    red=31,
    green=32,
    yellow=33,
    blue=34,
    magenta=35,
    cyan=36,
    white=37,
    crimson=38,
)
def colorize(string, color, bold=False, highlight=False):
    """Wrap a string in ANSI escape codes so it prints in color on the terminal."""
    attr = []
    num = color2num[color]
    if highlight: num += 10
    attr.append(str(num))
    if bold: attr.append('1')
    return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), string)
class G:
    """Module-level container for the logger's global state."""
    output_dir = None
    output_file = None
    first_row = True
    print_header = True  # controls the one-time header line written to log.txt
    log_headers = []
    log_current_row = {}
    writer = None
def configure_output_dir(d=None, tensorboard_file=True):
    """
    Set output directory to d, or to /tmp/experiments/<timestamp> if d is None
    """
    G.output_dir = d or "/tmp/experiments/%i" % int(time.time())
    assert not osp.exists(G.output_dir), "Log dir %s already exists! Delete it first or use a different dir" % G.output_dir
    os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s" % G.output_file.name, 'green', bold=True))
    G.print_header = True
    # New addition for tensorboard
    if tensorboard_file:
        G.writer = SummaryWriter(G.output_dir)  # set up TensorBoard
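# The event files written by SummaryWriter can be inspected with the standard
# TensorBoard CLI, e.g. `tensorboard --logdir /tmp/experiments`
# (the path here is only an example; point it at whatever directory you configured).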
def log_tabular(key, val):
    """
    Log a value of some diagnostic
    Call this once for each diagnostic quantity, each iteration
    """
    if G.first_row:
        G.log_headers.append(key)
    else:
        assert key in G.log_headers, "Trying to introduce a new key %s that you didn't include in the first iteration" % key
    assert key not in G.log_current_row, "You already set %s this iteration. Maybe you forgot to call dump_tabular()" % key
    G.log_current_row[key] = val
def save_params(params):
    with open(osp.join(G.output_dir, "params.json"), 'w') as out:
        out.write(json.dumps(params, separators=(',\n', '\t:\t'), sort_keys=True))
def pickle_tf_vars():
    """
    Saves tensorflow variables
    Requires them to be initialized first, also a default session must exist
    """
    import tensorflow as tf  # imported lazily so the rest of the logger works without TensorFlow installed
    _dict = {v.name: v.eval() for v in tf.global_variables()}
    with open(osp.join(G.output_dir, "vars.pkl"), 'wb') as f:
        pickle.dump(_dict, f)
def dump_tabular(step=0):
    """
    Write all of the diagnostics from the current iteration
    """
    vals = []
    key_lens = [len(key) for key in G.log_headers]
    max_key_len = max(15, max(key_lens))
    keystr = '%' + '%d' % max_key_len
    fmt = "| " + keystr + "s | %15s |"
    n_slashes = 22 + max_key_len
    print("-" * n_slashes)
    for key in G.log_headers:
        val = G.log_current_row.get(key, "")
        # Format numeric values compactly; leave everything else as a plain string
        if hasattr(val, "__float__"):
            valstr = "%8.3g" % val
        else:
            valstr = str(val)
        print(fmt % (key, valstr))
        vals.append(val)
        # TensorBoard logging facility: only scalar values can be added,
        # and only if a writer was configured
        if G.writer is not None and hasattr(val, "__float__"):
            G.writer.add_scalar(key, val, step)
    print("-" * n_slashes)
    if G.output_file is not None:
        if G.print_header:
            G.output_file.write("\t".join(G.log_headers))
            G.output_file.write("\n")
        G.output_file.write("\t".join(map(str, vals)))
        G.output_file.write("\n")
        G.output_file.flush()
    G.log_current_row.clear()
    G.first_row = False
    G.print_header = False
def save_data():
    sorted_data = {}
    for key in G.log_headers:
        val = G.log_current_row.get(key, "")
        # We convert to np.array() and then to list to convert from np datatypes to
        # native datatypes. This is necessary because json.dump cannot handle
        # np.float32, for example.
        sorted_data[key] = np.array(val).tolist()
    # Reopening in 'w' mode overwrites the previously written file, which is fine
    # since the data only grows monotonically.
    filepath = osp.join(G.output_dir, "log.json")
    with open(filepath, 'w') as f:
        json.dump(sorted_data, f)
    G.log_current_row.clear()
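# A minimal usage sketch, runnable with `python logger.py`. The directory name and
# the diagnostic values below are illustrative only, not part of the original API.
if __name__ == "__main__":
    configure_output_dir("/tmp/experiments/logger_demo_%i" % int(time.time()))
    for itr in range(3):
        log_tabular("Iteration", itr)
        log_tabular("EpRewMean", 100.0 * itr)  # dummy diagnostic value
        dump_tabular(step=itr)  # prints the table and writes log.txt + TensorBoard events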